From 0d3d3d8403344e9cf1dbc4e306bd895f98948483 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Mon, 23 Jul 2018 17:36:20 +0100 Subject: [PATCH] fix: return cids from builder Enforces CID Version 1 for non-sha2-256 hash algorithms and returns the buffer that makes up the CID instead of the buffer that makes up the multihash. Returning the multihash for v0 CIDs is fine because the codec and version are implicit but if we want to be able to load DAGNodes stored with a v1 CID, we need to return the CID buffer instead. --- src/builder/builder.js | 15 ++++++++++---- test/builder.js | 45 +++++++++++++++++++++++++++++++++++++++--- 2 files changed, 53 insertions(+), 7 deletions(-) diff --git a/src/builder/builder.js b/src/builder/builder.js index 44023906..b0c117ed 100644 --- a/src/builder/builder.js +++ b/src/builder/builder.js @@ -26,7 +26,12 @@ const defaultOptions = { module.exports = function builder (createChunker, ipld, createReducer, _options) { const options = extend({}, defaultOptions, _options) - options.cidVersion = options.cidVersion || 0 + options.cidVersion = options.cidVersion || options.cidVersion + options.hashAlg = options.hashAlg || defaultOptions.hashAlg + + if (options.hashAlg !== 'sha2-256') { + options.cidVersion = 1 + } return function (source) { return function (items, cb) { @@ -71,8 +76,10 @@ module.exports = function builder (createChunker, ipld, createReducer, _options) return cb(null, node) } + node.cid = new CID(options.cidVersion, 'dag-pb', node.multihash) + ipld.put(node, { - cid: new CID(options.cidVersion, 'dag-pb', node.multihash) + cid: node.cid }, (err) => cb(err, node)) } ], (err, node) => { @@ -81,7 +88,7 @@ module.exports = function builder (createChunker, ipld, createReducer, _options) } callback(null, { path: item.path, - multihash: node.multihash, + multihash: node.cid.buffer, size: node.size }) }) @@ -155,7 +162,7 @@ module.exports = function builder (createChunker, ipld, createReducer, _options) pull.map((leaf) => { return { 
path: file.path, - multihash: leaf.multihash, + multihash: leaf.cid.buffer, size: leaf.size, leafSize: leaf.leafSize, name: '', diff --git a/test/builder.js b/test/builder.js index 586d907c..f5d7f976 100644 --- a/test/builder.js +++ b/test/builder.js @@ -40,11 +40,13 @@ module.exports = (repo) => { const node = nodes[0] expect(node).to.exist() + const cid = new CID(node.multihash) + // Verify multihash has been encoded using hashAlg - expect(mh.decode(node.multihash).name).to.equal(hashAlg) + expect(mh.decode(cid.multihash).name).to.equal(hashAlg) // Fetch using hashAlg encoded multihash - ipld.get(new CID(node.multihash), (err, res) => { + ipld.get(cid, (err, res) => { if (err) return cb(err) const content = UnixFS.unmarshal(res.value.data).data expect(content.equals(inputFile.content)).to.be.true() @@ -77,7 +79,8 @@ module.exports = (repo) => { try { expect(node).to.exist() - expect(mh.decode(node.multihash).name).to.equal(hashAlg) + const cid = new CID(node.multihash) + expect(mh.decode(cid.multihash).name).to.equal(hashAlg) } catch (err) { return cb(err) } @@ -92,5 +95,41 @@ module.exports = (repo) => { ) }, done) }) + + it('allows multihash hash algorithm to be specified for a directory', (done) => { + eachSeries(testMultihashes, (hashAlg, cb) => { + const options = { hashAlg, strategy: 'flat' } + const inputFile = { + path: `${String(Math.random() + Date.now())}-dir`, + content: null + } + + const onCollected = (err, nodes) => { + if (err) return cb(err) + + const node = nodes[0] + + expect(node).to.exist() + + const cid = new CID(node.multihash) + + expect(mh.decode(cid.multihash).name).to.equal(hashAlg) + + // Fetch using hashAlg encoded multihash + ipld.get(cid, (err, res) => { + if (err) return cb(err) + const meta = UnixFS.unmarshal(res.value.data) + expect(meta.type).to.equal('directory') + cb() + }) + } + + pull( + pull.values([Object.assign({}, inputFile)]), + createBuilder(FixedSizeChunker, ipld, options), + pull.collect(onCollected) + ) + }, done) + 
}) }) }