From d96cf12d09449498bf4d909e7e5cb597f2177821 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 16 Apr 2020 08:49:45 +0100 Subject: [PATCH 1/2] refactor: use the block API from ipfs instead of ipld internals This improves reusability of the module as it can be used by passing part of an `ipfs` or `ipfs-http-client` instance in. It also means we no longer double-serialize blocks before adding them, which delivers a small but almost imperceptible performance increase. Finally, it also documents the `pin` and `preload` arguments. --- packages/ipfs-unixfs-importer/README.md | 2 + packages/ipfs-unixfs-importer/package.json | 2 +- .../src/dag-builder/dir.js | 8 +- .../src/dag-builder/file/buffer-importer.js | 18 +-- .../src/dag-builder/file/index.js | 31 ++++-- .../src/dag-builder/index.js | 6 +- packages/ipfs-unixfs-importer/src/dir-flat.js | 12 +- .../ipfs-unixfs-importer/src/dir-sharded.js | 31 +++--- packages/ipfs-unixfs-importer/src/index.js | 8 +- .../ipfs-unixfs-importer/src/tree-builder.js | 4 +- .../ipfs-unixfs-importer/src/utils/persist.js | 35 +++--- .../test/builder-dir-sharding.spec.js | 17 +-- .../test/builder-only-hash.spec.js | 5 +- .../ipfs-unixfs-importer/test/builder.spec.js | 11 +- .../test/chunker-custom.spec.js | 7 +- .../test/hash-parity-with-go-ipfs.spec.js | 5 +- .../test/helpers/block.js | 35 ++++++ .../test/import-export-nested-dir.spec.js | 5 +- .../test/import-export.spec.js | 5 +- .../test/importer.spec.js | 104 +++++++++--------- 20 files changed, 209 insertions(+), 142 deletions(-) create mode 100644 packages/ipfs-unixfs-importer/test/helpers/block.js diff --git a/packages/ipfs-unixfs-importer/README.md b/packages/ipfs-unixfs-importer/README.md index d9ef06e2..3ec407e0 100644 --- a/packages/ipfs-unixfs-importer/README.md +++ b/packages/ipfs-unixfs-importer/README.md @@ -140,6 +140,8 @@ The input's file paths and directory structure will be preserved in the [`dag-pb - `leafType` (string, defaults to `'file'`) what type of UnixFS node leaves should be - can be `'file'` or `'raw'` (ignored when `rawLeaves` is `true`) - `blockWriteConcurrency` (positive integer, defaults to 10) How many blocks to hash and write to the block store concurrently. For small numbers of large files this should be high (e.g. 50). - `fileImportConcurrency` (number, defaults to 50) How many files to import concurrently. For large numbers of small files this should be high (e.g. 50).
+- `pin` (boolean, defaults to `false`) Whether to pin each block as it is created +- `preload` (boolean, defaults to `false`) Whether to preload each block as it is created ## Overriding internals diff --git a/packages/ipfs-unixfs-importer/package.json b/packages/ipfs-unixfs-importer/package.json index 86604799..0f4ff5ae 100644 --- a/packages/ipfs-unixfs-importer/package.json +++ b/packages/ipfs-unixfs-importer/package.json @@ -46,6 +46,7 @@ "ipld-in-memory": "^3.0.0", "it-buffer-stream": "^1.0.0", "it-last": "^1.0.1", + "multicodec": "^1.0.0", "nyc": "^15.0.0", "sinon": "^9.0.1" }, @@ -60,7 +61,6 @@ "it-first": "^1.0.1", "it-parallel-batch": "^1.0.3", "merge-options": "^2.0.0", - "multicodec": "^1.0.0", "multihashing-async": "^0.8.0", "rabin-wasm": "^0.1.1" } diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/dir.js b/packages/ipfs-unixfs-importer/src/dag-builder/dir.js index 42cce150..c3c0908a 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/dir.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/dir.js @@ -6,22 +6,22 @@ const { DAGNode } = require('ipld-dag-pb') -const dirBuilder = async (item, ipld, options) => { +const dirBuilder = async (item, block, options) => { const unixfs = new UnixFS({ type: 'directory', mtime: item.mtime, mode: item.mode }) - const node = new DAGNode(unixfs.marshal(), []) - const cid = await persist(node, ipld, options) + const buffer = new DAGNode(unixfs.marshal()).serialize() + const cid = await persist(buffer, block, options) const path = item.path return { cid, path, unixfs, - size: node.size + size: buffer.length } } diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js index 88d89bde..f5666ad3 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js @@ -6,22 +6,17 @@ const { DAGNode } = require('ipld-dag-pb') -async function * bufferImporter (file, source, ipld, options) { - for await (const buffer of source) { +async function * bufferImporter (file, source, block, options) { + for await (let buffer of source) { yield async () => { options.progress(buffer.length) - let node let unixfs - let size const opts = { ...options } if (options.rawLeaves) { - node = buffer - size = buffer.length - opts.codec = 'raw' opts.cidVersion = 1 } else { @@ -32,16 +27,13 @@ async function * bufferImporter (file, source, ipld, options) { mode: file.mode }) - node = new DAGNode(unixfs.marshal()) - size = node.size + buffer = new DAGNode(unixfs.marshal()).serialize() } - const cid = await persist(node, ipld, opts) - return { - cid: cid, + cid: await persist(buffer, block, opts), unixfs, - size + size: buffer.length } } } diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js index 973ac439..19c2efee 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js @@ -9,7 +9,7 @@ const { } = require('ipld-dag-pb') const all = require('it-all') const parallelBatch = require('it-parallel-batch') -const mc = require('multicodec') +const mh = require('multihashing-async').multihash const dagBuilders = { flat: require('./flat'), @@ -17,7 +17,7 @@ const dagBuilders = { trickle: require('./trickle') } -async function * buildFileBatch (file, source, ipld, options) { +async function * buildFileBatch (file, source, 
block, options) { let count = -1 let previous let bufferImporter @@ -28,7 +28,7 @@ async function * buildFileBatch (file, source, ipld, options) { bufferImporter = require('./buffer-importer') } - for await (const entry of parallelBatch(bufferImporter(file, source, ipld, options), options.blockWriteConcurrency)) { + for await (const entry of parallelBatch(bufferImporter(file, source, block, options), options.blockWriteConcurrency)) { count++ if (count === 0) { @@ -48,7 +48,7 @@ async function * buildFileBatch (file, source, ipld, options) { } } -const reduce = (file, ipld, options) => { +const reduce = (file, block, options) => { return async function (leaves) { if (leaves.length === 1 && leaves[0].single && options.reduceSingleLeafToSelf) { const leaf = leaves[0] @@ -56,7 +56,7 @@ const reduce = (file, ipld, options) => { if (leaf.cid.codec === 'raw' && (file.mtime !== undefined || file.mode !== undefined)) { // only one leaf node which is a buffer - we have metadata so convert it into a // UnixFS entry otherwise we'll have nowhere to store the metadata - const buffer = await ipld.get(leaf.cid) + let { data: buffer } = await block.get(leaf.cid, options) leaf.unixfs = new UnixFS({ type: 'file', @@ -65,10 +65,16 @@ const reduce = (file, ipld, options) => { data: buffer }) - const node = new DAGNode(leaf.unixfs.marshal()) + const multihash = mh.decode(leaf.cid.multihash) + buffer = new DAGNode(leaf.unixfs.marshal()).serialize() - leaf.cid = await ipld.put(node, mc.DAG_PB, options) - leaf.size = node.size + leaf.cid = await persist(buffer, block, { + ...options, + codec: 'dag-pb', + hashAlg: multihash.name, + cidVersion: options.cidVersion + }) + leaf.size = buffer.length } return { @@ -118,25 +124,26 @@ const reduce = (file, ipld, options) => { }) const node = new DAGNode(f.marshal(), links) - const cid = await persist(node, ipld, options) + const buffer = node.serialize() + const cid = await persist(buffer, block, options) return { cid, path: file.path, unixfs: f, - size: node.size + size: buffer.length + node.Links.reduce((acc, curr) => acc + curr.Tsize, 0) } } } -const fileBuilder = async (file, source, ipld, options) => { +const fileBuilder = async (file, source, block, options) => { const dagBuilder = dagBuilders[options.strategy] if (!dagBuilder) { throw errCode(new Error(`Unknown importer build strategy name: ${options.strategy}`), 'ERR_BAD_STRATEGY') } - const roots = await all(dagBuilder(buildFileBatch(file, source, ipld, options), reduce(file, ipld, options), options)) + const roots = await all(dagBuilder(buildFileBatch(file, source, block, options), reduce(file, block, options), options)) if (roots.length > 1) { throw errCode(new Error('expected a maximum of 1 roots and got ' + roots.length), 'ETOOMANYROOTS') diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/index.js b/packages/ipfs-unixfs-importer/src/dag-builder/index.js index a55888d4..dc591c46 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/index.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/index.js @@ -3,7 +3,7 @@ const dirBuilder = require('./dir') const fileBuilder = require('./file') -async function * dagBuilder (source, ipld, options) { +async function * dagBuilder (source, block, options) { for await (const entry of source) { if (entry.path) { if (entry.path.substring(0, 2) === './') { @@ -47,10 +47,10 @@ async function * dagBuilder (source, ipld, options) { } // item is a file - yield () => fileBuilder(entry, chunker(chunkValidator(source, options), options), ipld, options) + yield () => 
fileBuilder(entry, chunker(chunkValidator(source, options), options), block, options) } else { // item is a directory - yield () => dirBuilder(entry, ipld, options) + yield () => dirBuilder(entry, block, options) } } } diff --git a/packages/ipfs-unixfs-importer/src/dir-flat.js b/packages/ipfs-unixfs-importer/src/dir-flat.js index 50866044..d5ac982f 100644 --- a/packages/ipfs-unixfs-importer/src/dir-flat.js +++ b/packages/ipfs-unixfs-importer/src/dir-flat.js @@ -50,7 +50,7 @@ class DirFlat extends Dir { } } - async * flush (path, ipld) { + async * flush (path, block) { const children = Object.keys(this._children) const links = [] @@ -58,7 +58,7 @@ class DirFlat extends Dir { let child = this._children[children[i]] if (typeof child.flush === 'function') { - for await (const entry of child.flush(child.path, ipld)) { + for await (const entry of child.flush(child.path, block)) { child = entry yield child @@ -75,16 +75,18 @@ class DirFlat extends Dir { }) const node = new DAGNode(unixfs.marshal(), links) - const cid = await persist(node, ipld, this.options) + const buffer = node.serialize() + const cid = await persist(buffer, block, this.options) + const size = buffer.length + node.Links.reduce((acc, curr) => acc + curr.Tsize, 0) this.cid = cid - this.size = node.size + this.size = size yield { cid, unixfs, path, - size: node.size + size } } } diff --git a/packages/ipfs-unixfs-importer/src/dir-sharded.js b/packages/ipfs-unixfs-importer/src/dir-sharded.js index e2959845..4378d289 100644 --- a/packages/ipfs-unixfs-importer/src/dir-sharded.js +++ b/packages/ipfs-unixfs-importer/src/dir-sharded.js @@ -76,8 +76,8 @@ class DirSharded extends Dir { } } - async * flush (path, ipld) { - for await (const entry of flush(path, this._bucket, ipld, this, this.options)) { + async * flush (path, block) { + for await (const entry of flush(path, this._bucket, block, this, this.options)) { yield entry } } @@ -87,9 +87,10 @@ module.exports = DirSharded module.exports.hashFn = hashFn -async function * flush (path, bucket, ipld, shardRoot, options) { +async function * flush (path, bucket, block, shardRoot, options) { const children = bucket._children const links = [] + let childrenSize = 0 for (let i = 0; i < children.length; i++) { const child = children.get(i) @@ -103,16 +104,17 @@ async function * flush (path, bucket, ipld, shardRoot, options) { if (Bucket.isBucket(child)) { let shard - for await (const subShard of await flush('', child, ipld, null, options)) { + for await (const subShard of await flush('', child, block, null, options)) { shard = subShard } links.push(new DAGLink(labelPrefix, shard.size, shard.cid)) + childrenSize += shard.size } else if (typeof child.value.flush === 'function') { const dir = child.value let flushedDir - for await (const entry of dir.flush(dir.path, ipld)) { + for await (const entry of dir.flush(dir.path, block)) { flushedDir = entry yield flushedDir @@ -120,21 +122,20 @@ async function * flush (path, bucket, ipld, shardRoot, options) { const label = labelPrefix + child.key links.push(new DAGLink(label, flushedDir.size, flushedDir.cid)) + + childrenSize += flushedDir.size } else { const value = child.value - if (!value.node) { - if (value.cid) { - value.node = await ipld.get(value.cid) - } else { - continue - } + if (!value.cid) { + continue } const label = labelPrefix + child.key - const size = value.node.length || value.node.size || value.node.Size + const size = value.size links.push(new DAGLink(label, size, value.cid)) + childrenSize += size } } @@ -151,12 +152,14 @@ async 
function * flush (path, bucket, ipld, shardRoot, options) { }) const node = new DAGNode(dir.marshal(), links) - const cid = await persist(node, ipld, options) + const buffer = node.serialize() + const cid = await persist(buffer, block, options) + const size = buffer.length + childrenSize yield { cid, unixfs: dir, path, - size: node.size + size } } diff --git a/packages/ipfs-unixfs-importer/src/index.js b/packages/ipfs-unixfs-importer/src/index.js index 052acff3..a6b1f2da 100644 --- a/packages/ipfs-unixfs-importer/src/index.js +++ b/packages/ipfs-unixfs-importer/src/index.js @@ -25,15 +25,15 @@ const defaultOptions = { maxChildrenPerNode: 174, layerRepeat: 4, wrapWithDirectory: false, - pin: true, + pin: false, recursive: false, hidden: false, - preload: true, + preload: false, chunkValidator: null, importBuffer: null } -module.exports = async function * (source, ipld, options = {}) { +module.exports = async function * (source, block, options = {}) { const opts = mergeOptions(defaultOptions, options) if (options.cidVersion > 0 && options.rawLeaves === undefined) { @@ -74,7 +74,7 @@ module.exports = async function * (source, ipld, options = {}) { treeBuilder = require('./tree-builder') } - for await (const entry of treeBuilder(parallelBatch(dagBuilder(source, ipld, opts), opts.fileImportConcurrency), ipld, opts)) { + for await (const entry of treeBuilder(parallelBatch(dagBuilder(source, block, opts), opts.fileImportConcurrency), block, opts)) { yield { cid: entry.cid, path: entry.path, diff --git a/packages/ipfs-unixfs-importer/src/tree-builder.js b/packages/ipfs-unixfs-importer/src/tree-builder.js index feb9f42d..46012697 100644 --- a/packages/ipfs-unixfs-importer/src/tree-builder.js +++ b/packages/ipfs-unixfs-importer/src/tree-builder.js @@ -51,7 +51,7 @@ async function addToTree (elem, tree, options) { return tree } -async function * treeBuilder (source, ipld, options) { +async function * treeBuilder (source, block, options) { let tree = new DirFlat({ root: true, dir: true, @@ -94,7 +94,7 @@ async function * treeBuilder (source, ipld, options) { return } - yield * tree.flush(tree.path, ipld) + yield * tree.flush(tree.path, block) } module.exports = treeBuilder diff --git a/packages/ipfs-unixfs-importer/src/utils/persist.js b/packages/ipfs-unixfs-importer/src/utils/persist.js index e6970b65..afdb352e 100644 --- a/packages/ipfs-unixfs-importer/src/utils/persist.js +++ b/packages/ipfs-unixfs-importer/src/utils/persist.js @@ -1,33 +1,36 @@ 'use strict' -const mh = require('multihashing-async').multihash -const mc = require('multicodec') - -const persist = (node, ipld, options) => { - if (!options.codec && node.length) { - options.cidVersion = 1 - options.codec = 'raw' - } +const mh = require('multihashing-async') +const CID = require('cids') +const persist = async (buffer, block, options) => { if (!options.codec) { options.codec = 'dag-pb' } - if (isNaN(options.hashAlg)) { - options.hashAlg = mh.names[options.hashAlg] + if (!options.cidVersion) { + options.cidVersion = 0 } - if (options.hashAlg !== mh.names['sha2-256']) { - options.cidVersion = 1 + if (!options.hashAlg) { + options.hashAlg = 'sha2-256' } - if (options.format) { - options.codec = options.format + if (options.hashAlg !== 'sha2-256') { + options.cidVersion = 1 } - const format = mc[options.codec.toUpperCase().replace(/-/g, '_')] + const multihash = await mh(buffer, options.hashAlg) + const cid = new CID(options.cidVersion, options.codec, multihash) + + if (!options.onlyHash) { + await block.put(buffer, { + ...options, + cid + }) 
+ } - return ipld.put(node, format, options) + return cid } module.exports = persist diff --git a/packages/ipfs-unixfs-importer/test/builder-dir-sharding.spec.js b/packages/ipfs-unixfs-importer/test/builder-dir-sharding.spec.js index b52b07b8..1107fdc6 100644 --- a/packages/ipfs-unixfs-importer/test/builder-dir-sharding.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder-dir-sharding.spec.js @@ -11,12 +11,15 @@ const IPLD = require('ipld') const inMemory = require('ipld-in-memory') const all = require('it-all') const last = require('it-last') +const blockApi = require('./helpers/block') describe('builder: directory sharding', () => { let ipld + let block before(async () => { ipld = await inMemory(IPLD) + block = blockApi(ipld) }) describe('basic dirbuilder', () => { @@ -25,7 +28,7 @@ describe('builder: directory sharding', () => { const nodes = await all(importer([{ path: 'a/b', content - }], ipld, { + }], block, { shardSplitThreshold: Infinity // never shard })) @@ -46,7 +49,7 @@ describe('builder: directory sharding', () => { const nodes = await all(importer([{ path: 'a/b', content: Buffer.from('i have the best bytes') - }], ipld, { + }], block, { shardSplitThreshold: 0 // always shard })) @@ -64,7 +67,7 @@ describe('builder: directory sharding', () => { const nodes = await all(importer([{ path: 'a/b', content: Buffer.from(content) - }], ipld, { + }], block, { shardSplitThreshold: Infinity // never shard })) @@ -92,7 +95,7 @@ describe('builder: directory sharding', () => { const nodes = await all(importer([{ path: 'a/b', content: Buffer.from(content) - }], ipld, { + }], block, { shardSplitThreshold: 0 // always shard })) @@ -133,7 +136,7 @@ describe('builder: directory sharding', () => { } } - const nodes = await all(importer(source, ipld)) + const nodes = await all(importer(source, block)) expect(nodes.length).to.equal(maxDirs + 1) const last = nodes[nodes.length - 1] @@ -152,7 +155,7 @@ describe('builder: directory sharding', () => { } } - const nodes = await all(importer(source, ipld)) + const nodes = await all(importer(source, block)) expect(nodes.length).to.equal(maxDirs + 1) // files plus the containing directory @@ -204,7 +207,7 @@ describe('builder: directory sharding', () => { } } - const node = await last(importer(source, ipld)) + const node = await last(importer(source, block)) expect(node.path).to.equal('big') rootHash = node.cid diff --git a/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js b/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js index e7e7642c..3592c4a2 100644 --- a/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js @@ -8,19 +8,22 @@ const IPLD = require('ipld') const inMemory = require('ipld-in-memory') const builder = require('../src/dag-builder') const all = require('it-all') +const blockApi = require('./helpers/block') describe('builder: onlyHash', () => { let ipld + let block before(async () => { ipld = await inMemory(IPLD) + block = blockApi(ipld) }) it('will only chunk and hash if passed an "onlyHash" option', async () => { const nodes = await all(builder([{ path: 'foo.txt', content: Buffer.from([0, 1, 2, 3, 4]) - }], ipld, { + }], block, { onlyHash: true, chunker: 'fixed', strategy: 'balanced', diff --git a/packages/ipfs-unixfs-importer/test/builder.spec.js b/packages/ipfs-unixfs-importer/test/builder.spec.js index c9d5522d..dba989cf 100644 --- a/packages/ipfs-unixfs-importer/test/builder.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder.spec.js 
@@ -10,15 +10,18 @@ const inMemory = require('ipld-in-memory') const UnixFS = require('ipfs-unixfs') const builder = require('../src/dag-builder') const first = require('it-first') +const blockApi = require('./helpers/block') describe('builder', () => { let ipld + let block before(async () => { ipld = await inMemory(IPLD) + block = blockApi(ipld) }) - const testMultihashes = Object.keys(mh.names).slice(1, 40) + const testMultihashes = Object.keys(mh.names).slice(1, 10) const opts = { strategy: 'flat', chunker: 'fixed', @@ -43,7 +46,7 @@ describe('builder', () => { content: Buffer.from(content) } - const imported = await (await first(builder([inputFile], ipld, options)))() + const imported = await (await first(builder([inputFile], block, options)))() expect(imported).to.exist() @@ -74,7 +77,7 @@ describe('builder', () => { content: Buffer.alloc(262144 + 5).fill(1) } - const imported = await (await first(builder([inputFile], ipld, options)))() + const imported = await (await first(builder([inputFile], block, options)))() expect(imported).to.exist() expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg) @@ -94,7 +97,7 @@ describe('builder', () => { content: null } - const imported = await (await first(builder([Object.assign({}, inputFile)], ipld, options)))() + const imported = await (await first(builder([Object.assign({}, inputFile)], block, options)))() expect(mh.decode(imported.cid.multihash).name).to.equal(hashAlg) diff --git a/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js b/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js index 2dd00f0f..0c571215 100644 --- a/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js +++ b/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js @@ -9,6 +9,7 @@ const expect = chai.expect const IPLD = require('ipld') const inMemory = require('ipld-in-memory') const mc = require('multicodec') +const blockApi = require('./helpers/block') // eslint bug https://github.com/eslint/eslint/issues/12459 // eslint-disable-next-line require-await @@ -19,11 +20,12 @@ const iter = async function * () { describe('custom chunker', function () { let inmem + let block const fromPartsTest = (iter, size) => async () => { for await (const part of importer([{ content: iter() - }], inmem, { + }], block, { chunkValidator: source => source, chunker: source => source, bufferImporter: async function * (file, source, ipld, options) { @@ -38,12 +40,13 @@ describe('custom chunker', function () { before(async () => { inmem = await inMemory(IPLD) + block = blockApi(inmem) }) it('keeps custom chunking', async () => { const chunker = source => source const content = iter() - for await (const part of importer([{ path: 'test', content }], inmem, { + for await (const part of importer([{ path: 'test', content }], block, { chunker })) { expect(part.size).to.equal(116) diff --git a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js index 94e44fda..9b993894 100644 --- a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js +++ b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js @@ -10,6 +10,7 @@ const IPLD = require('ipld') const inMemory = require('ipld-in-memory') const randomByteStream = require('./helpers/finite-pseudorandom-byte-stream') const first = require('it-first') +const blockApi = require('./helpers/block') const strategies = [ 'flat', @@ -30,9 +31,11 @@ strategies.forEach(strategy => { describe('go-ipfs interop using importer:' + 
strategy, () => { let ipld + let block before(async () => { ipld = await inMemory(IPLD) + block = blockApi(ipld) }) it('yields the same tree as go-ipfs', async function () { @@ -43,7 +46,7 @@ strategies.forEach(strategy => { content: randomByteStream(45900000, 7382) }] - const file = await first(importer(source, ipld, options)) + const file = await first(importer(source, block, options)) expect(file.cid.toBaseEncodedString()).to.be.equal(expectedHashes[strategy]) }) diff --git a/packages/ipfs-unixfs-importer/test/helpers/block.js b/packages/ipfs-unixfs-importer/test/helpers/block.js new file mode 100644 index 00000000..22d0243b --- /dev/null +++ b/packages/ipfs-unixfs-importer/test/helpers/block.js @@ -0,0 +1,35 @@ +'use strict' + +const DAG_PB = require('ipld-dag-pb') +const multicodec = require('multicodec') +const mh = require('multihashing-async').multihash + +module.exports = (ipld) => { + // make ipld behave like the block api, some tests need to pull + // data from ipld so can't use use a simple hash + return { + put: async (buf, { cid }) => { + const multihash = mh.decode(cid.multihash) + + if (cid.codec === 'dag-pb') { + buf = DAG_PB.util.deserialize(buf) + } + + await ipld.put(buf, cid.codec === 'dag-pb' ? multicodec.DAG_PB : multicodec.RAW, { + cidVersion: cid.version, + hashAlg: multihash.code + }) + + return { cid, data: buf } + }, + get: async (cid, options) => { + const node = await ipld.get(cid, options) + + if (cid.codec === 'dag-pb') { + return node.serialize() + } + + return { cid, data: node } + } + } +} diff --git a/packages/ipfs-unixfs-importer/test/import-export-nested-dir.spec.js b/packages/ipfs-unixfs-importer/test/import-export-nested-dir.spec.js index ae607121..7587cf55 100644 --- a/packages/ipfs-unixfs-importer/test/import-export-nested-dir.spec.js +++ b/packages/ipfs-unixfs-importer/test/import-export-nested-dir.spec.js @@ -9,13 +9,16 @@ const inMemory = require('ipld-in-memory') const all = require('it-all') const importer = require('../src') const exporter = require('ipfs-unixfs-exporter') +const blockApi = require('./helpers/block') describe('import and export: directory', () => { const rootHash = 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK' let ipld + let block before(async () => { ipld = await inMemory(IPLD) + block = blockApi(ipld) }) it('imports', async function () { @@ -35,7 +38,7 @@ describe('import and export: directory', () => { content: Buffer.from('cream') }] - const files = await all(importer(source, ipld)) + const files = await all(importer(source, block)) expect(files.map(normalizeNode).sort(byPath)).to.be.eql([{ path: 'a/b/h', diff --git a/packages/ipfs-unixfs-importer/test/import-export.spec.js b/packages/ipfs-unixfs-importer/test/import-export.spec.js index 16a91eae..26232d15 100644 --- a/packages/ipfs-unixfs-importer/test/import-export.spec.js +++ b/packages/ipfs-unixfs-importer/test/import-export.spec.js @@ -10,6 +10,7 @@ const inMemory = require('ipld-in-memory') const loadFixture = require('aegir/fixtures') const isNode = require('detect-node') const bigFile = loadFixture((isNode ? 
__dirname : 'test') + '/fixtures/1.2MiB.txt') +const blockApi = require('./helpers/block') const importer = require('../src') const exporter = require('ipfs-unixfs-exporter') @@ -28,16 +29,18 @@ describe('import and export', function () { describe('using builder: ' + strategy, () => { let ipld + let block before(async () => { ipld = await inMemory(IPLD) + block = blockApi(ipld) }) it('imports and exports', async () => { const path = `${strategy}-big.dat` const values = [{ path: path, content: bigFile }] - for await (const file of importer(values, ipld, importerOptions)) { + for await (const file of importer(values, block, importerOptions)) { expect(file.path).to.eql(path) const result = await exporter(file.cid, ipld) diff --git a/packages/ipfs-unixfs-importer/test/importer.spec.js b/packages/ipfs-unixfs-importer/test/importer.spec.js index 29ea8f01..665e7798 100644 --- a/packages/ipfs-unixfs-importer/test/importer.spec.js +++ b/packages/ipfs-unixfs-importer/test/importer.spec.js @@ -19,6 +19,7 @@ const bigFile = loadFixture((isNode ? __dirname : 'test') + '/fixtures/1.2MiB.tx const smallFile = loadFixture((isNode ? __dirname : 'test') + '/fixtures/200Bytes.txt') const all = require('it-all') const first = require('it-first') +const blockApi = require('./helpers/block') function stringifyMh (files) { return files.map((file) => { @@ -177,11 +178,11 @@ const strategyOverrides = { } } -const checkLeafNodeTypes = async (ipld, options, expected) => { +const checkLeafNodeTypes = async (block, ipld, options, expected) => { const file = await first(importer([{ path: 'foo', content: Buffer.alloc(262144 + 5).fill(1) - }], ipld, options)) + }], block, options)) const node = await ipld.get(file.cid) const meta = UnixFs.unmarshal(node.Data) @@ -199,11 +200,11 @@ const checkLeafNodeTypes = async (ipld, options, expected) => { }) } -const checkNodeLinks = async (ipld, options, expected) => { +const checkNodeLinks = async (block, ipld, options, expected) => { for await (const file of importer([{ path: 'foo', content: Buffer.alloc(100).fill(1) - }], ipld, options)) { + }], block, options)) { const node = await ipld.get(file.cid) const meta = UnixFs.unmarshal(node.Data) @@ -312,12 +313,14 @@ strategies.forEach((strategy) => { this.timeout(30 * 1000) let ipld + let block const options = { strategy: strategy } before(async () => { ipld = await inMemory(IPLD) + block = blockApi(ipld) }) it('fails on bad content', async () => { @@ -325,7 +328,7 @@ strategies.forEach((strategy) => { await all(importer([{ path: '200Bytes.txt', content: 7 - }], ipld, options)) + }], block, options)) throw new Error('No error was thrown') } catch (err) { expect(err.code).to.equal('ERR_INVALID_CONTENT') @@ -341,7 +344,7 @@ strategies.forEach((strategy) => { yield 7 } } - }], ipld, options)) + }], block, options)) throw new Error('No error was thrown') } catch (err) { expect(err.code).to.equal('ERR_INVALID_CONTENT') @@ -358,7 +361,7 @@ strategies.forEach((strategy) => { const files = await all(importer([{ path: 'emptyfile', content: Buffer.alloc(0) - }], ipld, options)) + }], block, options)) expect(files.length).to.eql(1) @@ -374,7 +377,7 @@ strategies.forEach((strategy) => { }, { path: 'boop/200Bytes.txt', content: bigFile - }], ipld, options)) + }], block, options)) throw new Error('No error was thrown') } catch (err) { @@ -387,7 +390,7 @@ strategies.forEach((strategy) => { const res = await all(importer([{ path: '200Bytes.txt', content - }], ipld, options)) + }], block, options)) const file = await exporter(res[0].cid, ipld) const 
fileContent = await all(file.content()) @@ -400,7 +403,7 @@ strategies.forEach((strategy) => { const files = await all(importer([{ path: filePath, content: smallFile - }], ipld, options)) + }], block, options)) expect(files.length).to.equal(1) expect(files[0].path).to.equal(filePath) @@ -411,7 +414,7 @@ strategies.forEach((strategy) => { const files = await all(importer([{ path: filePath, content: smallFile - }], ipld, options)) + }], block, options)) expect(files.length).to.equal(1) expect(files[0].path).to.equal(filePath) @@ -421,7 +424,7 @@ strategies.forEach((strategy) => { const files = await all(importer([{ path: '200Bytes.txt', content: smallFile - }], ipld, options)) + }], block, options)) expectFiles(files, [ '200Bytes.txt' @@ -432,7 +435,7 @@ strategies.forEach((strategy) => { const files = await all(importer([{ path: '200Bytes.txt', content: Array.from(smallFile) - }], ipld, options)) + }], block, options)) expectFiles(files, [ '200Bytes.txt' @@ -443,7 +446,7 @@ strategies.forEach((strategy) => { const files = await all(importer([{ path: 'small.txt', content: 'this is a file\n' - }], ipld, options)) + }], block, options)) expectFiles(files, [ 'small.txt' @@ -454,7 +457,7 @@ strategies.forEach((strategy) => { const files = await all(importer([{ path: '200Bytes.txt', content: smallFile - }], ipld, { + }], block, { ...options, rawLeaves: true })) @@ -469,7 +472,7 @@ strategies.forEach((strategy) => { path: '200Bytes.txt', content: smallFile, mode: 0o123 - }], ipld, { + }], block, { ...options, rawLeaves: true })) @@ -487,7 +490,7 @@ strategies.forEach((strategy) => { secs: 10, nsecs: 0 } - }], ipld, { + }], block, { ...options, rawLeaves: true })) @@ -506,7 +509,7 @@ strategies.forEach((strategy) => { secs: 10, nsecs: 0 } - }], ipld, { + }], block, { ...options, rawLeaves: true })) @@ -520,7 +523,7 @@ strategies.forEach((strategy) => { const files = await all(importer([{ path: 'foo/bar/200Bytes.txt', content: smallFile - }], ipld, options)) + }], block, options)) expectFiles(files, [ 'foo/bar/200Bytes.txt', @@ -535,7 +538,7 @@ strategies.forEach((strategy) => { const files = await all(importer([{ path: '1.2MiB.txt', content: bigFile - }], ipld, options)) + }], block, options)) expectFiles(files, [ '1.2MiB.txt' @@ -548,7 +551,7 @@ strategies.forEach((strategy) => { const files = await all(importer([{ path: 'foo-big/1.2MiB.txt', content: bigFile - }], ipld, options)) + }], block, options)) expectFiles(files, [ 'foo-big/1.2MiB.txt', @@ -559,7 +562,7 @@ strategies.forEach((strategy) => { it('empty directory', async () => { const files = await all(importer([{ path: 'empty-dir' - }], ipld, options)) + }], block, options)) expectFiles(files, [ 'empty-dir' @@ -573,7 +576,7 @@ strategies.forEach((strategy) => { }, { path: 'pim/1.2MiB.txt', content: bigFile - }], ipld, options)) + }], block, options)) expectFiles(files, [ 'pim/200Bytes.txt', @@ -592,7 +595,7 @@ strategies.forEach((strategy) => { }, { path: 'pam/1.2MiB.txt', content: bigFile - }], ipld, options)) + }], block, options)) const result = stringifyMh(files) @@ -625,7 +628,7 @@ strategies.forEach((strategy) => { const files = await all(importer([{ path: content + '.txt', content: Buffer.from(content) - }], ipld, { + }], block, { onlyHash: true })) @@ -652,7 +655,7 @@ strategies.forEach((strategy) => { await all(importer([{ path: '1.2MiB.txt', content: bigFile - }], ipld, options)) + }], block, options)) expect(options.progress.called).to.equal(true) expect(options.progress.args[0][0]).to.equal(maxChunkSize) @@ -688,7 +691,7 @@ 
strategies.forEach((strategy) => { } // Pass a copy of inputFiles, since the importer mutates them - const files = await all(importer(inputFiles.map(f => Object.assign({}, f)), ipld, options)) + const files = await all(importer(inputFiles.map(f => Object.assign({}, f)), block, options)) const file = files[0] expect(file).to.exist() @@ -717,25 +720,25 @@ strategies.forEach((strategy) => { }) it('imports file with raw leaf nodes when specified', () => { - return checkLeafNodeTypes(ipld, { + return checkLeafNodeTypes(block, ipld, { leafType: 'raw' }, 'raw') }) it('imports file with file leaf nodes when specified', () => { - return checkLeafNodeTypes(ipld, { + return checkLeafNodeTypes(block, ipld, { leafType: 'file' }, 'file') }) it('reduces file to single node when specified', () => { - return checkNodeLinks(ipld, { + return checkNodeLinks(block, ipld, { reduceSingleLeafToSelf: true }, 0) }) it('does not reduce file to single node when overidden by options', () => { - return checkNodeLinks(ipld, { + return checkNodeLinks(block, ipld, { reduceSingleLeafToSelf: false }, 1) }) @@ -750,7 +753,7 @@ strategies.forEach((strategy) => { for await (const file of importer([{ path: '1.2MiB.txt', content: bigFile - }], ipld, options)) { + }], block, options)) { for await (const { cid } of collectLeafCids(file.cid, ipld)) { expect(cid.codec).to.be('raw') expect(cid.version).to.be(1) @@ -770,7 +773,7 @@ strategies.forEach((strategy) => { path: '1.2MiB.txt', content: bigFile, mtime: now - }], ipld, options)) { + }], block, options)) { const node = await exporter(file.cid, ipld) expect(node).to.have.nested.deep.property('unixfs.mtime', dateToTimespec(now)) @@ -785,7 +788,7 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo', mtime: now - }], ipld)) + }], block)) const node = await exporter(entries[0].cid, ipld) expect(node).to.have.nested.deep.property('unixfs.mtime', dateToTimespec(now)) @@ -804,7 +807,7 @@ strategies.forEach((strategy) => { }, { path: '/foo/bar.txt', content: bigFile - }], ipld)) + }], block)) const nodes = await all(exporter.recursive(entries[entries.length - 1].cid, ipld)) const node = nodes.filter(node => node.unixfs.type === 'directory').pop() @@ -830,7 +833,7 @@ strategies.forEach((strategy) => { }, { path: '/foo/bar/baz.txt', content: bigFile - }], ipld)) + }], block)) const nodes = await all(exporter.recursive(entries[entries.length - 1].cid, ipld)) const node = nodes.filter(node => node.unixfs.type === 'directory').pop() @@ -861,7 +864,7 @@ strategies.forEach((strategy) => { }, { path: '/foo/bar/baz.txt', content: bigFile - }], ipld)) + }], block)) const nodes = await all(exporter.recursive(entries[entries.length - 1].cid, ipld)) const node = nodes.filter(node => node.unixfs.type === 'directory' && node.name === 'bar').pop() @@ -890,7 +893,7 @@ strategies.forEach((strategy) => { content: bigFile }, { path: '/foo/qux' - }], ipld, { + }], block, { shardSplitThreshold: 0 })) @@ -916,7 +919,7 @@ strategies.forEach((strategy) => { path: '1.2MiB.txt', content: bigFile, mode - }], ipld, options)) { + }], block, options)) { const node = await exporter(file.cid, ipld) expect(node).to.have.nested.property('unixfs.mode', mode) @@ -931,7 +934,7 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo', mode - }], ipld)) + }], block)) const node = await exporter(entries[0].cid, ipld) expect(node).to.have.nested.property('unixfs.mode', mode) @@ -951,7 +954,7 @@ strategies.forEach((strategy) => { path: '/foo/file2.txt', content: 
bigFile, mode: mode2 - }], ipld)) + }], block)) const node1 = await exporter(entries[0].cid, ipld) expect(node1).to.have.nested.property('unixfs.mode', mode1) @@ -972,7 +975,7 @@ strategies.forEach((strategy) => { }, { path: '/foo/bar/baz/file2.txt', content: bigFile - }], ipld)) + }], block)) const node1 = await exporter(entries[0].cid, ipld) expect(node1).to.have.nested.property('unixfs.mode', mode) @@ -987,7 +990,7 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo/file1.txt', content: bigFile - }], ipld)) + }], block)) const node1 = await exporter(entries[0].cid, ipld) expect(node1).to.have.nested.property('unixfs.mode', 0o0644) @@ -1001,12 +1004,12 @@ strategies.forEach((strategy) => { describe('configuration', () => { it('alllows configuring with custom dag and tree builder', async () => { let builtTree = false - const ipld = 'ipld' + const block = 'block' const entries = await all(importer([{ path: 'path', content: 'content' - }], ipld, { - dagBuilder: async function * (source, ipld, opts) { // eslint-disable-line require-await + }], block, { + dagBuilder: async function * (source, block, opts) { // eslint-disable-line require-await yield function () { return Promise.resolve({ cid: 'cid', @@ -1015,7 +1018,7 @@ describe('configuration', () => { }) } }, - treeBuilder: async function * (source, ipld, opts) { // eslint-disable-line require-await + treeBuilder: async function * (source, block, opts) { // eslint-disable-line require-await builtTree = true yield * source } @@ -1032,13 +1035,13 @@ describe('configuration', () => { it('alllows configuring with custom chunker', async () => { let validated = false let chunked = false - const ipld = { - put: () => 'cid' + const block = { + put: () => {} } const entries = await all(importer([{ path: 'path', content: 'content' - }], ipld, { + }], block, { chunkValidator: async function * (source, opts) { // eslint-disable-line require-await validated = true yield * source @@ -1050,7 +1053,6 @@ describe('configuration', () => { })) expect(entries).to.have.lengthOf(1) - expect(entries).to.have.nested.property('[0].cid', 'cid') expect(entries).to.have.nested.property('[0].path', 'path') expect(entries).to.have.nested.property('[0].unixfs') From e7989c79840b6d0746bf97df83d6c13e6198bee4 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 16 Apr 2020 09:59:57 +0100 Subject: [PATCH 2/2] chore: fix up exporter tests --- .../test/exporter-sharded.spec.js | 7 ++-- .../test/exporter-subtree.spec.js | 13 ++++--- .../test/exporter.spec.js | 25 +++++++------ .../test/helpers/block.js | 35 +++++++++++++++++++ 4 files changed, 62 insertions(+), 18 deletions(-) create mode 100644 packages/ipfs-unixfs-exporter/test/helpers/block.js diff --git a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js index 89825524..d9112156 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js @@ -18,6 +18,7 @@ const { DAGLink, DAGNode } = require('ipld-dag-pb') +const blockApi = require('./helpers/block') const SHARD_SPLIT_THRESHOLD = 10 @@ -25,6 +26,7 @@ describe('exporter sharded', function () { this.timeout(30000) let ipld + let block const createShard = (numFiles) => { return createShardWithFileNames(numFiles, (index) => `file-${index}`) @@ -40,7 +42,7 @@ describe('exporter sharded', function () { } const createShardWithFiles = async (files) => { - return (await last(importer(files, ipld, 
{ + return (await last(importer(files, block, { shardSplitThreshold: SHARD_SPLIT_THRESHOLD, wrapWithDirectory: true }))).cid @@ -48,6 +50,7 @@ describe('exporter sharded', function () { before(async () => { ipld = await inMemory(IPLD) + block = blockApi(ipld) }) it('exports a sharded directory', async () => { @@ -62,7 +65,7 @@ describe('exporter sharded', function () { const imported = await all(importer(Object.keys(files).map(path => ({ path, content: files[path].content - })), ipld, { + })), block, { wrapWithDirectory: true, shardSplitThreshold: SHARD_SPLIT_THRESHOLD })) diff --git a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js index e3bc718c..ca1892d2 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js @@ -11,6 +11,7 @@ const mc = require('multicodec') const all = require('async-iterator-all') const last = require('it-last') const randomBytes = require('async-iterator-buffer-stream') +const blockApi = require('./helpers/block') const ONE_MEG = Math.pow(1024, 2) @@ -18,9 +19,11 @@ const exporter = require('./../src') describe('exporter subtree', () => { let ipld + let block before(async () => { ipld = await inMemory(IPLD) + block = blockApi(ipld) }) it('exports a file 2 levels down', async () => { @@ -32,7 +35,7 @@ describe('exporter subtree', () => { }, { path: './level-1/200Bytes.txt', content - }], ipld)) + }], block)) const exported = await exporter(`${imported.cid.toBaseEncodedString()}/level-1/200Bytes.txt`, ipld) @@ -54,7 +57,7 @@ describe('exporter subtree', () => { content }, { path: './level-1/level-2' - }], ipld)) + }], block)) const exported = await exporter(`${imported.cid.toBaseEncodedString()}/level-1`, ipld) const files = await all(exported.content()) @@ -74,7 +77,7 @@ describe('exporter subtree', () => { const imported = await last(importer([{ path: '/derp/200Bytes.txt', content: randomBytes(ONE_MEG) - }], ipld)) + }], block)) try { await exporter(`${imported.cid.toBaseEncodedString()}/doesnotexist`, ipld) @@ -89,7 +92,7 @@ describe('exporter subtree', () => { const imported = await last(importer([{ path: './level-1/200Bytes.txt', content - }], ipld, { + }], block, { wrapWithDirectory: true })) @@ -122,7 +125,7 @@ describe('exporter subtree', () => { }, { path: './level-1/level-2/200Bytes.txt', content - }], ipld)) + }], block)) const exported = await all(exporter.path(`${imported.cid.toBaseEncodedString()}/level-1/level-2/200Bytes.txt`, ipld)) diff --git a/packages/ipfs-unixfs-exporter/test/exporter.spec.js b/packages/ipfs-unixfs-exporter/test/exporter.spec.js index da6db0d2..5f89c9d2 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter.spec.js @@ -22,11 +22,13 @@ const last = require('it-last') const first = require('async-iterator-first') const randomBytes = require('async-iterator-buffer-stream') const AbortController = require('abort-controller') +const blockApi = require('./helpers/block') const ONE_MEG = Math.pow(1024, 2) describe('exporter', () => { let ipld + let block let bigFile let smallFile @@ -58,7 +60,7 @@ describe('exporter', () => { const result = await all(importer([{ path, content: file - }], ipld, { + }], block, { strategy, rawLeaves, chunkerOptions: { @@ -123,6 +125,7 @@ describe('exporter', () => { before(async () => { ipld = await inMemory(IPLD) + block = blockApi(ipld) }) it('ensure hash inputs are sanitized', async () => { 
@@ -148,7 +151,7 @@ describe('exporter', () => { const files = await all(importer([{ path: filePath, content: smallFile - }], ipld)) + }], block)) const path = `/ipfs/${files[1].cid.toBaseEncodedString()}/${fileName}` const file = await exporter(path, ipld) @@ -164,7 +167,7 @@ describe('exporter', () => { const files = await all(importer([{ path: filePath, content: smallFile - }], ipld)) + }], block)) const path = `/ipfs/${files[1].cid.toBaseEncodedString()}/${fileName}` const file = await exporter(path, ipld) @@ -333,7 +336,7 @@ describe('exporter', () => { content: randomBytes(ONE_MEG) }, { path: './level-1/level-2' - }], ipld)) + }], block)) const dir = await exporter(importedDir.cid, ipld) const files = await all(dir.content()) @@ -371,7 +374,7 @@ describe('exporter', () => { path: './dir-another' }, { path: './level-1' - }], ipld)) + }], block)) const dir = await exporter(importedDir.cid, ipld) const files = await all(dir.content()) @@ -516,7 +519,7 @@ describe('exporter', () => { const imported = await first(importer([{ path: '1.2MiB.txt', content: bigFile - }], ipld, { + }], block, { rawLeaves: true })) @@ -529,7 +532,7 @@ describe('exporter', () => { it('returns an empty stream for dir', async () => { const imported = await first(importer([{ path: 'empty' - }], ipld)) + }], block)) const dir = await exporter(imported.cid, ipld) const files = await all(dir.content()) expect(files.length).to.equal(0) @@ -755,7 +758,7 @@ describe('exporter', () => { const imported = await first(importer([{ path: '200Bytes.txt', content: bigFile - }], ipld, { + }], block, { rawLeaves: true })) @@ -771,7 +774,7 @@ describe('exporter', () => { const imported = await first(importer([{ path: '200Bytes.txt', content: smallFile - }], ipld, { + }], block, { rawLeaves: true })) @@ -862,7 +865,7 @@ describe('exporter', () => { const imported = await all(importer([{ path: '/foo/bar/baz.txt', content: Buffer.from('hello world') - }], ipld)) + }], block)) const exported = await exporter(imported[0].cid, ipld) @@ -879,7 +882,7 @@ describe('exporter', () => { }, { path: '/foo/bar/quux.txt', content: Buffer.from('hello world') - }], ipld)) + }], block)) const exported = await all(exporter.recursive(dir.cid, ipld)) const dirCid = dir.cid.toBaseEncodedString() diff --git a/packages/ipfs-unixfs-exporter/test/helpers/block.js b/packages/ipfs-unixfs-exporter/test/helpers/block.js new file mode 100644 index 00000000..22d0243b --- /dev/null +++ b/packages/ipfs-unixfs-exporter/test/helpers/block.js @@ -0,0 +1,35 @@ +'use strict' + +const DAG_PB = require('ipld-dag-pb') +const multicodec = require('multicodec') +const mh = require('multihashing-async').multihash + +module.exports = (ipld) => { + // make ipld behave like the block api, some tests need to pull + // data from ipld so can't use use a simple hash + return { + put: async (buf, { cid }) => { + const multihash = mh.decode(cid.multihash) + + if (cid.codec === 'dag-pb') { + buf = DAG_PB.util.deserialize(buf) + } + + await ipld.put(buf, cid.codec === 'dag-pb' ? multicodec.DAG_PB : multicodec.RAW, { + cidVersion: cid.version, + hashAlg: multihash.code + }) + + return { cid, data: buf } + }, + get: async (cid, options) => { + const node = await ipld.get(cid, options) + + if (cid.codec === 'dag-pb') { + return node.serialize() + } + + return { cid, data: node } + } + } +}
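For reference, a minimal usage sketch of the calling convention this patch introduces: the importer now takes a block-store-like object with `put`/`get` methods rather than an `ipld` instance. It assumes an `ipfs` instance whose `block` API accepts the options `persist()` passes (as the `test/helpers/block.js` adapter does); the `addFiles` helper and file contents below are illustrative only, not part of the patch.

```js
'use strict'

const importer = require('ipfs-unixfs-importer')

// Sketch only: pass part of an ipfs / ipfs-http-client instance (its block API)
// instead of an ipld instance. The importer yields { cid, path, unixfs, size }
// entries for every file and directory it creates.
async function addFiles (ipfs) {
  const source = [{
    path: 'foo/hello.txt',
    content: Buffer.from('hello world')
  }]

  // pin and preload now default to false, so enable them explicitly if needed
  for await (const entry of importer(source, ipfs.block, { pin: true, preload: false })) {
    console.log(entry.path, entry.cid.toBaseEncodedString(), entry.size)
  }
}

module.exports = addFiles
```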