From ca3446607095bed2a3054bd89d14188de142106b Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 13 Dec 2018 16:26:14 +0000 Subject: [PATCH 1/4] test: convert tests to use in-memory IPLD --- package.json | 11 +- test/browser.js | 57 +- test/builder-balanced.js | 1 - test/builder-dir-sharding.js | 652 +++++++++++---------- test/builder-only-hash.js | 78 +-- test/builder.js | 198 +++---- test/hash-parity-with-go-ipfs.js | 66 ++- test/import-export-nested-dir.js | 172 +++--- test/import-export.js | 62 +- test/importer-flush.js | 314 +++++----- test/importer.js | 977 ++++++++++++++++--------------- test/node.js | 79 +-- test/with-dag-api.js | 2 - 13 files changed, 1287 insertions(+), 1382 deletions(-) diff --git a/package.json b/package.json index c24571e..371c269 100644 --- a/package.json +++ b/package.json @@ -40,17 +40,11 @@ "aegir": "^17.0.0", "chai": "^4.2.0", "dirty-chai": "^2.0.1", - "ipfs-block-service": "~0.15.1", - "ipfs-repo": "~0.25.0", "ipfs-unixfs-exporter": "~0.35.4", "ipld": "~0.20.0", - "mkdirp": "~0.5.1", "multihashes": "~0.4.14", - "ncp": "^2.0.0", "pull-generate": "^2.2.0", - "pull-stream-to-stream": "^1.3.4", - "pull-zip": "^2.0.1", - "rimraf": "^2.6.2", + "pull-traverse": "^1.0.3", "sinon": "^7.1.0" }, "dependencies": { @@ -65,14 +59,11 @@ "multihashing-async": "~0.5.1", "pull-batch": "^1.0.0", "pull-block": "^1.4.0", - "pull-cat": "^1.1.11", "pull-pair": "^1.1.0", - "pull-paramap": "^1.2.2", "pull-pause": "0.0.2", "pull-pushable": "^2.2.0", "pull-stream": "^3.6.9", "pull-through": "^1.0.18", - "pull-traverse": "^1.0.3", "pull-write": "^1.1.4", "stream-to-pull-stream": "^1.7.2" }, diff --git a/test/browser.js b/test/browser.js index e846743..7e94834 100644 --- a/test/browser.js +++ b/test/browser.js @@ -1,59 +1,4 @@ /* eslint-env mocha */ -/* global self */ 'use strict' -const series = require('async/series') -const IPFSRepo = require('ipfs-repo') - -const idb = self.indexedDB || - self.mozIndexedDB || - self.webkitIndexedDB || - self.msIndexedDB - -idb.deleteDatabase('ipfs') -idb.deleteDatabase('ipfs/blocks') - -describe('IPFS data importing tests on the Browser', function () { - const repo = new IPFSRepo('ipfs') - - before((done) => { - series([ - (cb) => repo.init({}, cb), - (cb) => repo.open(cb) - ], done) - }) - - after((done) => { - series([ - (cb) => repo.close(cb), - (cb) => { - idb.deleteDatabase('ipfs') - idb.deleteDatabase('ipfs/blocks') - cb() - } - ], done) - }) - - // Chunkers - require('./chunker-fixed-size') - require('./chunker-rabin-browser') - - // Graph Builders - require('./builder')(repo) - require('./builder-flat') - require('./builder-balanced') - require('./builder-trickle-dag') - require('./builder-only-hash')(repo) - // TODO: make these tests not require data on the repo - // require('./builder-dir-sharding')(repo) - - // Importer - require('./importer')(repo) - require('./importer-flush')(repo) - - // Other - require('./import-export')(repo) - require('./import-export-nested-dir')(repo) - require('./hash-parity-with-go-ipfs')(repo) - // require('./with-dag-api') -}) +require('./node') diff --git a/test/builder-balanced.js b/test/builder-balanced.js index 4c8bb28..73fe7e6 100644 --- a/test/builder-balanced.js +++ b/test/builder-balanced.js @@ -7,7 +7,6 @@ const expect = chai.expect const pull = require('pull-stream/pull') const values = require('pull-stream/sources/values') const collect = require('pull-stream/sinks/collect') - const builder = require('../src/builder/balanced') function reduce (leaves, callback) { diff --git 
a/test/builder-dir-sharding.js b/test/builder-dir-sharding.js index 267e088..d1727be 100644 --- a/test/builder-dir-sharding.js +++ b/test/builder-dir-sharding.js @@ -1,14 +1,13 @@ /* eslint-env mocha */ 'use strict' -const importer = require('./../src') +const importer = require('../src') const exporter = require('ipfs-unixfs-exporter') const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const BlockService = require('ipfs-block-service') -const Ipld = require('ipld') +const IPLD = require('ipld') const pull = require('pull-stream/pull') const values = require('pull-stream/sources/values') const asyncMap = require('pull-stream/throughs/async-map') @@ -19,380 +18,383 @@ const setImmediate = require('async/setImmediate') const leftPad = require('left-pad') const CID = require('cids') -module.exports = (repo) => { - describe('builder: directory sharding', function () { - this.timeout(30 * 1000) +describe('builder: directory sharding', function () { + this.timeout(30 * 1000) - let ipld + let ipld - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) - }) - - describe('basic dirbuilder', () => { - let nonShardedHash, shardedHash - - it('yields a non-sharded dir', (done) => { - const options = { - shardSplitThreshold: Infinity // never shard - } - - pull( - values([ - { - path: 'a/b', - content: pull.values([Buffer.from('i have the best bytes')]) - } - ]), - importer(ipld, options), - collect((err, nodes) => { - try { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(2) - expect(nodes[0].path).to.be.eql('a/b') - expect(nodes[1].path).to.be.eql('a') - nonShardedHash = nodes[1].multihash - expect(nonShardedHash).to.exist() - done() - } catch (err) { - done(err) - } - }) - ) - }) + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() - it('yields a sharded dir', (done) => { - const options = { - shardSplitThreshold: 0 // always shard - } + ipld = resolver - pull( - values([ - { - path: 'a/b', - content: values([Buffer.from('i have the best bytes')]) - } - ]), - importer(ipld, options), - collect((err, nodes) => { - try { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(2) - expect(nodes[0].path).to.be.eql('a/b') - expect(nodes[1].path).to.be.eql('a') - shardedHash = nodes[1].multihash - // hashes are different - expect(shardedHash).to.not.equal(nonShardedHash) - done() - } catch (err) { - done(err) - } - }) - ) - }) + done() + }) + }) - it('exporting unsharded hash results in the correct files', (done) => { - pull( - exporter(nonShardedHash, ipld), - collect((err, nodes) => { - try { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(2) - const expectedHash = new CID(nonShardedHash).toBaseEncodedString() - expect(nodes[0].path).to.be.eql(expectedHash) - expect(new CID(nodes[0].hash).toBaseEncodedString()).to.be.eql(expectedHash) - expect(nodes[1].path).to.be.eql(expectedHash + '/b') - expect(nodes[1].size).to.be.eql(29) - } catch (err) { - return done(err) - } + describe('basic dirbuilder', () => { + let nonShardedHash, shardedHash - pull( - nodes[1].content, - collect(collected) - ) - }) - ) + it('yields a non-sharded dir', (done) => { + const options = { + shardSplitThreshold: Infinity // never shard + } - function collected (err, content) { + pull( + values([ + { + path: 'a/b', + content: pull.values([Buffer.from('i have the best bytes')]) + } + ]), + importer(ipld, options), + collect((err, nodes) => { try { expect(err).to.not.exist() - 
expect(content.length).to.be.eql(1) - expect(content[0].toString()).to.be.eql('i have the best bytes') + expect(nodes.length).to.be.eql(2) + expect(nodes[0].path).to.be.eql('a/b') + expect(nodes[1].path).to.be.eql('a') + nonShardedHash = nodes[1].multihash + expect(nonShardedHash).to.exist() done() } catch (err) { done(err) } - } - }) - - it('exporting sharded hash results in the correct files', (done) => { - pull( - exporter(shardedHash, ipld), - collect((err, nodes) => { - try { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(2) - const expectedHash = new CID(shardedHash).toBaseEncodedString() - expect(nodes[0].path).to.be.eql(expectedHash) - expect(new CID(nodes[0].hash).toBaseEncodedString()).to.be.eql(expectedHash) - expect(nodes[1].path).to.be.eql(expectedHash + '/b') - expect(nodes[1].size).to.be.eql(21) - } catch (err) { - return done(err) - } + }) + ) + }) - pull( - nodes[1].content, - collect(collected) - ) - }) - ) + it('yields a sharded dir', (done) => { + const options = { + shardSplitThreshold: 0 // always shard + } - function collected (err, content) { + pull( + values([ + { + path: 'a/b', + content: values([Buffer.from('i have the best bytes')]) + } + ]), + importer(ipld, options), + collect((err, nodes) => { try { expect(err).to.not.exist() - expect(content.length).to.be.eql(1) - expect(content[0].toString()).to.be.eql('i have the best bytes') + expect(nodes.length).to.be.eql(2) + expect(nodes[0].path).to.be.eql('a/b') + expect(nodes[1].path).to.be.eql('a') + shardedHash = nodes[1].multihash + // hashes are different + expect(shardedHash).to.not.equal(nonShardedHash) done() } catch (err) { done(err) } - } - }) + }) + ) }) - describe('big dir', () => { - const maxDirs = 2000 - let rootHash - - it('imports a big dir', (done) => { - const push = pushable() - pull( - push, - importer(ipld), - collect((err, nodes) => { - try { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(maxDirs + 1) - const last = nodes[nodes.length - 1] - expect(last.path).to.be.eql('big') - rootHash = last.multihash - done() - } catch (err) { - done(err) - } - }) - ) - - let pending = maxDirs - let i = 0 - - whilst( - () => pending, - (callback) => { - pending-- - i++ - const pushable = { - path: 'big/' + leftPad(i.toString(), 4, '0'), - content: values([Buffer.from(i.toString())]) - } - push.push(pushable) - setImmediate(callback) - }, - (err) => { + it('exporting unsharded hash results in the correct files', (done) => { + pull( + exporter(nonShardedHash, ipld), + collect((err, nodes) => { + try { expect(err).to.not.exist() - push.end() + expect(nodes.length).to.be.eql(2) + const expectedHash = new CID(nonShardedHash).toBaseEncodedString() + expect(nodes[0].path).to.be.eql(expectedHash) + expect(new CID(nodes[0].hash).toBaseEncodedString()).to.be.eql(expectedHash) + expect(nodes[1].path).to.be.eql(expectedHash + '/b') + expect(nodes[1].size).to.be.eql(29) + } catch (err) { + return done(err) } - ) - }) - it('exports a big dir', (done) => { - const contentEntries = [] - const entries = {} - pull( - exporter(rootHash, ipld), - asyncMap((node, callback) => { - if (node.content) { - pull( - node.content, - collect(collected) - ) - } else { - entries[node.path] = node - callback() - } + pull( + nodes[1].content, + collect(collected) + ) + }) + ) - function collected (err, content) { - expect(err).to.not.exist() - entries[node.path] = { content: content.toString() } - callback(null, node) - } - }), - collect((err, nodes) => { - expect(err).to.not.exist() - const paths = 
Object.keys(entries).sort() - expect(paths.length).to.be.eql(2001) - paths.forEach(eachPath) - done() - }) - ) + function collected (err, content) { + try { + expect(err).to.not.exist() + expect(content.length).to.be.eql(1) + expect(content[0].toString()).to.be.eql('i have the best bytes') + done() + } catch (err) { + done(err) + } + } + }) - function eachPath (path, index) { - if (!index) { - // first dir - expect(path).to.be.eql(new CID(rootHash).toBaseEncodedString()) - const entry = entries[path] - expect(entry).to.exist() - expect(entry.content).to.not.exist() - return - } - // dir entries - const content = entries[path] && entries[path].content - if (content) { - expect(content).to.be.eql(index.toString()) - contentEntries.push(path) + it('exporting sharded hash results in the correct files', (done) => { + pull( + exporter(shardedHash, ipld), + collect((err, nodes) => { + try { + expect(err).to.not.exist() + expect(nodes.length).to.be.eql(2) + const expectedHash = new CID(shardedHash).toBaseEncodedString() + expect(nodes[0].path).to.be.eql(expectedHash) + expect(new CID(nodes[0].hash).toBaseEncodedString()).to.be.eql(expectedHash) + expect(nodes[1].path).to.be.eql(expectedHash + '/b') + expect(nodes[1].size).to.be.eql(21) + } catch (err) { + return done(err) } + + pull( + nodes[1].content, + collect(collected) + ) + }) + ) + + function collected (err, content) { + try { + expect(err).to.not.exist() + expect(content.length).to.be.eql(1) + expect(content[0].toString()).to.be.eql('i have the best bytes') + done() + } catch (err) { + done(err) } - }) + } }) + }) - describe('big nested dir', () => { - const maxDirs = 2000 - const maxDepth = 3 - let rootHash + describe('big dir', () => { + const maxDirs = 2000 + let rootHash - it('imports a big dir', (done) => { - const push = pushable() - pull( - push, - importer(ipld), - collect((err, nodes) => { + it('imports a big dir', (done) => { + const push = pushable() + pull( + push, + importer(ipld), + collect((err, nodes) => { + try { expect(err).to.not.exist() + expect(nodes.length).to.be.eql(maxDirs + 1) const last = nodes[nodes.length - 1] expect(last.path).to.be.eql('big') rootHash = last.multihash done() - }) - ) + } catch (err) { + done(err) + } + }) + ) + + let pending = maxDirs + let i = 0 + + whilst( + () => pending, + (callback) => { + pending-- + i++ + const pushable = { + path: 'big/' + leftPad(i.toString(), 4, '0'), + content: values([Buffer.from(i.toString())]) + } + push.push(pushable) + setImmediate(callback) + }, + (err) => { + expect(err).to.not.exist() + push.end() + } + ) + }) - let pending = maxDirs - let pendingDepth = maxDepth - let i = 0 - let depth = 1 + it('exports a big dir', (done) => { + const contentEntries = [] + const entries = {} + pull( + exporter(rootHash, ipld), + asyncMap((node, callback) => { + if (node.content) { + pull( + node.content, + collect(collected) + ) + } else { + entries[node.path] = node + callback() + } - whilst( - () => pendingDepth && pending, - (callback) => { - i++ - const dir = [] - for (let d = 0; d < depth; d++) { - dir.push('big') - } - const pushed = { - path: dir.concat(leftPad(i.toString(), 4, '0')).join('/'), - content: values([Buffer.from(i.toString())]) - } - push.push(pushed) - pending-- - if (!pending) { - pendingDepth-- - pending = maxDirs - i = 0 - depth++ - } - setImmediate(callback) - }, - (err) => { + function collected (err, content) { expect(err).to.not.exist() - push.end() + entries[node.path] = { content: content.toString() } + callback(null, node) } - ) - }) - - 
it('exports a big dir', (done) => { - const entries = {} - pull( - exporter(rootHash, ipld), - asyncMap((node, callback) => { - if (node.content) { - pull( - node.content, - collect(collected) - ) - } else { - entries[node.path] = node - callback() - } - - function collected (err, content) { - expect(err).to.not.exist() - entries[node.path] = { content: content.toString() } - callback(null, node) - } - }), - collect(collected) - ) - - function collected (err, nodes) { + }), + collect((err, nodes) => { expect(err).to.not.exist() const paths = Object.keys(entries).sort() - expect(paths.length).to.be.eql(maxDepth * maxDirs + maxDepth) - let index = 0 - let depth = 1 + expect(paths.length).to.be.eql(2001) paths.forEach(eachPath) done() + }) + ) + + function eachPath (path, index) { + if (!index) { + // first dir + expect(path).to.be.eql(new CID(rootHash).toBaseEncodedString()) + const entry = entries[path] + expect(entry).to.exist() + expect(entry.content).to.not.exist() + return + } + // dir entries + const content = entries[path] && entries[path].content + if (content) { + expect(content).to.be.eql(index.toString()) + contentEntries.push(path) + } + } + }) + }) - function eachPath (path) { - if (!index) { - // first dir - if (depth === 1) { - expect(path).to.be.eql(new CID(rootHash).toBaseEncodedString()) - } - const entry = entries[path] - expect(entry).to.exist() - expect(entry.content).to.not.exist() - } else { - // dir entries - const pathElements = path.split('/') - expect(pathElements.length).to.be.eql(depth + 1) - const lastElement = pathElements[pathElements.length - 1] - expect(lastElement).to.be.eql(leftPad(index.toString(), 4, '0')) - expect(entries[path].content).to.be.eql(index.toString()) - } - index++ - if (index > maxDirs) { - index = 0 - depth++ + describe('big nested dir', () => { + const maxDirs = 2000 + const maxDepth = 3 + let rootHash + + it('imports a big dir', (done) => { + const push = pushable() + pull( + push, + importer(ipld), + collect((err, nodes) => { + expect(err).to.not.exist() + const last = nodes[nodes.length - 1] + expect(last.path).to.be.eql('big') + rootHash = last.multihash + done() + }) + ) + + let pending = maxDirs + let pendingDepth = maxDepth + let i = 0 + let depth = 1 + + whilst( + () => pendingDepth && pending, + (callback) => { + i++ + const dir = [] + for (let d = 0; d < depth; d++) { + dir.push('big') + } + const pushed = { + path: dir.concat(leftPad(i.toString(), 4, '0')).join('/'), + content: values([Buffer.from(i.toString())]) + } + push.push(pushed) + pending-- + if (!pending) { + pendingDepth-- + pending = maxDirs + i = 0 + depth++ + } + setImmediate(callback) + }, + (err) => { + expect(err).to.not.exist() + push.end() + } + ) + }) + + it('exports a big dir', (done) => { + const entries = {} + pull( + exporter(rootHash, ipld), + asyncMap((node, callback) => { + if (node.content) { + pull( + node.content, + collect(collected) + ) + } else { + entries[node.path] = node + callback() + } + + function collected (err, content) { + expect(err).to.not.exist() + entries[node.path] = { content: content.toString() } + callback(null, node) + } + }), + collect(collected) + ) + + function collected (err, nodes) { + expect(err).to.not.exist() + const paths = Object.keys(entries).sort() + expect(paths.length).to.be.eql(maxDepth * maxDirs + maxDepth) + let index = 0 + let depth = 1 + paths.forEach(eachPath) + done() + + function eachPath (path) { + if (!index) { + // first dir + if (depth === 1) { + expect(path).to.be.eql(new 
CID(rootHash).toBaseEncodedString()) } + const entry = entries[path] + expect(entry).to.exist() + expect(entry.content).to.not.exist() + } else { + // dir entries + const pathElements = path.split('/') + expect(pathElements.length).to.be.eql(depth + 1) + const lastElement = pathElements[pathElements.length - 1] + expect(lastElement).to.be.eql(leftPad(index.toString(), 4, '0')) + expect(entries[path].content).to.be.eql(index.toString()) + } + index++ + if (index > maxDirs) { + index = 0 + depth++ } } - }) + } + }) - it('exports a big dir with subpath', (done) => { - const exportHash = new CID(rootHash).toBaseEncodedString() + '/big/big/2000' + it('exports a big dir with subpath', (done) => { + const exportHash = new CID(rootHash).toBaseEncodedString() + '/big/big/2000' + pull( + exporter(exportHash, ipld), + collect(collected) + ) + + function collected (err, nodes) { + expect(err).to.not.exist() + expect(nodes.length).to.equal(1) + expect(nodes.map((node) => node.path)).to.deep.equal([ + '2000' + ]) pull( - exporter(exportHash, ipld), - collect(collected) + nodes[0].content, + collect((err, content) => { + expect(err).to.not.exist() + expect(content.toString()).to.equal('2000') + done() + }) ) - - function collected (err, nodes) { - expect(err).to.not.exist() - expect(nodes.length).to.equal(1) - expect(nodes.map((node) => node.path)).to.deep.equal([ - '2000' - ]) - pull( - nodes[0].content, - collect((err, content) => { - expect(err).to.not.exist() - expect(content.toString()).to.equal('2000') - done() - }) - ) - } - }) + } }) }) -} +}) diff --git a/test/builder-only-hash.js b/test/builder-only-hash.js index 7bf14fd..464664d 100644 --- a/test/builder-only-hash.js +++ b/test/builder-only-hash.js @@ -4,52 +4,54 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const BlockService = require('ipfs-block-service') const pull = require('pull-stream/pull') const values = require('pull-stream/sources/values') const collect = require('pull-stream/sinks/collect') -const Ipld = require('ipld') +const IPLD = require('ipld') const CID = require('cids') const createBuilder = require('../src/builder') const FixedSizeChunker = require('../src/chunker/fixed-size') -module.exports = (repo) => { - describe('builder: onlyHash', () => { - let ipld +describe('builder: onlyHash', () => { + let ipld - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) - }) + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() + + ipld = resolver - it('will only chunk and hash if passed an "onlyHash" option', (done) => { - const onCollected = (err, nodes) => { - if (err) return done(err) - - const node = nodes[0] - expect(node).to.exist() - - ipld.get(new CID(node.multihash), (err, res) => { - expect(err).to.exist() - done() - }) - } - - const content = String(Math.random() + Date.now()) - const inputFile = { - path: content + '.txt', - content: Buffer.from(content) - } - - const options = { - onlyHash: true - } - - pull( - values([inputFile]), - createBuilder(FixedSizeChunker, ipld, options), - collect(onCollected) - ) + done() }) }) -} + + it('will only chunk and hash if passed an "onlyHash" option', (done) => { + const onCollected = (err, nodes) => { + if (err) return done(err) + + const node = nodes[0] + expect(node).to.exist() + + ipld.get(new CID(node.multihash), (err, res) => { + expect(err).to.exist() + done() + }) + } + + const content = String(Math.random() + Date.now()) + const inputFile = { + path: content 
+ '.txt', + content: Buffer.from(content) + } + + const options = { + onlyHash: true + } + + pull( + values([inputFile]), + createBuilder(FixedSizeChunker, ipld, options), + collect(onCollected) + ) + }) +}) diff --git a/test/builder.js b/test/builder.js index e82f0ab..5975009 100644 --- a/test/builder.js +++ b/test/builder.js @@ -15,131 +15,127 @@ const UnixFS = require('ipfs-unixfs') const createBuilder = require('../src/builder') const FixedSizeChunker = require('../src/chunker/fixed-size') -module.exports = (repo) => { - describe('builder', () => { - let ipld +describe('builder', () => { + let ipld - const testMultihashes = Object.keys(mh.names).slice(1, 40) + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() - before((done) => { - IPLD.inMemory((err, resolver) => { - if (err) { - return done(err) - } - - ipld = resolver + ipld = resolver - done() - }) + done() }) + }) - it('allows multihash hash algorithm to be specified', (done) => { - eachSeries(testMultihashes, (hashAlg, cb) => { - const options = { hashAlg, strategy: 'flat' } - const content = String(Math.random() + Date.now()) - const inputFile = { - path: content + '.txt', - content: Buffer.from(content) - } - - const onCollected = (err, nodes) => { - if (err) return cb(err) + const testMultihashes = Object.keys(mh.names).slice(1, 40) - const node = nodes[0] - expect(node).to.exist() + it('allows multihash hash algorithm to be specified', (done) => { + eachSeries(testMultihashes, (hashAlg, cb) => { + const options = { hashAlg, strategy: 'flat' } + const content = String(Math.random() + Date.now()) + const inputFile = { + path: content + '.txt', + content: Buffer.from(content) + } - const cid = new CID(node.multihash) + const onCollected = (err, nodes) => { + if (err) return cb(err) - // Verify multihash has been encoded using hashAlg - expect(mh.decode(cid.multihash).name).to.equal(hashAlg) + const node = nodes[0] + expect(node).to.exist() - // Fetch using hashAlg encoded multihash - ipld.get(cid, (err, res) => { - if (err) return cb(err) - const content = UnixFS.unmarshal(res.value.data).data - expect(content.equals(inputFile.content)).to.be.true() - cb() - }) - } + const cid = new CID(node.multihash) - pull( - values([Object.assign({}, inputFile)]), - createBuilder(FixedSizeChunker, ipld, options), - collect(onCollected) - ) - }, done) - }) + // Verify multihash has been encoded using hashAlg + expect(mh.decode(cid.multihash).name).to.equal(hashAlg) - it('allows multihash hash algorithm to be specified for big file', function (done) { - this.timeout(30000) + // Fetch using hashAlg encoded multihash + ipld.get(cid, (err, res) => { + if (err) return cb(err) + const content = UnixFS.unmarshal(res.value.data).data + expect(content.equals(inputFile.content)).to.be.true() + cb() + }) + } + + pull( + values([Object.assign({}, inputFile)]), + createBuilder(FixedSizeChunker, ipld, options), + collect(onCollected) + ) + }, done) + }) - eachSeries(testMultihashes, (hashAlg, cb) => { - const options = { hashAlg, strategy: 'flat' } - const content = String(Math.random() + Date.now()) - const inputFile = { - path: content + '.txt', - // Bigger than maxChunkSize - content: Buffer.alloc(262144 + 5).fill(1) - } + it('allows multihash hash algorithm to be specified for big file', function (done) { + this.timeout(30000) - const onCollected = (err, nodes) => { - if (err) return cb(err) + eachSeries(testMultihashes, (hashAlg, cb) => { + const options = { hashAlg, strategy: 'flat' } + const content = 
String(Math.random() + Date.now()) + const inputFile = { + path: content + '.txt', + // Bigger than maxChunkSize + content: Buffer.alloc(262144 + 5).fill(1) + } - const node = nodes[0] + const onCollected = (err, nodes) => { + if (err) return cb(err) - try { - expect(node).to.exist() - const cid = new CID(node.multihash) - expect(mh.decode(cid.multihash).name).to.equal(hashAlg) - } catch (err) { - return cb(err) - } + const node = nodes[0] - cb() + try { + expect(node).to.exist() + const cid = new CID(node.multihash) + expect(mh.decode(cid.multihash).name).to.equal(hashAlg) + } catch (err) { + return cb(err) } - pull( - values([Object.assign({}, inputFile)]), - createBuilder(FixedSizeChunker, ipld, options), - collect(onCollected) - ) - }, done) - }) + cb() + } - it('allows multihash hash algorithm to be specified for a directory', (done) => { - eachSeries(testMultihashes, (hashAlg, cb) => { - const options = { hashAlg, strategy: 'flat' } - const inputFile = { - path: `${String(Math.random() + Date.now())}-dir`, - content: null - } + pull( + values([Object.assign({}, inputFile)]), + createBuilder(FixedSizeChunker, ipld, options), + collect(onCollected) + ) + }, done) + }) - const onCollected = (err, nodes) => { - if (err) return cb(err) + it('allows multihash hash algorithm to be specified for a directory', (done) => { + eachSeries(testMultihashes, (hashAlg, cb) => { + const options = { hashAlg, strategy: 'flat' } + const inputFile = { + path: `${String(Math.random() + Date.now())}-dir`, + content: null + } - const node = nodes[0] + const onCollected = (err, nodes) => { + if (err) return cb(err) - expect(node).to.exist() + const node = nodes[0] - const cid = new CID(node.multihash) + expect(node).to.exist() - expect(mh.decode(cid.multihash).name).to.equal(hashAlg) + const cid = new CID(node.multihash) - // Fetch using hashAlg encoded multihash - ipld.get(cid, (err, res) => { - if (err) return cb(err) - const meta = UnixFS.unmarshal(res.value.data) - expect(meta.type).to.equal('directory') - cb() - }) - } + expect(mh.decode(cid.multihash).name).to.equal(hashAlg) - pull( - values([Object.assign({}, inputFile)]), - createBuilder(FixedSizeChunker, ipld, options), - collect(onCollected) - ) - }, done) - }) + // Fetch using hashAlg encoded multihash + ipld.get(cid, (err, res) => { + if (err) return cb(err) + const meta = UnixFS.unmarshal(res.value.data) + expect(meta.type).to.equal('directory') + cb() + }) + } + + pull( + values([Object.assign({}, inputFile)]), + createBuilder(FixedSizeChunker, ipld, options), + collect(onCollected) + ) + }, done) }) -} +}) diff --git a/test/hash-parity-with-go-ipfs.js b/test/hash-parity-with-go-ipfs.js index 66cbb89..a9ef437 100644 --- a/test/hash-parity-with-go-ipfs.js +++ b/test/hash-parity-with-go-ipfs.js @@ -6,12 +6,11 @@ const importer = require('./../src') const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const BlockService = require('ipfs-block-service') const pull = require('pull-stream/pull') const values = require('pull-stream/sources/values') const collect = require('pull-stream/sinks/collect') const CID = require('cids') -const Ipld = require('ipld') +const IPLD = require('ipld') const randomByteStream = require('./helpers/finite-pseudorandom-byte-stream') const strategies = [ @@ -26,40 +25,43 @@ const expectedHashes = { trickle: 'QmYPsm9oVGjWECkT7KikZmrf8imggqKe8uS8Jco3qfWUCH' } -module.exports = (repo) => { - strategies.forEach(strategy => { - const options = { - strategy: strategy - } 
+strategies.forEach(strategy => { + const options = { + strategy: strategy + } - describe('go-ipfs interop using importer:' + strategy, () => { - let ipld + describe('go-ipfs interop using importer:' + strategy, () => { + let ipld - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) - }) + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() - it('yields the same tree as go-ipfs', function (done) { - this.timeout(10 * 1000) - pull( - values([ - { - path: 'big.dat', - content: randomByteStream(45900000, 7382) - } - ]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - expect(files.length).to.be.equal(1) + ipld = resolver - const file = files[0] - expect(new CID(file.multihash).toBaseEncodedString()).to.be.equal(expectedHashes[strategy]) - done() - }) - ) + done() }) }) + + it('yields the same tree as go-ipfs', function (done) { + this.timeout(10 * 1000) + pull( + values([ + { + path: 'big.dat', + content: randomByteStream(45900000, 7382) + } + ]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.be.equal(1) + + const file = files[0] + expect(new CID(file.multihash).toBaseEncodedString()).to.be.equal(expectedHashes[strategy]) + done() + }) + ) + }) }) -} +}) diff --git a/test/import-export-nested-dir.js b/test/import-export-nested-dir.js index 845bd07..8b5f371 100644 --- a/test/import-export-nested-dir.js +++ b/test/import-export-nested-dir.js @@ -4,8 +4,7 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const BlockService = require('ipfs-block-service') -const Ipld = require('ipld') +const IPLD = require('ipld') const pull = require('pull-stream/pull') const values = require('pull-stream/sources/values') const collect = require('pull-stream/sinks/collect') @@ -15,99 +14,102 @@ const CID = require('cids') const importer = require('./../src') const exporter = require('ipfs-unixfs-exporter') -module.exports = (repo) => { - describe('import and export: directory', () => { - const rootHash = 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK' - let ipld +describe('import and export: directory', () => { + const rootHash = 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK' + let ipld - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) - }) + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() - it('imports', function (done) { - this.timeout(20 * 1000) + ipld = resolver - pull( - values([ - { path: 'a/b/c/d/e', content: pull.values([Buffer.from('banana')]) }, - { path: 'a/b/c/d/f', content: pull.values([Buffer.from('strawberry')]) }, - { path: 'a/b/g', content: pull.values([Buffer.from('ice')]) }, - { path: 'a/b/h', content: pull.values([Buffer.from('cream')]) } - ]), - importer(ipld), - collect((err, files) => { - expect(err).to.not.exist() - expect(files.map(normalizeNode).sort(byPath)).to.be.eql([ - { path: 'a/b/h', - multihash: 'QmWHMpCtdNjemT2F3SjyrmnBXQXwEohaZd4apcbFBhbFRC' }, - { path: 'a/b/g', - multihash: 'QmQGwYzzTPcbqTiy2Nbp88gqqBqCWY4QZGfen45LFZkD5n' }, - { path: 'a/b/c/d/f', - multihash: 'QmNVHs2dy7AjGUotsubWVncRsD3SpRXm8MgmCCQTVdVACz' }, - { path: 'a/b/c/d/e', - multihash: 'QmYPbDKwc7oneCcEc6BcRSN5GXthTGWUCd19bTCyP9u3vH' }, - { path: 'a/b/c/d', - multihash: 'QmQGDXr3ysARM38n7h79Tx7yD3YxuzcnZ1naG71WMojPoj' }, - { path: 'a/b/c', - multihash: 'QmYTVcjYpN3hQLtJstCPE8hhEacAYjWAuTmmAAXoonamuE' }, - { path: 'a/b', - 
multihash: 'QmWyWYxq1GD9fEyckf5LrJv8hMW35CwfWwzDBp8bTw3NQj' }, - { path: 'a', - multihash: rootHash } - ]) - done() - }) - ) + done() }) + }) - it('exports', function (done) { - this.timeout(20 * 1000) + it('imports', function (done) { + this.timeout(20 * 1000) - pull( - exporter(rootHash, ipld), - collect((err, files) => { - expect(err).to.not.exist() - map( - files, - (file, callback) => { - if (file.content) { - pull( - file.content, - collect(mapFile(file, callback)) - ) - } else { - callback(null, { path: file.path }) - } - }, - (err, files) => { - expect(err).to.not.exist() - expect(files.filter(fileHasContent).sort(byPath)).to.eql([ - { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/h', - content: 'cream' }, - { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/g', - content: 'ice' }, - { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/c/d/f', - content: 'strawberry' }, - { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/c/d/e', - content: 'banana' } - ]) - done() - }) - }) - ) + pull( + values([ + { path: 'a/b/c/d/e', content: pull.values([Buffer.from('banana')]) }, + { path: 'a/b/c/d/f', content: pull.values([Buffer.from('strawberry')]) }, + { path: 'a/b/g', content: pull.values([Buffer.from('ice')]) }, + { path: 'a/b/h', content: pull.values([Buffer.from('cream')]) } + ]), + importer(ipld), + collect((err, files) => { + expect(err).to.not.exist() + expect(files.map(normalizeNode).sort(byPath)).to.be.eql([ + { path: 'a/b/h', + multihash: 'QmWHMpCtdNjemT2F3SjyrmnBXQXwEohaZd4apcbFBhbFRC' }, + { path: 'a/b/g', + multihash: 'QmQGwYzzTPcbqTiy2Nbp88gqqBqCWY4QZGfen45LFZkD5n' }, + { path: 'a/b/c/d/f', + multihash: 'QmNVHs2dy7AjGUotsubWVncRsD3SpRXm8MgmCCQTVdVACz' }, + { path: 'a/b/c/d/e', + multihash: 'QmYPbDKwc7oneCcEc6BcRSN5GXthTGWUCd19bTCyP9u3vH' }, + { path: 'a/b/c/d', + multihash: 'QmQGDXr3ysARM38n7h79Tx7yD3YxuzcnZ1naG71WMojPoj' }, + { path: 'a/b/c', + multihash: 'QmYTVcjYpN3hQLtJstCPE8hhEacAYjWAuTmmAAXoonamuE' }, + { path: 'a/b', + multihash: 'QmWyWYxq1GD9fEyckf5LrJv8hMW35CwfWwzDBp8bTw3NQj' }, + { path: 'a', + multihash: rootHash } + ]) + done() + }) + ) + }) - function mapFile (file, callback) { - return (err, fileContent) => { - callback(err, fileContent && { - path: file.path, - content: fileContent.toString() + it('exports', function (done) { + this.timeout(20 * 1000) + + pull( + exporter(rootHash, ipld), + collect((err, files) => { + expect(err).to.not.exist() + map( + files, + (file, callback) => { + if (file.content) { + pull( + file.content, + collect(mapFile(file, callback)) + ) + } else { + callback(null, { path: file.path }) + } + }, + (err, files) => { + expect(err).to.not.exist() + expect(files.filter(fileHasContent).sort(byPath)).to.eql([ + { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/h', + content: 'cream' }, + { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/g', + content: 'ice' }, + { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/c/d/f', + content: 'strawberry' }, + { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/c/d/e', + content: 'banana' } + ]) + done() }) - } + }) + ) + + function mapFile (file, callback) { + return (err, fileContent) => { + callback(err, fileContent && { + path: file.path, + content: fileContent.toString() + }) } - }) + } }) -} +}) function normalizeNode (node) { return { diff --git a/test/import-export.js b/test/import-export.js index 9e6733c..f2a276d 100644 --- a/test/import-export.js +++ b/test/import-export.js @@ -5,8 +5,7 @@ const chai = require('chai') 
chai.use(require('dirty-chai')) const expect = chai.expect -const BlockService = require('ipfs-block-service') -const Ipld = require('ipld') +const IPLD = require('ipld') const pull = require('pull-stream/pull') const values = require('pull-stream/sources/values') const concat = require('pull-stream/sinks/concat') @@ -37,41 +36,44 @@ function fileEql (f1, fileData, callback) { ) } -module.exports = (repo) => { - describe('import and export', function () { - this.timeout(30 * 1000) +describe('import and export', function () { + this.timeout(30 * 1000) - strategies.forEach((strategy) => { - const importerOptions = { strategy: strategy } + strategies.forEach((strategy) => { + const importerOptions = { strategy: strategy } - describe('using builder: ' + strategy, () => { - let ipld + describe('using builder: ' + strategy, () => { + let ipld - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() + + ipld = resolver + + done() }) + }) - it('import and export', (done) => { - const path = strategy + '-big.dat' + it('import and export', (done) => { + const path = strategy + '-big.dat' - pull( - values([{ path: path, content: values(bigFile) }]), - importer(ipld, importerOptions), - map((file) => { - expect(file.path).to.eql(path) + pull( + values([{ path: path, content: values(bigFile) }]), + importer(ipld, importerOptions), + map((file) => { + expect(file.path).to.eql(path) - return exporter(file.multihash, ipld) - }), - flatten(), - collect((err, files) => { - expect(err).to.not.exist() - expect(files[0].size).to.eql(bigFile.length) - fileEql(files[0], bigFile, done) - }) - ) - }) + return exporter(file.multihash, ipld) + }), + flatten(), + collect((err, files) => { + expect(err).to.not.exist() + expect(files[0].size).to.eql(bigFile.length) + fileEql(files[0], bigFile, done) + }) + ) }) }) }) -} +}) diff --git a/test/importer-flush.js b/test/importer-flush.js index 595ee0a..0144929 100644 --- a/test/importer-flush.js +++ b/test/importer-flush.js @@ -6,195 +6,197 @@ const createImporter = require('./../src') const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const BlockService = require('ipfs-block-service') -const Ipld = require('ipld') +const IPLD = require('ipld') const pull = require('pull-stream/pull') const values = require('pull-stream/sources/values') const map = require('pull-stream/throughs/map') const collect = require('pull-stream/sinks/collect') const pushable = require('pull-pushable') -module.exports = (repo) => { - describe('importer: flush', () => { - let ipld +describe('importer: flush', () => { + let ipld - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) - }) + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() - it('can push a single root file and flush yields no dirs', (done) => { - const source = pushable() - const importer = createImporter(ipld) - pull( - source, - importer, - map(node => { - expect(node.path).to.be.eql('a') - return node - }), - collect((err, files) => { - expect(err).to.not.exist() - expect(files.length).to.be.eql(1) - done() - }) - ) + ipld = resolver + + done() + }) + }) - source.push({ - path: 'a', - content: values([Buffer.from('hey')]) + it('can push a single root file and flush yields no dirs', (done) => { + const source = pushable() + const importer = createImporter(ipld) + pull( + source, + importer, + map(node 
=> { + expect(node.path).to.be.eql('a') + return node + }), + collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.be.eql(1) + done() }) + ) - importer.flush((err, hash) => { + source.push({ + path: 'a', + content: values([Buffer.from('hey')]) + }) + + importer.flush((err, hash) => { + expect(err).to.not.exist() + expect(Buffer.isBuffer(hash)).to.be.true() + source.end() + }) + }) + + it('can push a nested file and flush yields parent dir', (done) => { + const source = pushable() + const importer = createImporter(ipld) + let count = 0 + pull( + source, + importer, + map(function (node) { + count++ + if (count === 1) { + expect(node.path).to.be.eql('b/c') + } else if (count === 2) { + expect(node.path).to.be.eql('b') + } + return node + }), + collect((err, files) => { expect(err).to.not.exist() - expect(Buffer.isBuffer(hash)).to.be.true() - source.end() + expect(count).to.be.eql(2) + done() }) + ) + + source.push({ + path: 'b/c', + content: values([Buffer.from('hey')]) }) - it('can push a nested file and flush yields parent dir', (done) => { - const source = pushable() - const importer = createImporter(ipld) - let count = 0 - pull( - source, - importer, - map(function (node) { - count++ - if (count === 1) { - expect(node.path).to.be.eql('b/c') - } else if (count === 2) { - expect(node.path).to.be.eql('b') - } - return node - }), - collect((err, files) => { - expect(err).to.not.exist() - expect(count).to.be.eql(2) - done() - }) - ) + importer.flush((err, hash) => { + expect(err).to.not.exist() + expect(Buffer.isBuffer(hash)).to.be.true() + source.end() + }) + }) - source.push({ - path: 'b/c', - content: values([Buffer.from('hey')]) + it('can flush many times, always coherent', (done) => { + const maxDepth = 4 + const maxEntriesPerDir = 3 + + let count = 0 + const tree = { children: {}, path: '', depth: 0, yielded: true } + let currentDir = tree + + const source = pushable() + const importer = createImporter(ipld) + + pull( + source, + importer, + map((node) => { + count++ + markDirAsYielded(node) + return node + }), + collect((err, files) => { + expect(err).to.not.exist() + expect(count).to.be.eql(2) + done() }) + ) + + pushAndFlush() + + function pushAndFlush () { + const childCount = Object.keys(currentDir.children).length + const newDirName = childCount.toString() + const dirPath = currentDir.path + (currentDir.depth > 0 ? 
'/' : '') + newDirName + const newDir = { + children: {}, + path: dirPath, + depth: currentDir.depth + 1, + yielded: false, + parent: currentDir + } + currentDir.children[newDirName] = newDir + markAncestorsAsDirty(currentDir) + const filePath = dirPath + '/filename' + const file = { + path: filePath, + content: values([Buffer.from('file with path ' + filePath)]) + } + source.push(file) + if (currentDir.depth === 0 || childCount + 1 === maxEntriesPerDir) { + currentDir = newDir + } importer.flush((err, hash) => { expect(err).to.not.exist() expect(Buffer.isBuffer(hash)).to.be.true() - source.end() - }) - }) - - it('can flush many times, always coherent', (done) => { - const maxDepth = 4 - const maxEntriesPerDir = 3 - - let count = 0 - const tree = { children: {}, path: '', depth: 0, yielded: true } - let currentDir = tree - - const source = pushable() - const importer = createImporter(ipld) - - pull( - source, - importer, - map((node) => { - count++ - markDirAsYielded(node) - return node - }), - collect((err, files) => { - expect(err).to.not.exist() - expect(count).to.be.eql(2) + testAllYielded(tree) + if (currentDir.depth < maxDepth) { + pushAndFlush() + } else { + expect(count).to.be.eql(38) done() - }) - ) - - pushAndFlush() - - function pushAndFlush () { - const childCount = Object.keys(currentDir.children).length - const newDirName = childCount.toString() - const dirPath = currentDir.path + (currentDir.depth > 0 ? '/' : '') + newDirName - const newDir = { - children: {}, - path: dirPath, - depth: currentDir.depth + 1, - yielded: false, - parent: currentDir } - currentDir.children[newDirName] = newDir - markAncestorsAsDirty(currentDir) + }) + } - const filePath = dirPath + '/filename' - const file = { - path: filePath, - content: values([Buffer.from('file with path ' + filePath)]) - } - source.push(file) - if (currentDir.depth === 0 || childCount + 1 === maxEntriesPerDir) { - currentDir = newDir - } - importer.flush((err, hash) => { - expect(err).to.not.exist() - expect(Buffer.isBuffer(hash)).to.be.true() - testAllYielded(tree) - if (currentDir.depth < maxDepth) { - pushAndFlush() - } else { - expect(count).to.be.eql(38) - done() - } - }) + function markDirAsYielded (node) { + const dir = findDir(tree, node.path) + if (node.path === dir.path) { + expect(dir.yielded).to.be.false() + dir.yielded = true } + } - function markDirAsYielded (node) { - const dir = findDir(tree, node.path) - if (node.path === dir.path) { - expect(dir.yielded).to.be.false() - dir.yielded = true - } + function findDir (tree, path) { + const pathElems = path.split('/').filter(notEmpty) + const child = tree.children[pathElems.shift()] + if (!child) { + return tree } - - function findDir (tree, path) { - const pathElems = path.split('/').filter(notEmpty) - const child = tree.children[pathElems.shift()] - if (!child) { - return tree - } - if (pathElems.length) { - return findDir(child, pathElems.join('/')) - } else { - return child - } + if (pathElems.length) { + return findDir(child, pathElems.join('/')) + } else { + return child } + } - function testAllYielded (tree) { - if (tree.depth) { - expect(tree.yielded).to.be.true() - } - const childrenNames = Object.keys(tree.children) - childrenNames.forEach((childName) => { - const child = tree.children[childName] - testAllYielded(child) - }) + function testAllYielded (tree) { + if (tree.depth) { + expect(tree.yielded).to.be.true() } - - function markAncestorsAsDirty (dir) { - dir.yielded = false - while (dir) { - dir = dir.parent - if (dir) { - dir.yielded = false - } + 
const childrenNames = Object.keys(tree.children) + childrenNames.forEach((childName) => { + const child = tree.children[childName] + testAllYielded(child) + }) + } + + function markAncestorsAsDirty (dir) { + dir.yielded = false + while (dir) { + dir = dir.parent + if (dir) { + dir.yielded = false } } - }) + } }) -} +}) function notEmpty (str) { return Boolean(str) diff --git a/test/importer.js b/test/importer.js index 71c45a8..1704396 100644 --- a/test/importer.js +++ b/test/importer.js @@ -9,13 +9,12 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect const spy = require('sinon/lib/sinon/spy') -const BlockService = require('ipfs-block-service') const pull = require('pull-stream/pull') const once = require('pull-stream/sources/once') const values = require('pull-stream/sources/values') const collect = require('pull-stream/sinks/collect') const CID = require('cids') -const Ipld = require('ipld') +const IPLD = require('ipld') const loadFixture = require('aegir/fixtures') const each = require('async/each') const waterfall = require('async/waterfall') @@ -174,555 +173,557 @@ const checkNodeLinks = (ipld, options, expected, done) => { ], done) } -module.exports = (repo) => { - strategies.forEach((strategy) => { - const baseFiles = strategyBaseFiles[strategy] - const defaultResults = extend({}, baseFiles, { - 'foo/bar/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], { - path: 'foo/bar/200Bytes.txt' - }), - foo: { - path: 'foo', - multihash: 'QmQrb6KKWGo8w7zKfx2JksptY6wN7B2ysSBdKZr4xMU36d', - size: 320 - }, - 'foo/bar': { - path: 'foo/bar', - multihash: 'Qmf5BQbTUyUAvd6Ewct83GYGnE1F6btiC3acLhR8MDxgkD', - size: 270 - }, - 'foo-big/1.2MiB.txt': extend({}, baseFiles['1.2MiB.txt'], { - path: 'foo-big/1.2MiB.txt' - }), - 'foo-big': { - path: 'foo-big', - multihash: 'Qma6JU3FoXU9eAzgomtmYPjzFBwVc2rRbECQpmHFiA98CJ', - size: 1328120 - }, - 'pim/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], { - path: 'pim/200Bytes.txt' - }), - 'pim/1.2MiB.txt': extend({}, baseFiles['1.2MiB.txt'], { - path: 'pim/1.2MiB.txt' - }), - pim: { - path: 'pim', - multihash: 'QmNk8VPGb3fkAQgoxctXo4Wmnr4PayFTASy4MiVXTtXqiA', - size: 1328386 - }, - 'empty-dir': { - path: 'empty-dir', - multihash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - size: 4 - }, - 'pam/pum': { - multihash: 'QmNk8VPGb3fkAQgoxctXo4Wmnr4PayFTASy4MiVXTtXqiA', - size: 1328386 - }, - pam: { - multihash: 'QmPAixYTaYnPe795fcWcuRpo6tfwHgRKNiBHpMzoomDVN6', - size: 2656553 - }, - '200Bytes.txt with raw leaves': extend({}, baseFiles['200Bytes.txt'], { - multihash: 'zb2rhXrz1gkCv8p4nUDZRohY6MzBE9C3HVTVDP72g6Du3SD9Q', - size: 200 - }) - }, strategyOverrides[strategy]) - - const expected = extend({}, defaultResults, strategies[strategy]) +strategies.forEach((strategy) => { + const baseFiles = strategyBaseFiles[strategy] + const defaultResults = extend({}, baseFiles, { + 'foo/bar/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], { + path: 'foo/bar/200Bytes.txt' + }), + foo: { + path: 'foo', + multihash: 'QmQrb6KKWGo8w7zKfx2JksptY6wN7B2ysSBdKZr4xMU36d', + size: 320 + }, + 'foo/bar': { + path: 'foo/bar', + multihash: 'Qmf5BQbTUyUAvd6Ewct83GYGnE1F6btiC3acLhR8MDxgkD', + size: 270 + }, + 'foo-big/1.2MiB.txt': extend({}, baseFiles['1.2MiB.txt'], { + path: 'foo-big/1.2MiB.txt' + }), + 'foo-big': { + path: 'foo-big', + multihash: 'Qma6JU3FoXU9eAzgomtmYPjzFBwVc2rRbECQpmHFiA98CJ', + size: 1328120 + }, + 'pim/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], { + path: 'pim/200Bytes.txt' + }), + 'pim/1.2MiB.txt': extend({}, 
baseFiles['1.2MiB.txt'], { + path: 'pim/1.2MiB.txt' + }), + pim: { + path: 'pim', + multihash: 'QmNk8VPGb3fkAQgoxctXo4Wmnr4PayFTASy4MiVXTtXqiA', + size: 1328386 + }, + 'empty-dir': { + path: 'empty-dir', + multihash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + size: 4 + }, + 'pam/pum': { + multihash: 'QmNk8VPGb3fkAQgoxctXo4Wmnr4PayFTASy4MiVXTtXqiA', + size: 1328386 + }, + pam: { + multihash: 'QmPAixYTaYnPe795fcWcuRpo6tfwHgRKNiBHpMzoomDVN6', + size: 2656553 + }, + '200Bytes.txt with raw leaves': extend({}, baseFiles['200Bytes.txt'], { + multihash: 'zb2rhXrz1gkCv8p4nUDZRohY6MzBE9C3HVTVDP72g6Du3SD9Q', + size: 200 + }) + }, strategyOverrides[strategy]) - describe('importer: ' + strategy, function () { - this.timeout(30 * 1000) + const expected = extend({}, defaultResults, strategies[strategy]) - let ipld + describe('importer: ' + strategy, function () { + this.timeout(30 * 1000) - const options = { - strategy: strategy, - maxChildrenPerNode: 10, - chunkerOptions: { - maxChunkSize: 1024 - } + let ipld + const options = { + strategy: strategy, + maxChildrenPerNode: 10, + chunkerOptions: { + maxChunkSize: 1024 } + } - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) - }) + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() - it('fails on bad input', (done) => { - pull( - values([{ - path: '200Bytes.txt', - content: 'banana' - }]), - importer(ipld, options), - pull.onEnd((err) => { - expect(err).to.exist() - done() - }) - ) - }) + ipld = resolver - it('doesn\'t yield anything on empty source', (done) => { - pull( - pull.empty(), - importer(ipld, options), - collect((err, nodes) => { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(0) - done() - })) + done() }) + }) - it('doesn\'t yield anything on empty file', (done) => { - pull( - values([{ - path: 'emptyfile', - content: pull.empty() - }]), - importer(ipld, options), - collect((err, nodes) => { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(1) - - // always yield empty node - expect(new CID(nodes[0].multihash).toBaseEncodedString()).to.be.eql('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH') - done() - })) - }) + it('fails on bad input', (done) => { + pull( + values([{ + path: '200Bytes.txt', + content: 'banana' + }]), + importer(ipld, options), + pull.onEnd((err) => { + expect(err).to.exist() + done() + }) + ) + }) - it('fails on more than one root', (done) => { - pull( - values([ - { - path: '/beep/200Bytes.txt', - content: values([smallFile]) - }, - { - path: '/boop/200Bytes.txt', - content: values([bigFile]) - } - ]), - importer(ipld, options), - pull.onEnd((err) => { - expect(err).to.exist() - expect(err.message).to.be.eql('detected more than one root') - done() - }) - ) - }) + it('doesn\'t yield anything on empty source', (done) => { + pull( + pull.empty(), + importer(ipld, options), + collect((err, nodes) => { + expect(err).to.not.exist() + expect(nodes.length).to.be.eql(0) + done() + })) + }) - it('small file with an escaped slash in the title', (done) => { - const filePath = `small-\\/file-${Math.random()}.txt` + it('doesn\'t yield anything on empty file', (done) => { + pull( + values([{ + path: 'emptyfile', + content: pull.empty() + }]), + importer(ipld, options), + collect((err, nodes) => { + expect(err).to.not.exist() + expect(nodes.length).to.be.eql(1) - pull( - values([{ - path: filePath, + // always yield empty node + expect(new 
CID(nodes[0].multihash).toBaseEncodedString()).to.be.eql('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH') + done() + })) + }) + + it('fails on more than one root', (done) => { + pull( + values([ + { + path: '/beep/200Bytes.txt', content: values([smallFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - expect(files.length).to.equal(1) - expect(files[0].path).to.equal(filePath) - done() - }) - ) - }) + }, + { + path: '/boop/200Bytes.txt', + content: values([bigFile]) + } + ]), + importer(ipld, options), + pull.onEnd((err) => { + expect(err).to.exist() + expect(err.message).to.be.eql('detected more than one root') + done() + }) + ) + }) - it('small file with square brackets in the title', (done) => { - const filePath = `small-[v]-file-${Math.random()}.txt` + it('small file with an escaped slash in the title', (done) => { + const filePath = `small-\\/file-${Math.random()}.txt` - pull( - values([{ - path: filePath, - content: values([smallFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - expect(files.length).to.equal(1) - expect(files[0].path).to.equal(filePath) - done() - }) - ) - }) + pull( + values([{ + path: filePath, + content: values([smallFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.equal(1) + expect(files[0].path).to.equal(filePath) + done() + }) + ) + }) - it('small file (smaller than a chunk)', (done) => { - pull( - values([{ - path: '200Bytes.txt', - content: values([smallFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']]) - done() - }) - ) - }) + it('small file with square brackets in the title', (done) => { + const filePath = `small-[v]-file-${Math.random()}.txt` - it('small file (smaller than a chunk) with raw leaves', (done) => { - pull( - values([{ - path: '200Bytes.txt', - content: values([smallFile]) - }]), - importer(ipld, Object.assign({}, options, { rawLeaves: true })), - collect((err, files) => { - expect(err).to.not.exist() - expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt with raw leaves']]) - done() - }) - ) - }) + pull( + values([{ + path: filePath, + content: values([smallFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.equal(1) + expect(files[0].path).to.equal(filePath) + done() + }) + ) + }) - it('small file as buffer (smaller than a chunk)', (done) => { - pull( - values([{ - path: '200Bytes.txt', - content: smallFile - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']]) - done() - }) - ) - }) + it('small file (smaller than a chunk)', (done) => { + pull( + values([{ + path: '200Bytes.txt', + content: values([smallFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']]) + done() + }) + ) + }) - it('small file (smaller than a chunk) inside a dir', (done) => { - pull( - values([{ - path: 'foo/bar/200Bytes.txt', - content: values([smallFile]) - }]), - importer(ipld, options), - collect(collected) - ) + it('small file (smaller than a chunk) with raw leaves', (done) => { + pull( + values([{ + path: '200Bytes.txt', + content: values([smallFile]) + }]), + importer(ipld, Object.assign({}, 
options, { rawLeaves: true })), + collect((err, files) => { + expect(err).to.not.exist() + expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt with raw leaves']]) + done() + }) + ) + }) - function collected (err, files) { + it('small file as buffer (smaller than a chunk)', (done) => { + pull( + values([{ + path: '200Bytes.txt', + content: smallFile + }]), + importer(ipld, options), + collect((err, files) => { expect(err).to.not.exist() - expect(files.length).to.equal(3) - stringifyMh(files).forEach((file) => { - if (file.path === 'foo/bar/200Bytes.txt') { - expect(file).to.be.eql(expected['foo/bar/200Bytes.txt']) - } - if (file.path === 'foo') { - expect(file).to.be.eql(expected.foo) - } - if (file.path === 'foo/bar') { - expect(file).to.be.eql(expected['foo/bar']) - } - }) + expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']]) done() - } - }) + }) + ) + }) - it('file bigger than a single chunk', function (done) { - this.timeout(60 * 1000) - pull( - values([{ - path: '1.2MiB.txt', - content: values([bigFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - expect(stringifyMh(files)).to.be.eql([expected['1.2MiB.txt']]) - done() - }) - ) - }) + it('small file (smaller than a chunk) inside a dir', (done) => { + pull( + values([{ + path: 'foo/bar/200Bytes.txt', + content: values([smallFile]) + }]), + importer(ipld, options), + collect(collected) + ) + + function collected (err, files) { + expect(err).to.not.exist() + expect(files.length).to.equal(3) + stringifyMh(files).forEach((file) => { + if (file.path === 'foo/bar/200Bytes.txt') { + expect(file).to.be.eql(expected['foo/bar/200Bytes.txt']) + } + if (file.path === 'foo') { + expect(file).to.be.eql(expected.foo) + } + if (file.path === 'foo/bar') { + expect(file).to.be.eql(expected['foo/bar']) + } + }) + done() + } + }) - it('file bigger than a single chunk inside a dir', function (done) { - this.timeout(60 * 1000) - pull( - values([{ - path: 'foo-big/1.2MiB.txt', - content: values([bigFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() + it('file bigger than a single chunk', function (done) { + this.timeout(60 * 1000) + pull( + values([{ + path: '1.2MiB.txt', + content: values([bigFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + expect(stringifyMh(files)).to.be.eql([expected['1.2MiB.txt']]) + done() + }) + ) + }) - expect(stringifyMh(files)).to.be.eql([ - expected['foo-big/1.2MiB.txt'], - expected['foo-big'] - ]) + it('file bigger than a single chunk inside a dir', function (done) { + this.timeout(60 * 1000) + pull( + values([{ + path: 'foo-big/1.2MiB.txt', + content: values([bigFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() - done() - }) - ) - }) + expect(stringifyMh(files)).to.be.eql([ + expected['foo-big/1.2MiB.txt'], + expected['foo-big'] + ]) - it('empty directory', (done) => { - pull( - values([{ - path: 'empty-dir' - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() + done() + }) + ) + }) - expect(stringifyMh(files)).to.be.eql([expected['empty-dir']]) + it('empty directory', (done) => { + pull( + values([{ + path: 'empty-dir' + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() - done() - }) - ) - }) + expect(stringifyMh(files)).to.be.eql([expected['empty-dir']]) - it('directory with files', (done) => { - pull( - values([{ - path: 
'pim/200Bytes.txt', - content: values([smallFile]) - }, { - path: 'pim/1.2MiB.txt', - content: values([bigFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() + done() + }) + ) + }) - expect(stringifyMh(files)).be.eql([ - expected['pim/200Bytes.txt'], - expected['pim/1.2MiB.txt'], - expected.pim] - ) + it('directory with files', (done) => { + pull( + values([{ + path: 'pim/200Bytes.txt', + content: values([smallFile]) + }, { + path: 'pim/1.2MiB.txt', + content: values([bigFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() - done() - }) - ) - }) + expect(stringifyMh(files)).be.eql([ + expected['pim/200Bytes.txt'], + expected['pim/1.2MiB.txt'], + expected.pim] + ) - it('nested directory (2 levels deep)', (done) => { - pull( - values([{ - path: 'pam/pum/200Bytes.txt', - content: values([smallFile]) - }, { - path: 'pam/pum/1.2MiB.txt', - content: values([bigFile]) - }, { - path: 'pam/1.2MiB.txt', - content: values([bigFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() + done() + }) + ) + }) - // need to sort as due to parallel storage the order - // can vary - stringifyMh(files).forEach(eachFile) + it('nested directory (2 levels deep)', (done) => { + pull( + values([{ + path: 'pam/pum/200Bytes.txt', + content: values([smallFile]) + }, { + path: 'pam/pum/1.2MiB.txt', + content: values([bigFile]) + }, { + path: 'pam/1.2MiB.txt', + content: values([bigFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() - done() - }) - ) + // need to sort as due to parallel storage the order + // can vary + stringifyMh(files).forEach(eachFile) - function eachFile (file) { - if (file.path === 'pam/pum/200Bytes.txt') { - expect(file.multihash).to.be.eql(expected['200Bytes.txt'].multihash) - expect(file.size).to.be.eql(expected['200Bytes.txt'].size) - } - if (file.path === 'pam/pum/1.2MiB.txt') { - expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash) - expect(file.size).to.be.eql(expected['1.2MiB.txt'].size) - } - if (file.path === 'pam/pum') { - const dir = expected['pam/pum'] - expect(file.multihash).to.be.eql(dir.multihash) - expect(file.size).to.be.eql(dir.size) - } - if (file.path === 'pam/1.2MiB.txt') { - expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash) - expect(file.size).to.be.eql(expected['1.2MiB.txt'].size) - } - if (file.path === 'pam') { - const dir = expected.pam - expect(file.multihash).to.be.eql(dir.multihash) - expect(file.size).to.be.eql(dir.size) - } - } - }) + done() + }) + ) - it('will not write to disk if passed "onlyHash" option', (done) => { - const content = String(Math.random() + Date.now()) - const inputFile = { - path: content + '.txt', - content: Buffer.from(content) + function eachFile (file) { + if (file.path === 'pam/pum/200Bytes.txt') { + expect(file.multihash).to.be.eql(expected['200Bytes.txt'].multihash) + expect(file.size).to.be.eql(expected['200Bytes.txt'].size) } - - const options = { - onlyHash: true + if (file.path === 'pam/pum/1.2MiB.txt') { + expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash) + expect(file.size).to.be.eql(expected['1.2MiB.txt'].size) + } + if (file.path === 'pam/pum') { + const dir = expected['pam/pum'] + expect(file.multihash).to.be.eql(dir.multihash) + expect(file.size).to.be.eql(dir.size) } + if (file.path === 'pam/1.2MiB.txt') { + expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash) + 
expect(file.size).to.be.eql(expected['1.2MiB.txt'].size) + } + if (file.path === 'pam') { + const dir = expected.pam + expect(file.multihash).to.be.eql(dir.multihash) + expect(file.size).to.be.eql(dir.size) + } + } + }) - const onCollected = (err, files) => { - if (err) return done(err) + it('will not write to disk if passed "onlyHash" option', (done) => { + const content = String(Math.random() + Date.now()) + const inputFile = { + path: content + '.txt', + content: Buffer.from(content) + } - const file = files[0] - expect(file).to.exist() + const options = { + onlyHash: true + } - ipld.get(new CID(file.multihash), (err) => { - expect(err).to.exist() - done() - }) - } + const onCollected = (err, files) => { + if (err) return done(err) - pull( - values([inputFile]), - importer(ipld, options), - collect(onCollected) - ) - }) + const file = files[0] + expect(file).to.exist() - it('will call an optional progress function', (done) => { - options.progress = spy() + ipld.get(new CID(file.multihash), (err) => { + expect(err).to.exist() + done() + }) + } - pull( - values([{ - path: '1.2MiB.txt', - content: values([bigFile]) - }]), - importer(ipld, options), - collect(() => { - expect(options.progress.called).to.equal(true) - expect(options.progress.args[0][0]).to.equal(1024) - done() - }) - ) - }) + pull( + values([inputFile]), + importer(ipld, options), + collect(onCollected) + ) + }) - it('will import files with CID version 1', (done) => { - const createInputFile = (path, size) => { - const name = String(Math.random() + Date.now()) - path = path[path.length - 1] === '/' ? path : path + '/' - return { - path: path + name + '.txt', - content: Buffer.alloc(size).fill(1) - } - } + it('will call an optional progress function', (done) => { + options.progress = spy() + + pull( + values([{ + path: '1.2MiB.txt', + content: values([bigFile]) + }]), + importer(ipld, options), + collect(() => { + expect(options.progress.called).to.equal(true) + expect(options.progress.args[0][0]).to.equal(1024) + done() + }) + ) + }) - const inputFiles = [ - createInputFile('/foo', 10), - createInputFile('/foo', 60), - createInputFile('/foo/bar', 78), - createInputFile('/foo/baz', 200), - // Bigger than maxChunkSize - createInputFile('/foo', 262144 + 45), - createInputFile('/foo/bar', 262144 + 134), - createInputFile('/foo/bar', 262144 + 79), - createInputFile('/foo/bar', 262144 + 876), - createInputFile('/foo/bar', 262144 + 21) - ] - - const options = { - cidVersion: 1, - // Ensures we use DirSharded for the data below - shardSplitThreshold: 3 + it('will import files with CID version 1', (done) => { + const createInputFile = (path, size) => { + const name = String(Math.random() + Date.now()) + path = path[path.length - 1] === '/' ? 
path : path + '/' + return { + path: path + name + '.txt', + content: Buffer.alloc(size).fill(1) } + } - const onCollected = (err, files) => { - if (err) return done(err) - - const file = files[0] - expect(file).to.exist() - - each(files, (file, cb) => { - const cid = new CID(file.multihash).toV1() - const inputFile = inputFiles.find(f => f.path === file.path) - - // Just check the intermediate directory can be retrieved - if (!inputFile) { - return ipld.get(cid, cb) - } - - // Check the imported content is correct - pull( - exporter(cid, ipld), - collect((err, nodes) => { - expect(err).to.not.exist() - pull( - nodes[0].content, - collect((err, chunks) => { - expect(err).to.not.exist() - expect(Buffer.concat(chunks)).to.deep.equal(inputFile.content) - cb() - }) - ) - }) - ) - }, done) - } + const inputFiles = [ + createInputFile('/foo', 10), + createInputFile('/foo', 60), + createInputFile('/foo/bar', 78), + createInputFile('/foo/baz', 200), + // Bigger than maxChunkSize + createInputFile('/foo', 262144 + 45), + createInputFile('/foo/bar', 262144 + 134), + createInputFile('/foo/bar', 262144 + 79), + createInputFile('/foo/bar', 262144 + 876), + createInputFile('/foo/bar', 262144 + 21) + ] - pull( - // Pass a copy of inputFiles, since the importer mutates them - values(inputFiles.map(f => Object.assign({}, f))), - importer(ipld, options), - collect(onCollected) - ) - }) + const options = { + cidVersion: 1, + // Ensures we use DirSharded for the data below + shardSplitThreshold: 3 + } - it('imports file with raw leaf nodes when specified', (done) => { - checkLeafNodeTypes(ipld, { - leafType: 'raw' - }, 'raw', done) - }) + const onCollected = (err, files) => { + if (err) return done(err) - it('imports file with file leaf nodes when specified', (done) => { - checkLeafNodeTypes(ipld, { - leafType: 'file' - }, 'file', done) - }) + const file = files[0] + expect(file).to.exist() - it('reduces file to single node when specified', (done) => { - checkNodeLinks(ipld, { - reduceSingleLeafToSelf: true - }, 0, done) - }) + each(files, (file, cb) => { + const cid = new CID(file.multihash).toV1() + const inputFile = inputFiles.find(f => f.path === file.path) - it('does not reduce file to single node when overidden by options', (done) => { - checkNodeLinks(ipld, { - reduceSingleLeafToSelf: false - }, 1, done) - }) + // Just check the intermediate directory can be retrieved + if (!inputFile) { + return ipld.get(cid, cb) + } - it('uses raw leaf nodes when requested', (done) => { - this.timeout(60 * 1000) + // Check the imported content is correct + pull( + exporter(cid, ipld), + collect((err, nodes) => { + expect(err).to.not.exist() + pull( + nodes[0].content, + collect((err, chunks) => { + expect(err).to.not.exist() + expect(Buffer.concat(chunks)).to.deep.equal(inputFile.content) + cb() + }) + ) + }) + ) + }, done) + } - options.rawLeaves = true + pull( + // Pass a copy of inputFiles, since the importer mutates them + values(inputFiles.map(f => Object.assign({}, f))), + importer(ipld, options), + collect(onCollected) + ) + }) - pull( - values([{ - path: '1.2MiB.txt', - content: values([bigFile]) - }]), - importer(ipld, options), - collect((error, files) => { - expect(error).to.not.exist() + it('imports file with raw leaf nodes when specified', (done) => { + checkLeafNodeTypes(ipld, { + leafType: 'raw' + }, 'raw', done) + }) - const node = files[0] + it('imports file with file leaf nodes when specified', (done) => { + checkLeafNodeTypes(ipld, { + leafType: 'file' + }, 'file', done) + }) - 
collectLeafCids(ipld, node.multihash, (error, cids) => { - expect(error).to.be.not.ok() + it('reduces file to single node when specified', (done) => { + checkNodeLinks(ipld, { + reduceSingleLeafToSelf: true + }, 0, done) + }) - const rawNodes = cids - .filter(cid => cid.codec === 'raw') + it('does not reduce file to single node when overidden by options', (done) => { + checkNodeLinks(ipld, { + reduceSingleLeafToSelf: false + }, 1, done) + }) - expect(rawNodes).to.not.be.empty() + it('uses raw leaf nodes when requested', (done) => { + this.timeout(60 * 1000) - rawNodes - .forEach(cid => { - expect(cid.version).to.equal(1) - }) + options.rawLeaves = true - done() - }) + pull( + values([{ + path: '1.2MiB.txt', + content: values([bigFile]) + }]), + importer(ipld, options), + collect((error, files) => { + expect(error).to.not.exist() + + const node = files[0] + + collectLeafCids(ipld, node.multihash, (error, cids) => { + expect(error).to.be.not.ok() + + const rawNodes = cids + .filter(cid => cid.codec === 'raw') + + expect(rawNodes).to.not.be.empty() + + rawNodes + .forEach(cid => { + expect(cid.version).to.equal(1) + }) + + done() }) - ) - }) + }) + ) }) }) -} +}) diff --git a/test/node.js b/test/node.js index 196394e..527f5e5 100644 --- a/test/node.js +++ b/test/node.js @@ -1,61 +1,24 @@ /* eslint-env mocha */ 'use strict' -const ncp = require('ncp').ncp -const rimraf = require('rimraf') -const path = require('path') -const os = require('os') -const IPFSRepo = require('ipfs-repo') -const mkdirp = require('mkdirp') -const series = require('async/series') - -describe('IPFS UnixFS Engine', () => { - const repoExample = path.join(process.cwd(), 'test', 'test-repo') - const repoTests = path.join(os.tmpdir(), 'unixfs-tests-' + Date.now()) - - const repo = new IPFSRepo(repoTests) - - before((done) => { - const paths = [ - 'test-data/dir-nested/dir-another', - 'test-data/dir-nested/level-1/level-2' - ] - process.env.IPFS_PATH = repoTests - series([ - (cb) => ncp(repoExample, repoTests, cb), - (cb) => repo.open(cb), - (cb) => series(paths.map((p) => (cb) => { - mkdirp(path.join(__dirname, p), cb) - }), cb) - ], done) - }) - - after((done) => { - series([ - (cb) => repo.close(cb), - (cb) => rimraf(repoTests, cb) - ], done) - }) - - // Chunkers - require('./chunker-fixed-size') - require('./chunker-rabin') - - // Graph Builders - require('./builder')(repo) - require('./builder-flat') - require('./builder-balanced') - require('./builder-trickle-dag') - require('./builder-only-hash')(repo) - require('./builder-dir-sharding')(repo) - - // Importer - require('./importer')(repo) - require('./importer-flush')(repo) - - // Other - require('./import-export')(repo) - require('./import-export-nested-dir')(repo) - require('./hash-parity-with-go-ipfs')(repo) - require('./with-dag-api') -}) +// Chunkers +require('./chunker-fixed-size') +require('./chunker-rabin') + +// Graph Builders +require('./builder') +require('./builder-flat') +require('./builder-balanced') +require('./builder-trickle-dag') +require('./builder-only-hash') +require('./builder-dir-sharding') + +// Importer +require('./importer') +require('./importer-flush') + +// Other +require('./import-export') +require('./import-export-nested-dir') +require('./hash-parity-with-go-ipfs') +require('./with-dag-api') diff --git a/test/with-dag-api.js b/test/with-dag-api.js index 3f1d8e4..c1a8950 100644 --- a/test/with-dag-api.js +++ b/test/with-dag-api.js @@ -176,8 +176,6 @@ describe('with dag-api', function () { } before(function (done) { - this.timeout(30 * 
1000) - IPLD.inMemory((err, resolver) => { if (err) { return done(err) From bd54989724e39a76dd5366da73313df1477e0ebc Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 13 Dec 2018 16:36:29 +0000 Subject: [PATCH 2/4] test: rename files and remove some unused deps --- package.json | 1 + ...r-balanced.js => builder-balanced.spec.js} | 0 ...arding.js => builder-dir-sharding.spec.js} | 0 .../{builder-flat.js => builder-flat.spec.js} | 0 ...only-hash.js => builder-only-hash.spec.js} | 0 ...kle-dag.js => builder-trickle-dag.spec.js} | 0 test/{builder.js => builder.spec.js} | 0 ...xed-size.js => chunker-fixed-size.spec.js} | 10 +++++-- ...owser.js => chunker-rabin-browser.spec.js} | 13 +++++++-- ...chunker-rabin.js => chunker-rabin.spec.js} | 7 ++++- ...fs.js => hash-parity-with-go-ipfs.spec.js} | 2 +- ...ir.js => import-export-nested-dir.spec.js} | 2 +- ...import-export.js => import-export.spec.js} | 2 +- ...porter-flush.js => importer-flush.spec.js} | 2 +- test/{importer.js => importer.spec.js} | 2 +- test/node.js | 29 ++++++++++--------- .../{with-dag-api.js => with-dag-api.spec.js} | 0 17 files changed, 45 insertions(+), 25 deletions(-) rename test/{builder-balanced.js => builder-balanced.spec.js} (100%) rename test/{builder-dir-sharding.js => builder-dir-sharding.spec.js} (100%) rename test/{builder-flat.js => builder-flat.spec.js} (100%) rename test/{builder-only-hash.js => builder-only-hash.spec.js} (100%) rename test/{builder-trickle-dag.js => builder-trickle-dag.spec.js} (100%) rename test/{builder.js => builder.spec.js} (100%) rename test/{chunker-fixed-size.js => chunker-fixed-size.spec.js} (93%) rename test/{chunker-rabin-browser.js => chunker-rabin-browser.spec.js} (74%) rename test/{chunker-rabin.js => chunker-rabin.spec.js} (94%) rename test/{hash-parity-with-go-ipfs.js => hash-parity-with-go-ipfs.spec.js} (97%) rename test/{import-export-nested-dir.js => import-export-nested-dir.spec.js} (99%) rename test/{import-export.js => import-export.spec.js} (98%) rename test/{importer-flush.js => importer-flush.spec.js} (99%) rename test/{importer.js => importer.spec.js} (99%) rename test/{with-dag-api.js => with-dag-api.spec.js} (100%) diff --git a/package.json b/package.json index 371c269..723d5c3 100644 --- a/package.json +++ b/package.json @@ -39,6 +39,7 @@ "devDependencies": { "aegir": "^17.0.0", "chai": "^4.2.0", + "detect-node": "^2.0.4", "dirty-chai": "^2.0.1", "ipfs-unixfs-exporter": "~0.35.4", "ipld": "~0.20.0", diff --git a/test/builder-balanced.js b/test/builder-balanced.spec.js similarity index 100% rename from test/builder-balanced.js rename to test/builder-balanced.spec.js diff --git a/test/builder-dir-sharding.js b/test/builder-dir-sharding.spec.js similarity index 100% rename from test/builder-dir-sharding.js rename to test/builder-dir-sharding.spec.js diff --git a/test/builder-flat.js b/test/builder-flat.spec.js similarity index 100% rename from test/builder-flat.js rename to test/builder-flat.spec.js diff --git a/test/builder-only-hash.js b/test/builder-only-hash.spec.js similarity index 100% rename from test/builder-only-hash.js rename to test/builder-only-hash.spec.js diff --git a/test/builder-trickle-dag.js b/test/builder-trickle-dag.spec.js similarity index 100% rename from test/builder-trickle-dag.js rename to test/builder-trickle-dag.spec.js diff --git a/test/builder.js b/test/builder.spec.js similarity index 100% rename from test/builder.js rename to test/builder.spec.js diff --git a/test/chunker-fixed-size.js b/test/chunker-fixed-size.spec.js similarity index 
93% rename from test/chunker-fixed-size.js rename to test/chunker-fixed-size.spec.js index c1c5b95..4110e23 100644 --- a/test/chunker-fixed-size.js +++ b/test/chunker-fixed-size.spec.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chunker = require('./../src/chunker/fixed-size') +const chunker = require('../src/chunker/fixed-size') const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect @@ -11,12 +11,18 @@ const values = require('pull-stream/sources/values') const take = require('pull-stream/throughs/take') const collect = require('pull-stream/sinks/collect') const loadFixture = require('aegir/fixtures') - +const isNode = require('detect-node') const rawFile = loadFixture('test/fixtures/1MiB.txt') describe('chunker: fixed size', function () { this.timeout(30000) + before(function () { + if (!isNode) { + this.skip() + } + }) + it('chunks non flat buffers', (done) => { const b1 = Buffer.alloc(2 * 256) const b2 = Buffer.alloc(1 * 256) diff --git a/test/chunker-rabin-browser.js b/test/chunker-rabin-browser.spec.js similarity index 74% rename from test/chunker-rabin-browser.js rename to test/chunker-rabin-browser.spec.js index 2daf6da..fe4ca62 100644 --- a/test/chunker-rabin-browser.js +++ b/test/chunker-rabin-browser.spec.js @@ -1,16 +1,23 @@ /* eslint-env mocha */ 'use strict' -const chunker = require('./../src/chunker/rabin') +const chunker = require('../src/chunker/rabin') const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect const pull = require('pull-stream/pull') const values = require('pull-stream/sources/values') const collect = require('pull-stream/sinks/collect') +const isNode = require('detect-node') -describe('chunker: rabin browser', function () { - it('returns an error', (done) => { +describe('chunker: rabin browser', () => { + before(function () { + if (isNode) { + this.skip() + } + }) + + it('returns an error', function (done) { const b1 = Buffer.alloc(2 * 256) const b2 = Buffer.alloc(1 * 256) const b3 = Buffer.alloc(5 * 256) diff --git a/test/chunker-rabin.js b/test/chunker-rabin.spec.js similarity index 94% rename from test/chunker-rabin.js rename to test/chunker-rabin.spec.js index 8fbd2d0..29c1dfa 100644 --- a/test/chunker-rabin.js +++ b/test/chunker-rabin.spec.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chunker = require('./../src/chunker/rabin') +const chunker = require('../src/chunker/rabin') const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect @@ -10,6 +10,7 @@ const values = require('pull-stream/sources/values') const collect = require('pull-stream/sinks/collect') const loadFixture = require('aegir/fixtures') const os = require('os') +const isNode = require('detect-node') const rawFile = loadFixture('test/fixtures/1MiB.txt') @@ -20,6 +21,10 @@ describe('chunker: rabin', function () { if (os.platform() === 'win32') { return this.skip() } + + if (!isNode) { + this.skip() + } }) it('chunks non flat buffers', (done) => { diff --git a/test/hash-parity-with-go-ipfs.js b/test/hash-parity-with-go-ipfs.spec.js similarity index 97% rename from test/hash-parity-with-go-ipfs.js rename to test/hash-parity-with-go-ipfs.spec.js index a9ef437..7d4271d 100644 --- a/test/hash-parity-with-go-ipfs.js +++ b/test/hash-parity-with-go-ipfs.spec.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const importer = require('./../src') +const importer = require('../src') const chai = require('chai') chai.use(require('dirty-chai')) diff --git 
a/test/import-export-nested-dir.js b/test/import-export-nested-dir.spec.js similarity index 99% rename from test/import-export-nested-dir.js rename to test/import-export-nested-dir.spec.js index 8b5f371..ec54cec 100644 --- a/test/import-export-nested-dir.js +++ b/test/import-export-nested-dir.spec.js @@ -11,7 +11,7 @@ const collect = require('pull-stream/sinks/collect') const map = require('async/map') const CID = require('cids') -const importer = require('./../src') +const importer = require('../src') const exporter = require('ipfs-unixfs-exporter') describe('import and export: directory', () => { diff --git a/test/import-export.js b/test/import-export.spec.js similarity index 98% rename from test/import-export.js rename to test/import-export.spec.js index f2a276d..402c302 100644 --- a/test/import-export.js +++ b/test/import-export.spec.js @@ -15,7 +15,7 @@ const collect = require('pull-stream/sinks/collect') const loadFixture = require('aegir/fixtures') const bigFile = loadFixture('test/fixtures/1.2MiB.txt') -const importer = require('./../src') +const importer = require('../src') const exporter = require('ipfs-unixfs-exporter') const strategies = [ diff --git a/test/importer-flush.js b/test/importer-flush.spec.js similarity index 99% rename from test/importer-flush.js rename to test/importer-flush.spec.js index 0144929..e0dd6c8 100644 --- a/test/importer-flush.js +++ b/test/importer-flush.spec.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const createImporter = require('./../src') +const createImporter = require('../src') const chai = require('chai') chai.use(require('dirty-chai')) diff --git a/test/importer.js b/test/importer.spec.js similarity index 99% rename from test/importer.js rename to test/importer.spec.js index 1704396..f92f419 100644 --- a/test/importer.js +++ b/test/importer.spec.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const importer = require('./../src') +const importer = require('../src') const exporter = require('ipfs-unixfs-exporter') const extend = require('deep-extend') diff --git a/test/node.js b/test/node.js index 527f5e5..8ff0775 100644 --- a/test/node.js +++ b/test/node.js @@ -2,23 +2,24 @@ 'use strict' // Chunkers -require('./chunker-fixed-size') -require('./chunker-rabin') +require('./chunker-fixed-size.spec') +require('./chunker-rabin.spec') +require('./chunker-rabin-browser.spec') // Graph Builders -require('./builder') -require('./builder-flat') -require('./builder-balanced') -require('./builder-trickle-dag') -require('./builder-only-hash') -require('./builder-dir-sharding') +require('./builder.spec') +require('./builder-flat.spec') +require('./builder-balanced.spec') +require('./builder-trickle-dag.spec') +require('./builder-only-hash.spec') +require('./builder-dir-sharding.spec') // Importer -require('./importer') -require('./importer-flush') +require('./importer.spec') +require('./importer-flush.spec') // Other -require('./import-export') -require('./import-export-nested-dir') -require('./hash-parity-with-go-ipfs') -require('./with-dag-api') +require('./import-export.spec') +require('./import-export-nested-dir.spec') +require('./hash-parity-with-go-ipfs.spec') +require('./with-dag-api.spec') diff --git a/test/with-dag-api.js b/test/with-dag-api.spec.js similarity index 100% rename from test/with-dag-api.js rename to test/with-dag-api.spec.js From 69210b6ef211671255ff987620b15928c3f3d4b5 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 13 Dec 2018 17:28:10 +0000 Subject: [PATCH 3/4] fix: increase sharding timeouts --- 
test/builder-dir-sharding.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/builder-dir-sharding.spec.js b/test/builder-dir-sharding.spec.js index d1727be..015da46 100644 --- a/test/builder-dir-sharding.spec.js +++ b/test/builder-dir-sharding.spec.js @@ -19,7 +19,7 @@ const leftPad = require('left-pad') const CID = require('cids') describe('builder: directory sharding', function () { - this.timeout(30 * 1000) + this.timeout(60 * 1000) let ipld From ad1b0967fff396a368bfff0512b996686079650a Mon Sep 17 00:00:00 2001 From: achingbrain Date: Wed, 19 Dec 2018 11:42:11 +0000 Subject: [PATCH 4/4] chore: remove redundant test importers --- test/browser.js | 4 ---- test/node.js | 25 ------------------------- 2 files changed, 29 deletions(-) delete mode 100644 test/browser.js delete mode 100644 test/node.js diff --git a/test/browser.js b/test/browser.js deleted file mode 100644 index 7e94834..0000000 --- a/test/browser.js +++ /dev/null @@ -1,4 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -require('./node') diff --git a/test/node.js b/test/node.js deleted file mode 100644 index 8ff0775..0000000 --- a/test/node.js +++ /dev/null @@ -1,25 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -// Chunkers -require('./chunker-fixed-size.spec') -require('./chunker-rabin.spec') -require('./chunker-rabin-browser.spec') - -// Graph Builders -require('./builder.spec') -require('./builder-flat.spec') -require('./builder-balanced.spec') -require('./builder-trickle-dag.spec') -require('./builder-only-hash.spec') -require('./builder-dir-sharding.spec') - -// Importer -require('./importer.spec') -require('./importer-flush.spec') - -// Other -require('./import-export.spec') -require('./import-export-nested-dir.spec') -require('./hash-parity-with-go-ipfs.spec') -require('./with-dag-api.spec')
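
For reference, a minimal sketch of the in-memory IPLD pattern these patches converge on: IPLD.inMemory hands a resolver straight to the importer, so no ipfs-repo, block service or on-disk fixtures are needed. This is an illustration, not part of the patches; the require path ('../src'), the sample file name and its content are assumptions chosen to match the test layout above.

'use strict'

const IPLD = require('ipld')
const pull = require('pull-stream/pull')
const values = require('pull-stream/sources/values')
const collect = require('pull-stream/sinks/collect')
const importer = require('../src') // path assumed, as in the tests above

IPLD.inMemory((err, ipld) => {
  if (err) throw err

  pull(
    // a single small file, kept in memory end to end
    values([{
      path: 'hello.txt',
      content: values([Buffer.from('hello world')])
    }]),
    // never shard, mirroring the non-sharded dirbuilder case
    importer(ipld, { shardSplitThreshold: Infinity }),
    collect((err, files) => {
      if (err) throw err
      // each entry carries { path, multihash, size }, as asserted throughout the tests
      console.log(files.map(f => f.path))
    })
  )
})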