diff --git a/package.json b/package.json
index c24571e..723d5c3 100644
--- a/package.json
+++ b/package.json
@@ -39,18 +39,13 @@
   "devDependencies": {
     "aegir": "^17.0.0",
     "chai": "^4.2.0",
+    "detect-node": "^2.0.4",
     "dirty-chai": "^2.0.1",
-    "ipfs-block-service": "~0.15.1",
-    "ipfs-repo": "~0.25.0",
     "ipfs-unixfs-exporter": "~0.35.4",
     "ipld": "~0.20.0",
-    "mkdirp": "~0.5.1",
     "multihashes": "~0.4.14",
-    "ncp": "^2.0.0",
     "pull-generate": "^2.2.0",
-    "pull-stream-to-stream": "^1.3.4",
-    "pull-zip": "^2.0.1",
-    "rimraf": "^2.6.2",
+    "pull-traverse": "^1.0.3",
     "sinon": "^7.1.0"
   },
   "dependencies": {
@@ -65,14 +60,11 @@
     "multihashing-async": "~0.5.1",
     "pull-batch": "^1.0.0",
     "pull-block": "^1.4.0",
-    "pull-cat": "^1.1.11",
     "pull-pair": "^1.1.0",
-    "pull-paramap": "^1.2.2",
     "pull-pause": "0.0.2",
     "pull-pushable": "^2.2.0",
     "pull-stream": "^3.6.9",
     "pull-through": "^1.0.18",
-    "pull-traverse": "^1.0.3",
     "pull-write": "^1.1.4",
     "stream-to-pull-stream": "^1.7.2"
   },
diff --git a/test/browser.js b/test/browser.js
deleted file mode 100644
index e846743..0000000
--- a/test/browser.js
+++ /dev/null
@@ -1,59 +0,0 @@
-/* eslint-env mocha */
-/* global self */
-'use strict'
-
-const series = require('async/series')
-const IPFSRepo = require('ipfs-repo')
-
-const idb = self.indexedDB ||
-  self.mozIndexedDB ||
-  self.webkitIndexedDB ||
-  self.msIndexedDB
-
-idb.deleteDatabase('ipfs')
-idb.deleteDatabase('ipfs/blocks')
-
-describe('IPFS data importing tests on the Browser', function () {
-  const repo = new IPFSRepo('ipfs')
-
-  before((done) => {
-    series([
-      (cb) => repo.init({}, cb),
-      (cb) => repo.open(cb)
-    ], done)
-  })
-
-  after((done) => {
-    series([
-      (cb) => repo.close(cb),
-      (cb) => {
-        idb.deleteDatabase('ipfs')
-        idb.deleteDatabase('ipfs/blocks')
-        cb()
-      }
-    ], done)
-  })
-
-  // Chunkers
-  require('./chunker-fixed-size')
-  require('./chunker-rabin-browser')
-
-  // Graph Builders
-  require('./builder')(repo)
-  require('./builder-flat')
-  require('./builder-balanced')
-  require('./builder-trickle-dag')
-  require('./builder-only-hash')(repo)
-  // TODO: make these tests not require data on the repo
-  // require('./builder-dir-sharding')(repo)
-
-  // Importer
-  require('./importer')(repo)
-  require('./importer-flush')(repo)
-
-  // Other
-  require('./import-export')(repo)
-  require('./import-export-nested-dir')(repo)
-  require('./hash-parity-with-go-ipfs')(repo)
-  // require('./with-dag-api')
-})
diff --git a/test/builder-balanced.js b/test/builder-balanced.spec.js
similarity index 99%
rename from test/builder-balanced.js
rename to test/builder-balanced.spec.js
index 4c8bb28..73fe7e6 100644
--- a/test/builder-balanced.js
+++ b/test/builder-balanced.spec.js
@@ -7,7 +7,6 @@ const expect = chai.expect
 const pull = require('pull-stream/pull')
 const values = require('pull-stream/sources/values')
 const collect = require('pull-stream/sinks/collect')
-
 const builder = require('../src/builder/balanced')
 
 function reduce (leaves, callback) {
diff --git a/test/builder-dir-sharding.js b/test/builder-dir-sharding.js
deleted file mode 100644
index 267e088..0000000
--- a/test/builder-dir-sharding.js
+++ /dev/null
@@ -1,398 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const importer = require('./../src')
-const exporter = require('ipfs-unixfs-exporter')
-
-const chai = require('chai')
-chai.use(require('dirty-chai'))
-const expect = chai.expect
-const BlockService = require('ipfs-block-service')
-const Ipld = require('ipld')
-const pull = require('pull-stream/pull')
-const values = require('pull-stream/sources/values')
-const asyncMap = require('pull-stream/throughs/async-map') -const collect = require('pull-stream/sinks/collect') -const pushable = require('pull-pushable') -const whilst = require('async/whilst') -const setImmediate = require('async/setImmediate') -const leftPad = require('left-pad') -const CID = require('cids') - -module.exports = (repo) => { - describe('builder: directory sharding', function () { - this.timeout(30 * 1000) - - let ipld - - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) - }) - - describe('basic dirbuilder', () => { - let nonShardedHash, shardedHash - - it('yields a non-sharded dir', (done) => { - const options = { - shardSplitThreshold: Infinity // never shard - } - - pull( - values([ - { - path: 'a/b', - content: pull.values([Buffer.from('i have the best bytes')]) - } - ]), - importer(ipld, options), - collect((err, nodes) => { - try { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(2) - expect(nodes[0].path).to.be.eql('a/b') - expect(nodes[1].path).to.be.eql('a') - nonShardedHash = nodes[1].multihash - expect(nonShardedHash).to.exist() - done() - } catch (err) { - done(err) - } - }) - ) - }) - - it('yields a sharded dir', (done) => { - const options = { - shardSplitThreshold: 0 // always shard - } - - pull( - values([ - { - path: 'a/b', - content: values([Buffer.from('i have the best bytes')]) - } - ]), - importer(ipld, options), - collect((err, nodes) => { - try { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(2) - expect(nodes[0].path).to.be.eql('a/b') - expect(nodes[1].path).to.be.eql('a') - shardedHash = nodes[1].multihash - // hashes are different - expect(shardedHash).to.not.equal(nonShardedHash) - done() - } catch (err) { - done(err) - } - }) - ) - }) - - it('exporting unsharded hash results in the correct files', (done) => { - pull( - exporter(nonShardedHash, ipld), - collect((err, nodes) => { - try { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(2) - const expectedHash = new CID(nonShardedHash).toBaseEncodedString() - expect(nodes[0].path).to.be.eql(expectedHash) - expect(new CID(nodes[0].hash).toBaseEncodedString()).to.be.eql(expectedHash) - expect(nodes[1].path).to.be.eql(expectedHash + '/b') - expect(nodes[1].size).to.be.eql(29) - } catch (err) { - return done(err) - } - - pull( - nodes[1].content, - collect(collected) - ) - }) - ) - - function collected (err, content) { - try { - expect(err).to.not.exist() - expect(content.length).to.be.eql(1) - expect(content[0].toString()).to.be.eql('i have the best bytes') - done() - } catch (err) { - done(err) - } - } - }) - - it('exporting sharded hash results in the correct files', (done) => { - pull( - exporter(shardedHash, ipld), - collect((err, nodes) => { - try { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(2) - const expectedHash = new CID(shardedHash).toBaseEncodedString() - expect(nodes[0].path).to.be.eql(expectedHash) - expect(new CID(nodes[0].hash).toBaseEncodedString()).to.be.eql(expectedHash) - expect(nodes[1].path).to.be.eql(expectedHash + '/b') - expect(nodes[1].size).to.be.eql(21) - } catch (err) { - return done(err) - } - - pull( - nodes[1].content, - collect(collected) - ) - }) - ) - - function collected (err, content) { - try { - expect(err).to.not.exist() - expect(content.length).to.be.eql(1) - expect(content[0].toString()).to.be.eql('i have the best bytes') - done() - } catch (err) { - done(err) - } - } - }) - }) - - describe('big dir', () => { - const maxDirs = 2000 - let rootHash - - 
it('imports a big dir', (done) => { - const push = pushable() - pull( - push, - importer(ipld), - collect((err, nodes) => { - try { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(maxDirs + 1) - const last = nodes[nodes.length - 1] - expect(last.path).to.be.eql('big') - rootHash = last.multihash - done() - } catch (err) { - done(err) - } - }) - ) - - let pending = maxDirs - let i = 0 - - whilst( - () => pending, - (callback) => { - pending-- - i++ - const pushable = { - path: 'big/' + leftPad(i.toString(), 4, '0'), - content: values([Buffer.from(i.toString())]) - } - push.push(pushable) - setImmediate(callback) - }, - (err) => { - expect(err).to.not.exist() - push.end() - } - ) - }) - - it('exports a big dir', (done) => { - const contentEntries = [] - const entries = {} - pull( - exporter(rootHash, ipld), - asyncMap((node, callback) => { - if (node.content) { - pull( - node.content, - collect(collected) - ) - } else { - entries[node.path] = node - callback() - } - - function collected (err, content) { - expect(err).to.not.exist() - entries[node.path] = { content: content.toString() } - callback(null, node) - } - }), - collect((err, nodes) => { - expect(err).to.not.exist() - const paths = Object.keys(entries).sort() - expect(paths.length).to.be.eql(2001) - paths.forEach(eachPath) - done() - }) - ) - - function eachPath (path, index) { - if (!index) { - // first dir - expect(path).to.be.eql(new CID(rootHash).toBaseEncodedString()) - const entry = entries[path] - expect(entry).to.exist() - expect(entry.content).to.not.exist() - return - } - // dir entries - const content = entries[path] && entries[path].content - if (content) { - expect(content).to.be.eql(index.toString()) - contentEntries.push(path) - } - } - }) - }) - - describe('big nested dir', () => { - const maxDirs = 2000 - const maxDepth = 3 - let rootHash - - it('imports a big dir', (done) => { - const push = pushable() - pull( - push, - importer(ipld), - collect((err, nodes) => { - expect(err).to.not.exist() - const last = nodes[nodes.length - 1] - expect(last.path).to.be.eql('big') - rootHash = last.multihash - done() - }) - ) - - let pending = maxDirs - let pendingDepth = maxDepth - let i = 0 - let depth = 1 - - whilst( - () => pendingDepth && pending, - (callback) => { - i++ - const dir = [] - for (let d = 0; d < depth; d++) { - dir.push('big') - } - const pushed = { - path: dir.concat(leftPad(i.toString(), 4, '0')).join('/'), - content: values([Buffer.from(i.toString())]) - } - push.push(pushed) - pending-- - if (!pending) { - pendingDepth-- - pending = maxDirs - i = 0 - depth++ - } - setImmediate(callback) - }, - (err) => { - expect(err).to.not.exist() - push.end() - } - ) - }) - - it('exports a big dir', (done) => { - const entries = {} - pull( - exporter(rootHash, ipld), - asyncMap((node, callback) => { - if (node.content) { - pull( - node.content, - collect(collected) - ) - } else { - entries[node.path] = node - callback() - } - - function collected (err, content) { - expect(err).to.not.exist() - entries[node.path] = { content: content.toString() } - callback(null, node) - } - }), - collect(collected) - ) - - function collected (err, nodes) { - expect(err).to.not.exist() - const paths = Object.keys(entries).sort() - expect(paths.length).to.be.eql(maxDepth * maxDirs + maxDepth) - let index = 0 - let depth = 1 - paths.forEach(eachPath) - done() - - function eachPath (path) { - if (!index) { - // first dir - if (depth === 1) { - expect(path).to.be.eql(new CID(rootHash).toBaseEncodedString()) - } - const entry = 
entries[path] - expect(entry).to.exist() - expect(entry.content).to.not.exist() - } else { - // dir entries - const pathElements = path.split('/') - expect(pathElements.length).to.be.eql(depth + 1) - const lastElement = pathElements[pathElements.length - 1] - expect(lastElement).to.be.eql(leftPad(index.toString(), 4, '0')) - expect(entries[path].content).to.be.eql(index.toString()) - } - index++ - if (index > maxDirs) { - index = 0 - depth++ - } - } - } - }) - - it('exports a big dir with subpath', (done) => { - const exportHash = new CID(rootHash).toBaseEncodedString() + '/big/big/2000' - pull( - exporter(exportHash, ipld), - collect(collected) - ) - - function collected (err, nodes) { - expect(err).to.not.exist() - expect(nodes.length).to.equal(1) - expect(nodes.map((node) => node.path)).to.deep.equal([ - '2000' - ]) - pull( - nodes[0].content, - collect((err, content) => { - expect(err).to.not.exist() - expect(content.toString()).to.equal('2000') - done() - }) - ) - } - }) - }) - }) -} diff --git a/test/builder-dir-sharding.spec.js b/test/builder-dir-sharding.spec.js new file mode 100644 index 0000000..015da46 --- /dev/null +++ b/test/builder-dir-sharding.spec.js @@ -0,0 +1,400 @@ +/* eslint-env mocha */ +'use strict' + +const importer = require('../src') +const exporter = require('ipfs-unixfs-exporter') + +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const IPLD = require('ipld') +const pull = require('pull-stream/pull') +const values = require('pull-stream/sources/values') +const asyncMap = require('pull-stream/throughs/async-map') +const collect = require('pull-stream/sinks/collect') +const pushable = require('pull-pushable') +const whilst = require('async/whilst') +const setImmediate = require('async/setImmediate') +const leftPad = require('left-pad') +const CID = require('cids') + +describe('builder: directory sharding', function () { + this.timeout(60 * 1000) + + let ipld + + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() + + ipld = resolver + + done() + }) + }) + + describe('basic dirbuilder', () => { + let nonShardedHash, shardedHash + + it('yields a non-sharded dir', (done) => { + const options = { + shardSplitThreshold: Infinity // never shard + } + + pull( + values([ + { + path: 'a/b', + content: pull.values([Buffer.from('i have the best bytes')]) + } + ]), + importer(ipld, options), + collect((err, nodes) => { + try { + expect(err).to.not.exist() + expect(nodes.length).to.be.eql(2) + expect(nodes[0].path).to.be.eql('a/b') + expect(nodes[1].path).to.be.eql('a') + nonShardedHash = nodes[1].multihash + expect(nonShardedHash).to.exist() + done() + } catch (err) { + done(err) + } + }) + ) + }) + + it('yields a sharded dir', (done) => { + const options = { + shardSplitThreshold: 0 // always shard + } + + pull( + values([ + { + path: 'a/b', + content: values([Buffer.from('i have the best bytes')]) + } + ]), + importer(ipld, options), + collect((err, nodes) => { + try { + expect(err).to.not.exist() + expect(nodes.length).to.be.eql(2) + expect(nodes[0].path).to.be.eql('a/b') + expect(nodes[1].path).to.be.eql('a') + shardedHash = nodes[1].multihash + // hashes are different + expect(shardedHash).to.not.equal(nonShardedHash) + done() + } catch (err) { + done(err) + } + }) + ) + }) + + it('exporting unsharded hash results in the correct files', (done) => { + pull( + exporter(nonShardedHash, ipld), + collect((err, nodes) => { + try { + expect(err).to.not.exist() + expect(nodes.length).to.be.eql(2) + const 
expectedHash = new CID(nonShardedHash).toBaseEncodedString() + expect(nodes[0].path).to.be.eql(expectedHash) + expect(new CID(nodes[0].hash).toBaseEncodedString()).to.be.eql(expectedHash) + expect(nodes[1].path).to.be.eql(expectedHash + '/b') + expect(nodes[1].size).to.be.eql(29) + } catch (err) { + return done(err) + } + + pull( + nodes[1].content, + collect(collected) + ) + }) + ) + + function collected (err, content) { + try { + expect(err).to.not.exist() + expect(content.length).to.be.eql(1) + expect(content[0].toString()).to.be.eql('i have the best bytes') + done() + } catch (err) { + done(err) + } + } + }) + + it('exporting sharded hash results in the correct files', (done) => { + pull( + exporter(shardedHash, ipld), + collect((err, nodes) => { + try { + expect(err).to.not.exist() + expect(nodes.length).to.be.eql(2) + const expectedHash = new CID(shardedHash).toBaseEncodedString() + expect(nodes[0].path).to.be.eql(expectedHash) + expect(new CID(nodes[0].hash).toBaseEncodedString()).to.be.eql(expectedHash) + expect(nodes[1].path).to.be.eql(expectedHash + '/b') + expect(nodes[1].size).to.be.eql(21) + } catch (err) { + return done(err) + } + + pull( + nodes[1].content, + collect(collected) + ) + }) + ) + + function collected (err, content) { + try { + expect(err).to.not.exist() + expect(content.length).to.be.eql(1) + expect(content[0].toString()).to.be.eql('i have the best bytes') + done() + } catch (err) { + done(err) + } + } + }) + }) + + describe('big dir', () => { + const maxDirs = 2000 + let rootHash + + it('imports a big dir', (done) => { + const push = pushable() + pull( + push, + importer(ipld), + collect((err, nodes) => { + try { + expect(err).to.not.exist() + expect(nodes.length).to.be.eql(maxDirs + 1) + const last = nodes[nodes.length - 1] + expect(last.path).to.be.eql('big') + rootHash = last.multihash + done() + } catch (err) { + done(err) + } + }) + ) + + let pending = maxDirs + let i = 0 + + whilst( + () => pending, + (callback) => { + pending-- + i++ + const pushable = { + path: 'big/' + leftPad(i.toString(), 4, '0'), + content: values([Buffer.from(i.toString())]) + } + push.push(pushable) + setImmediate(callback) + }, + (err) => { + expect(err).to.not.exist() + push.end() + } + ) + }) + + it('exports a big dir', (done) => { + const contentEntries = [] + const entries = {} + pull( + exporter(rootHash, ipld), + asyncMap((node, callback) => { + if (node.content) { + pull( + node.content, + collect(collected) + ) + } else { + entries[node.path] = node + callback() + } + + function collected (err, content) { + expect(err).to.not.exist() + entries[node.path] = { content: content.toString() } + callback(null, node) + } + }), + collect((err, nodes) => { + expect(err).to.not.exist() + const paths = Object.keys(entries).sort() + expect(paths.length).to.be.eql(2001) + paths.forEach(eachPath) + done() + }) + ) + + function eachPath (path, index) { + if (!index) { + // first dir + expect(path).to.be.eql(new CID(rootHash).toBaseEncodedString()) + const entry = entries[path] + expect(entry).to.exist() + expect(entry.content).to.not.exist() + return + } + // dir entries + const content = entries[path] && entries[path].content + if (content) { + expect(content).to.be.eql(index.toString()) + contentEntries.push(path) + } + } + }) + }) + + describe('big nested dir', () => { + const maxDirs = 2000 + const maxDepth = 3 + let rootHash + + it('imports a big dir', (done) => { + const push = pushable() + pull( + push, + importer(ipld), + collect((err, nodes) => { + expect(err).to.not.exist() + 
const last = nodes[nodes.length - 1] + expect(last.path).to.be.eql('big') + rootHash = last.multihash + done() + }) + ) + + let pending = maxDirs + let pendingDepth = maxDepth + let i = 0 + let depth = 1 + + whilst( + () => pendingDepth && pending, + (callback) => { + i++ + const dir = [] + for (let d = 0; d < depth; d++) { + dir.push('big') + } + const pushed = { + path: dir.concat(leftPad(i.toString(), 4, '0')).join('/'), + content: values([Buffer.from(i.toString())]) + } + push.push(pushed) + pending-- + if (!pending) { + pendingDepth-- + pending = maxDirs + i = 0 + depth++ + } + setImmediate(callback) + }, + (err) => { + expect(err).to.not.exist() + push.end() + } + ) + }) + + it('exports a big dir', (done) => { + const entries = {} + pull( + exporter(rootHash, ipld), + asyncMap((node, callback) => { + if (node.content) { + pull( + node.content, + collect(collected) + ) + } else { + entries[node.path] = node + callback() + } + + function collected (err, content) { + expect(err).to.not.exist() + entries[node.path] = { content: content.toString() } + callback(null, node) + } + }), + collect(collected) + ) + + function collected (err, nodes) { + expect(err).to.not.exist() + const paths = Object.keys(entries).sort() + expect(paths.length).to.be.eql(maxDepth * maxDirs + maxDepth) + let index = 0 + let depth = 1 + paths.forEach(eachPath) + done() + + function eachPath (path) { + if (!index) { + // first dir + if (depth === 1) { + expect(path).to.be.eql(new CID(rootHash).toBaseEncodedString()) + } + const entry = entries[path] + expect(entry).to.exist() + expect(entry.content).to.not.exist() + } else { + // dir entries + const pathElements = path.split('/') + expect(pathElements.length).to.be.eql(depth + 1) + const lastElement = pathElements[pathElements.length - 1] + expect(lastElement).to.be.eql(leftPad(index.toString(), 4, '0')) + expect(entries[path].content).to.be.eql(index.toString()) + } + index++ + if (index > maxDirs) { + index = 0 + depth++ + } + } + } + }) + + it('exports a big dir with subpath', (done) => { + const exportHash = new CID(rootHash).toBaseEncodedString() + '/big/big/2000' + pull( + exporter(exportHash, ipld), + collect(collected) + ) + + function collected (err, nodes) { + expect(err).to.not.exist() + expect(nodes.length).to.equal(1) + expect(nodes.map((node) => node.path)).to.deep.equal([ + '2000' + ]) + pull( + nodes[0].content, + collect((err, content) => { + expect(err).to.not.exist() + expect(content.toString()).to.equal('2000') + done() + }) + ) + } + }) + }) +}) diff --git a/test/builder-flat.js b/test/builder-flat.spec.js similarity index 100% rename from test/builder-flat.js rename to test/builder-flat.spec.js diff --git a/test/builder-only-hash.js b/test/builder-only-hash.js deleted file mode 100644 index 7bf14fd..0000000 --- a/test/builder-only-hash.js +++ /dev/null @@ -1,55 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const BlockService = require('ipfs-block-service') -const pull = require('pull-stream/pull') -const values = require('pull-stream/sources/values') -const collect = require('pull-stream/sinks/collect') -const Ipld = require('ipld') -const CID = require('cids') -const createBuilder = require('../src/builder') -const FixedSizeChunker = require('../src/chunker/fixed-size') - -module.exports = (repo) => { - describe('builder: onlyHash', () => { - let ipld - - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) - }) 
- - it('will only chunk and hash if passed an "onlyHash" option', (done) => { - const onCollected = (err, nodes) => { - if (err) return done(err) - - const node = nodes[0] - expect(node).to.exist() - - ipld.get(new CID(node.multihash), (err, res) => { - expect(err).to.exist() - done() - }) - } - - const content = String(Math.random() + Date.now()) - const inputFile = { - path: content + '.txt', - content: Buffer.from(content) - } - - const options = { - onlyHash: true - } - - pull( - values([inputFile]), - createBuilder(FixedSizeChunker, ipld, options), - collect(onCollected) - ) - }) - }) -} diff --git a/test/builder-only-hash.spec.js b/test/builder-only-hash.spec.js new file mode 100644 index 0000000..464664d --- /dev/null +++ b/test/builder-only-hash.spec.js @@ -0,0 +1,57 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const pull = require('pull-stream/pull') +const values = require('pull-stream/sources/values') +const collect = require('pull-stream/sinks/collect') +const IPLD = require('ipld') +const CID = require('cids') +const createBuilder = require('../src/builder') +const FixedSizeChunker = require('../src/chunker/fixed-size') + +describe('builder: onlyHash', () => { + let ipld + + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() + + ipld = resolver + + done() + }) + }) + + it('will only chunk and hash if passed an "onlyHash" option', (done) => { + const onCollected = (err, nodes) => { + if (err) return done(err) + + const node = nodes[0] + expect(node).to.exist() + + ipld.get(new CID(node.multihash), (err, res) => { + expect(err).to.exist() + done() + }) + } + + const content = String(Math.random() + Date.now()) + const inputFile = { + path: content + '.txt', + content: Buffer.from(content) + } + + const options = { + onlyHash: true + } + + pull( + values([inputFile]), + createBuilder(FixedSizeChunker, ipld, options), + collect(onCollected) + ) + }) +}) diff --git a/test/builder-trickle-dag.js b/test/builder-trickle-dag.spec.js similarity index 100% rename from test/builder-trickle-dag.js rename to test/builder-trickle-dag.spec.js diff --git a/test/builder.js b/test/builder.js deleted file mode 100644 index e82f0ab..0000000 --- a/test/builder.js +++ /dev/null @@ -1,145 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const pull = require('pull-stream/pull') -const values = require('pull-stream/sources/values') -const collect = require('pull-stream/sinks/collect') -const mh = require('multihashes') -const IPLD = require('ipld') -const eachSeries = require('async').eachSeries -const CID = require('cids') -const UnixFS = require('ipfs-unixfs') -const createBuilder = require('../src/builder') -const FixedSizeChunker = require('../src/chunker/fixed-size') - -module.exports = (repo) => { - describe('builder', () => { - let ipld - - const testMultihashes = Object.keys(mh.names).slice(1, 40) - - before((done) => { - IPLD.inMemory((err, resolver) => { - if (err) { - return done(err) - } - - ipld = resolver - - done() - }) - }) - - it('allows multihash hash algorithm to be specified', (done) => { - eachSeries(testMultihashes, (hashAlg, cb) => { - const options = { hashAlg, strategy: 'flat' } - const content = String(Math.random() + Date.now()) - const inputFile = { - path: content + '.txt', - content: Buffer.from(content) - } - - const onCollected = (err, nodes) => { - if (err) 
return cb(err) - - const node = nodes[0] - expect(node).to.exist() - - const cid = new CID(node.multihash) - - // Verify multihash has been encoded using hashAlg - expect(mh.decode(cid.multihash).name).to.equal(hashAlg) - - // Fetch using hashAlg encoded multihash - ipld.get(cid, (err, res) => { - if (err) return cb(err) - const content = UnixFS.unmarshal(res.value.data).data - expect(content.equals(inputFile.content)).to.be.true() - cb() - }) - } - - pull( - values([Object.assign({}, inputFile)]), - createBuilder(FixedSizeChunker, ipld, options), - collect(onCollected) - ) - }, done) - }) - - it('allows multihash hash algorithm to be specified for big file', function (done) { - this.timeout(30000) - - eachSeries(testMultihashes, (hashAlg, cb) => { - const options = { hashAlg, strategy: 'flat' } - const content = String(Math.random() + Date.now()) - const inputFile = { - path: content + '.txt', - // Bigger than maxChunkSize - content: Buffer.alloc(262144 + 5).fill(1) - } - - const onCollected = (err, nodes) => { - if (err) return cb(err) - - const node = nodes[0] - - try { - expect(node).to.exist() - const cid = new CID(node.multihash) - expect(mh.decode(cid.multihash).name).to.equal(hashAlg) - } catch (err) { - return cb(err) - } - - cb() - } - - pull( - values([Object.assign({}, inputFile)]), - createBuilder(FixedSizeChunker, ipld, options), - collect(onCollected) - ) - }, done) - }) - - it('allows multihash hash algorithm to be specified for a directory', (done) => { - eachSeries(testMultihashes, (hashAlg, cb) => { - const options = { hashAlg, strategy: 'flat' } - const inputFile = { - path: `${String(Math.random() + Date.now())}-dir`, - content: null - } - - const onCollected = (err, nodes) => { - if (err) return cb(err) - - const node = nodes[0] - - expect(node).to.exist() - - const cid = new CID(node.multihash) - - expect(mh.decode(cid.multihash).name).to.equal(hashAlg) - - // Fetch using hashAlg encoded multihash - ipld.get(cid, (err, res) => { - if (err) return cb(err) - const meta = UnixFS.unmarshal(res.value.data) - expect(meta.type).to.equal('directory') - cb() - }) - } - - pull( - values([Object.assign({}, inputFile)]), - createBuilder(FixedSizeChunker, ipld, options), - collect(onCollected) - ) - }, done) - }) - }) -} diff --git a/test/builder.spec.js b/test/builder.spec.js new file mode 100644 index 0000000..5975009 --- /dev/null +++ b/test/builder.spec.js @@ -0,0 +1,141 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const pull = require('pull-stream/pull') +const values = require('pull-stream/sources/values') +const collect = require('pull-stream/sinks/collect') +const mh = require('multihashes') +const IPLD = require('ipld') +const eachSeries = require('async').eachSeries +const CID = require('cids') +const UnixFS = require('ipfs-unixfs') +const createBuilder = require('../src/builder') +const FixedSizeChunker = require('../src/chunker/fixed-size') + +describe('builder', () => { + let ipld + + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() + + ipld = resolver + + done() + }) + }) + + const testMultihashes = Object.keys(mh.names).slice(1, 40) + + it('allows multihash hash algorithm to be specified', (done) => { + eachSeries(testMultihashes, (hashAlg, cb) => { + const options = { hashAlg, strategy: 'flat' } + const content = String(Math.random() + Date.now()) + const inputFile = { + path: content + '.txt', + content: Buffer.from(content) + } + + const 
onCollected = (err, nodes) => { + if (err) return cb(err) + + const node = nodes[0] + expect(node).to.exist() + + const cid = new CID(node.multihash) + + // Verify multihash has been encoded using hashAlg + expect(mh.decode(cid.multihash).name).to.equal(hashAlg) + + // Fetch using hashAlg encoded multihash + ipld.get(cid, (err, res) => { + if (err) return cb(err) + const content = UnixFS.unmarshal(res.value.data).data + expect(content.equals(inputFile.content)).to.be.true() + cb() + }) + } + + pull( + values([Object.assign({}, inputFile)]), + createBuilder(FixedSizeChunker, ipld, options), + collect(onCollected) + ) + }, done) + }) + + it('allows multihash hash algorithm to be specified for big file', function (done) { + this.timeout(30000) + + eachSeries(testMultihashes, (hashAlg, cb) => { + const options = { hashAlg, strategy: 'flat' } + const content = String(Math.random() + Date.now()) + const inputFile = { + path: content + '.txt', + // Bigger than maxChunkSize + content: Buffer.alloc(262144 + 5).fill(1) + } + + const onCollected = (err, nodes) => { + if (err) return cb(err) + + const node = nodes[0] + + try { + expect(node).to.exist() + const cid = new CID(node.multihash) + expect(mh.decode(cid.multihash).name).to.equal(hashAlg) + } catch (err) { + return cb(err) + } + + cb() + } + + pull( + values([Object.assign({}, inputFile)]), + createBuilder(FixedSizeChunker, ipld, options), + collect(onCollected) + ) + }, done) + }) + + it('allows multihash hash algorithm to be specified for a directory', (done) => { + eachSeries(testMultihashes, (hashAlg, cb) => { + const options = { hashAlg, strategy: 'flat' } + const inputFile = { + path: `${String(Math.random() + Date.now())}-dir`, + content: null + } + + const onCollected = (err, nodes) => { + if (err) return cb(err) + + const node = nodes[0] + + expect(node).to.exist() + + const cid = new CID(node.multihash) + + expect(mh.decode(cid.multihash).name).to.equal(hashAlg) + + // Fetch using hashAlg encoded multihash + ipld.get(cid, (err, res) => { + if (err) return cb(err) + const meta = UnixFS.unmarshal(res.value.data) + expect(meta.type).to.equal('directory') + cb() + }) + } + + pull( + values([Object.assign({}, inputFile)]), + createBuilder(FixedSizeChunker, ipld, options), + collect(onCollected) + ) + }, done) + }) +}) diff --git a/test/chunker-fixed-size.js b/test/chunker-fixed-size.spec.js similarity index 93% rename from test/chunker-fixed-size.js rename to test/chunker-fixed-size.spec.js index c1c5b95..4110e23 100644 --- a/test/chunker-fixed-size.js +++ b/test/chunker-fixed-size.spec.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chunker = require('./../src/chunker/fixed-size') +const chunker = require('../src/chunker/fixed-size') const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect @@ -11,12 +11,18 @@ const values = require('pull-stream/sources/values') const take = require('pull-stream/throughs/take') const collect = require('pull-stream/sinks/collect') const loadFixture = require('aegir/fixtures') - +const isNode = require('detect-node') const rawFile = loadFixture('test/fixtures/1MiB.txt') describe('chunker: fixed size', function () { this.timeout(30000) + before(function () { + if (!isNode) { + this.skip() + } + }) + it('chunks non flat buffers', (done) => { const b1 = Buffer.alloc(2 * 256) const b2 = Buffer.alloc(1 * 256) diff --git a/test/chunker-rabin-browser.js b/test/chunker-rabin-browser.spec.js similarity index 74% rename from test/chunker-rabin-browser.js rename to 
test/chunker-rabin-browser.spec.js index 2daf6da..fe4ca62 100644 --- a/test/chunker-rabin-browser.js +++ b/test/chunker-rabin-browser.spec.js @@ -1,16 +1,23 @@ /* eslint-env mocha */ 'use strict' -const chunker = require('./../src/chunker/rabin') +const chunker = require('../src/chunker/rabin') const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect const pull = require('pull-stream/pull') const values = require('pull-stream/sources/values') const collect = require('pull-stream/sinks/collect') +const isNode = require('detect-node') -describe('chunker: rabin browser', function () { - it('returns an error', (done) => { +describe('chunker: rabin browser', () => { + before(function () { + if (isNode) { + this.skip() + } + }) + + it('returns an error', function (done) { const b1 = Buffer.alloc(2 * 256) const b2 = Buffer.alloc(1 * 256) const b3 = Buffer.alloc(5 * 256) diff --git a/test/chunker-rabin.js b/test/chunker-rabin.spec.js similarity index 94% rename from test/chunker-rabin.js rename to test/chunker-rabin.spec.js index 8fbd2d0..29c1dfa 100644 --- a/test/chunker-rabin.js +++ b/test/chunker-rabin.spec.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const chunker = require('./../src/chunker/rabin') +const chunker = require('../src/chunker/rabin') const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect @@ -10,6 +10,7 @@ const values = require('pull-stream/sources/values') const collect = require('pull-stream/sinks/collect') const loadFixture = require('aegir/fixtures') const os = require('os') +const isNode = require('detect-node') const rawFile = loadFixture('test/fixtures/1MiB.txt') @@ -20,6 +21,10 @@ describe('chunker: rabin', function () { if (os.platform() === 'win32') { return this.skip() } + + if (!isNode) { + this.skip() + } }) it('chunks non flat buffers', (done) => { diff --git a/test/hash-parity-with-go-ipfs.js b/test/hash-parity-with-go-ipfs.js deleted file mode 100644 index 66cbb89..0000000 --- a/test/hash-parity-with-go-ipfs.js +++ /dev/null @@ -1,65 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const importer = require('./../src') - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const BlockService = require('ipfs-block-service') -const pull = require('pull-stream/pull') -const values = require('pull-stream/sources/values') -const collect = require('pull-stream/sinks/collect') -const CID = require('cids') -const Ipld = require('ipld') -const randomByteStream = require('./helpers/finite-pseudorandom-byte-stream') - -const strategies = [ - 'flat', - 'trickle', - 'balanced' -] - -const expectedHashes = { - flat: 'QmRgXEDv6DL8uchf7h9j8hAGG8Fq5r1UZ6Jy3TQAPxEb76', - balanced: 'QmVY1TFpjYKSo8LRG9oYgH4iy9AduwDvBGNhqap1Gkxme3', - trickle: 'QmYPsm9oVGjWECkT7KikZmrf8imggqKe8uS8Jco3qfWUCH' -} - -module.exports = (repo) => { - strategies.forEach(strategy => { - const options = { - strategy: strategy - } - - describe('go-ipfs interop using importer:' + strategy, () => { - let ipld - - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) - }) - - it('yields the same tree as go-ipfs', function (done) { - this.timeout(10 * 1000) - pull( - values([ - { - path: 'big.dat', - content: randomByteStream(45900000, 7382) - } - ]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - expect(files.length).to.be.equal(1) - - const file = files[0] - expect(new 
CID(file.multihash).toBaseEncodedString()).to.be.equal(expectedHashes[strategy]) - done() - }) - ) - }) - }) - }) -} diff --git a/test/hash-parity-with-go-ipfs.spec.js b/test/hash-parity-with-go-ipfs.spec.js new file mode 100644 index 0000000..7d4271d --- /dev/null +++ b/test/hash-parity-with-go-ipfs.spec.js @@ -0,0 +1,67 @@ +/* eslint-env mocha */ +'use strict' + +const importer = require('../src') + +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const pull = require('pull-stream/pull') +const values = require('pull-stream/sources/values') +const collect = require('pull-stream/sinks/collect') +const CID = require('cids') +const IPLD = require('ipld') +const randomByteStream = require('./helpers/finite-pseudorandom-byte-stream') + +const strategies = [ + 'flat', + 'trickle', + 'balanced' +] + +const expectedHashes = { + flat: 'QmRgXEDv6DL8uchf7h9j8hAGG8Fq5r1UZ6Jy3TQAPxEb76', + balanced: 'QmVY1TFpjYKSo8LRG9oYgH4iy9AduwDvBGNhqap1Gkxme3', + trickle: 'QmYPsm9oVGjWECkT7KikZmrf8imggqKe8uS8Jco3qfWUCH' +} + +strategies.forEach(strategy => { + const options = { + strategy: strategy + } + + describe('go-ipfs interop using importer:' + strategy, () => { + let ipld + + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() + + ipld = resolver + + done() + }) + }) + + it('yields the same tree as go-ipfs', function (done) { + this.timeout(10 * 1000) + pull( + values([ + { + path: 'big.dat', + content: randomByteStream(45900000, 7382) + } + ]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.be.equal(1) + + const file = files[0] + expect(new CID(file.multihash).toBaseEncodedString()).to.be.equal(expectedHashes[strategy]) + done() + }) + ) + }) + }) +}) diff --git a/test/import-export-nested-dir.js b/test/import-export-nested-dir.js deleted file mode 100644 index 845bd07..0000000 --- a/test/import-export-nested-dir.js +++ /dev/null @@ -1,127 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const BlockService = require('ipfs-block-service') -const Ipld = require('ipld') -const pull = require('pull-stream/pull') -const values = require('pull-stream/sources/values') -const collect = require('pull-stream/sinks/collect') -const map = require('async/map') -const CID = require('cids') - -const importer = require('./../src') -const exporter = require('ipfs-unixfs-exporter') - -module.exports = (repo) => { - describe('import and export: directory', () => { - const rootHash = 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK' - let ipld - - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) - }) - - it('imports', function (done) { - this.timeout(20 * 1000) - - pull( - values([ - { path: 'a/b/c/d/e', content: pull.values([Buffer.from('banana')]) }, - { path: 'a/b/c/d/f', content: pull.values([Buffer.from('strawberry')]) }, - { path: 'a/b/g', content: pull.values([Buffer.from('ice')]) }, - { path: 'a/b/h', content: pull.values([Buffer.from('cream')]) } - ]), - importer(ipld), - collect((err, files) => { - expect(err).to.not.exist() - expect(files.map(normalizeNode).sort(byPath)).to.be.eql([ - { path: 'a/b/h', - multihash: 'QmWHMpCtdNjemT2F3SjyrmnBXQXwEohaZd4apcbFBhbFRC' }, - { path: 'a/b/g', - multihash: 'QmQGwYzzTPcbqTiy2Nbp88gqqBqCWY4QZGfen45LFZkD5n' }, - { path: 'a/b/c/d/f', - multihash: 'QmNVHs2dy7AjGUotsubWVncRsD3SpRXm8MgmCCQTVdVACz' }, - { path: 
'a/b/c/d/e', - multihash: 'QmYPbDKwc7oneCcEc6BcRSN5GXthTGWUCd19bTCyP9u3vH' }, - { path: 'a/b/c/d', - multihash: 'QmQGDXr3ysARM38n7h79Tx7yD3YxuzcnZ1naG71WMojPoj' }, - { path: 'a/b/c', - multihash: 'QmYTVcjYpN3hQLtJstCPE8hhEacAYjWAuTmmAAXoonamuE' }, - { path: 'a/b', - multihash: 'QmWyWYxq1GD9fEyckf5LrJv8hMW35CwfWwzDBp8bTw3NQj' }, - { path: 'a', - multihash: rootHash } - ]) - done() - }) - ) - }) - - it('exports', function (done) { - this.timeout(20 * 1000) - - pull( - exporter(rootHash, ipld), - collect((err, files) => { - expect(err).to.not.exist() - map( - files, - (file, callback) => { - if (file.content) { - pull( - file.content, - collect(mapFile(file, callback)) - ) - } else { - callback(null, { path: file.path }) - } - }, - (err, files) => { - expect(err).to.not.exist() - expect(files.filter(fileHasContent).sort(byPath)).to.eql([ - { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/h', - content: 'cream' }, - { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/g', - content: 'ice' }, - { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/c/d/f', - content: 'strawberry' }, - { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/c/d/e', - content: 'banana' } - ]) - done() - }) - }) - ) - - function mapFile (file, callback) { - return (err, fileContent) => { - callback(err, fileContent && { - path: file.path, - content: fileContent.toString() - }) - } - } - }) - }) -} - -function normalizeNode (node) { - return { - path: node.path, - multihash: new CID(node.multihash).toBaseEncodedString() - } -} - -function fileHasContent (file) { - return Boolean(file.content) -} - -function byPath (a, b) { - if (a.path > b.path) return -1 - if (a.path < b.path) return 1 - return 0 -} diff --git a/test/import-export-nested-dir.spec.js b/test/import-export-nested-dir.spec.js new file mode 100644 index 0000000..ec54cec --- /dev/null +++ b/test/import-export-nested-dir.spec.js @@ -0,0 +1,129 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const IPLD = require('ipld') +const pull = require('pull-stream/pull') +const values = require('pull-stream/sources/values') +const collect = require('pull-stream/sinks/collect') +const map = require('async/map') +const CID = require('cids') + +const importer = require('../src') +const exporter = require('ipfs-unixfs-exporter') + +describe('import and export: directory', () => { + const rootHash = 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK' + let ipld + + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() + + ipld = resolver + + done() + }) + }) + + it('imports', function (done) { + this.timeout(20 * 1000) + + pull( + values([ + { path: 'a/b/c/d/e', content: pull.values([Buffer.from('banana')]) }, + { path: 'a/b/c/d/f', content: pull.values([Buffer.from('strawberry')]) }, + { path: 'a/b/g', content: pull.values([Buffer.from('ice')]) }, + { path: 'a/b/h', content: pull.values([Buffer.from('cream')]) } + ]), + importer(ipld), + collect((err, files) => { + expect(err).to.not.exist() + expect(files.map(normalizeNode).sort(byPath)).to.be.eql([ + { path: 'a/b/h', + multihash: 'QmWHMpCtdNjemT2F3SjyrmnBXQXwEohaZd4apcbFBhbFRC' }, + { path: 'a/b/g', + multihash: 'QmQGwYzzTPcbqTiy2Nbp88gqqBqCWY4QZGfen45LFZkD5n' }, + { path: 'a/b/c/d/f', + multihash: 'QmNVHs2dy7AjGUotsubWVncRsD3SpRXm8MgmCCQTVdVACz' }, + { path: 'a/b/c/d/e', + multihash: 'QmYPbDKwc7oneCcEc6BcRSN5GXthTGWUCd19bTCyP9u3vH' }, + { path: 'a/b/c/d', + multihash: 
'QmQGDXr3ysARM38n7h79Tx7yD3YxuzcnZ1naG71WMojPoj' }, + { path: 'a/b/c', + multihash: 'QmYTVcjYpN3hQLtJstCPE8hhEacAYjWAuTmmAAXoonamuE' }, + { path: 'a/b', + multihash: 'QmWyWYxq1GD9fEyckf5LrJv8hMW35CwfWwzDBp8bTw3NQj' }, + { path: 'a', + multihash: rootHash } + ]) + done() + }) + ) + }) + + it('exports', function (done) { + this.timeout(20 * 1000) + + pull( + exporter(rootHash, ipld), + collect((err, files) => { + expect(err).to.not.exist() + map( + files, + (file, callback) => { + if (file.content) { + pull( + file.content, + collect(mapFile(file, callback)) + ) + } else { + callback(null, { path: file.path }) + } + }, + (err, files) => { + expect(err).to.not.exist() + expect(files.filter(fileHasContent).sort(byPath)).to.eql([ + { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/h', + content: 'cream' }, + { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/g', + content: 'ice' }, + { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/c/d/f', + content: 'strawberry' }, + { path: 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK/b/c/d/e', + content: 'banana' } + ]) + done() + }) + }) + ) + + function mapFile (file, callback) { + return (err, fileContent) => { + callback(err, fileContent && { + path: file.path, + content: fileContent.toString() + }) + } + } + }) +}) + +function normalizeNode (node) { + return { + path: node.path, + multihash: new CID(node.multihash).toBaseEncodedString() + } +} + +function fileHasContent (file) { + return Boolean(file.content) +} + +function byPath (a, b) { + if (a.path > b.path) return -1 + if (a.path < b.path) return 1 + return 0 +} diff --git a/test/import-export.js b/test/import-export.js deleted file mode 100644 index 9e6733c..0000000 --- a/test/import-export.js +++ /dev/null @@ -1,77 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 5] */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const BlockService = require('ipfs-block-service') -const Ipld = require('ipld') -const pull = require('pull-stream/pull') -const values = require('pull-stream/sources/values') -const concat = require('pull-stream/sinks/concat') -const flatten = require('pull-stream/throughs/flatten') -const map = require('pull-stream/throughs/map') -const collect = require('pull-stream/sinks/collect') -const loadFixture = require('aegir/fixtures') -const bigFile = loadFixture('test/fixtures/1.2MiB.txt') - -const importer = require('./../src') -const exporter = require('ipfs-unixfs-exporter') - -const strategies = [ - 'flat', - 'balanced', - 'trickle' -] - -function fileEql (f1, fileData, callback) { - pull( - f1.content, - concat((err, data) => { - expect(err).to.not.exist() - // TODO: eql is super slow at comparing large buffers - // expect(data).to.eql(fileData) - callback() - }) - ) -} - -module.exports = (repo) => { - describe('import and export', function () { - this.timeout(30 * 1000) - - strategies.forEach((strategy) => { - const importerOptions = { strategy: strategy } - - describe('using builder: ' + strategy, () => { - let ipld - - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) - }) - - it('import and export', (done) => { - const path = strategy + '-big.dat' - - pull( - values([{ path: path, content: values(bigFile) }]), - importer(ipld, importerOptions), - map((file) => { - expect(file.path).to.eql(path) - - return exporter(file.multihash, ipld) - }), - flatten(), - collect((err, files) => { - expect(err).to.not.exist() - 
expect(files[0].size).to.eql(bigFile.length) - fileEql(files[0], bigFile, done) - }) - ) - }) - }) - }) - }) -} diff --git a/test/import-export.spec.js b/test/import-export.spec.js new file mode 100644 index 0000000..402c302 --- /dev/null +++ b/test/import-export.spec.js @@ -0,0 +1,79 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 5] */ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const IPLD = require('ipld') +const pull = require('pull-stream/pull') +const values = require('pull-stream/sources/values') +const concat = require('pull-stream/sinks/concat') +const flatten = require('pull-stream/throughs/flatten') +const map = require('pull-stream/throughs/map') +const collect = require('pull-stream/sinks/collect') +const loadFixture = require('aegir/fixtures') +const bigFile = loadFixture('test/fixtures/1.2MiB.txt') + +const importer = require('../src') +const exporter = require('ipfs-unixfs-exporter') + +const strategies = [ + 'flat', + 'balanced', + 'trickle' +] + +function fileEql (f1, fileData, callback) { + pull( + f1.content, + concat((err, data) => { + expect(err).to.not.exist() + // TODO: eql is super slow at comparing large buffers + // expect(data).to.eql(fileData) + callback() + }) + ) +} + +describe('import and export', function () { + this.timeout(30 * 1000) + + strategies.forEach((strategy) => { + const importerOptions = { strategy: strategy } + + describe('using builder: ' + strategy, () => { + let ipld + + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() + + ipld = resolver + + done() + }) + }) + + it('import and export', (done) => { + const path = strategy + '-big.dat' + + pull( + values([{ path: path, content: values(bigFile) }]), + importer(ipld, importerOptions), + map((file) => { + expect(file.path).to.eql(path) + + return exporter(file.multihash, ipld) + }), + flatten(), + collect((err, files) => { + expect(err).to.not.exist() + expect(files[0].size).to.eql(bigFile.length) + fileEql(files[0], bigFile, done) + }) + ) + }) + }) + }) +}) diff --git a/test/importer-flush.js b/test/importer-flush.js deleted file mode 100644 index 595ee0a..0000000 --- a/test/importer-flush.js +++ /dev/null @@ -1,201 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const createImporter = require('./../src') - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const BlockService = require('ipfs-block-service') -const Ipld = require('ipld') -const pull = require('pull-stream/pull') -const values = require('pull-stream/sources/values') -const map = require('pull-stream/throughs/map') -const collect = require('pull-stream/sinks/collect') -const pushable = require('pull-pushable') - -module.exports = (repo) => { - describe('importer: flush', () => { - let ipld - - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) - }) - - it('can push a single root file and flush yields no dirs', (done) => { - const source = pushable() - const importer = createImporter(ipld) - pull( - source, - importer, - map(node => { - expect(node.path).to.be.eql('a') - return node - }), - collect((err, files) => { - expect(err).to.not.exist() - expect(files.length).to.be.eql(1) - done() - }) - ) - - source.push({ - path: 'a', - content: values([Buffer.from('hey')]) - }) - - importer.flush((err, hash) => { - expect(err).to.not.exist() - expect(Buffer.isBuffer(hash)).to.be.true() - source.end() - }) - }) - - it('can push a nested 
file and flush yields parent dir', (done) => { - const source = pushable() - const importer = createImporter(ipld) - let count = 0 - pull( - source, - importer, - map(function (node) { - count++ - if (count === 1) { - expect(node.path).to.be.eql('b/c') - } else if (count === 2) { - expect(node.path).to.be.eql('b') - } - return node - }), - collect((err, files) => { - expect(err).to.not.exist() - expect(count).to.be.eql(2) - done() - }) - ) - - source.push({ - path: 'b/c', - content: values([Buffer.from('hey')]) - }) - - importer.flush((err, hash) => { - expect(err).to.not.exist() - expect(Buffer.isBuffer(hash)).to.be.true() - source.end() - }) - }) - - it('can flush many times, always coherent', (done) => { - const maxDepth = 4 - const maxEntriesPerDir = 3 - - let count = 0 - const tree = { children: {}, path: '', depth: 0, yielded: true } - let currentDir = tree - - const source = pushable() - const importer = createImporter(ipld) - - pull( - source, - importer, - map((node) => { - count++ - markDirAsYielded(node) - return node - }), - collect((err, files) => { - expect(err).to.not.exist() - expect(count).to.be.eql(2) - done() - }) - ) - - pushAndFlush() - - function pushAndFlush () { - const childCount = Object.keys(currentDir.children).length - const newDirName = childCount.toString() - const dirPath = currentDir.path + (currentDir.depth > 0 ? '/' : '') + newDirName - const newDir = { - children: {}, - path: dirPath, - depth: currentDir.depth + 1, - yielded: false, - parent: currentDir - } - currentDir.children[newDirName] = newDir - markAncestorsAsDirty(currentDir) - - const filePath = dirPath + '/filename' - const file = { - path: filePath, - content: values([Buffer.from('file with path ' + filePath)]) - } - source.push(file) - if (currentDir.depth === 0 || childCount + 1 === maxEntriesPerDir) { - currentDir = newDir - } - importer.flush((err, hash) => { - expect(err).to.not.exist() - expect(Buffer.isBuffer(hash)).to.be.true() - testAllYielded(tree) - if (currentDir.depth < maxDepth) { - pushAndFlush() - } else { - expect(count).to.be.eql(38) - done() - } - }) - } - - function markDirAsYielded (node) { - const dir = findDir(tree, node.path) - if (node.path === dir.path) { - expect(dir.yielded).to.be.false() - dir.yielded = true - } - } - - function findDir (tree, path) { - const pathElems = path.split('/').filter(notEmpty) - const child = tree.children[pathElems.shift()] - if (!child) { - return tree - } - if (pathElems.length) { - return findDir(child, pathElems.join('/')) - } else { - return child - } - } - - function testAllYielded (tree) { - if (tree.depth) { - expect(tree.yielded).to.be.true() - } - const childrenNames = Object.keys(tree.children) - childrenNames.forEach((childName) => { - const child = tree.children[childName] - testAllYielded(child) - }) - } - - function markAncestorsAsDirty (dir) { - dir.yielded = false - while (dir) { - dir = dir.parent - if (dir) { - dir.yielded = false - } - } - } - }) - }) -} - -function notEmpty (str) { - return Boolean(str) -} diff --git a/test/importer-flush.spec.js b/test/importer-flush.spec.js new file mode 100644 index 0000000..e0dd6c8 --- /dev/null +++ b/test/importer-flush.spec.js @@ -0,0 +1,203 @@ +/* eslint-env mocha */ +'use strict' + +const createImporter = require('../src') + +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const IPLD = require('ipld') +const pull = require('pull-stream/pull') +const values = require('pull-stream/sources/values') +const map = 
require('pull-stream/throughs/map') +const collect = require('pull-stream/sinks/collect') +const pushable = require('pull-pushable') + +describe('importer: flush', () => { + let ipld + + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() + + ipld = resolver + + done() + }) + }) + + it('can push a single root file and flush yields no dirs', (done) => { + const source = pushable() + const importer = createImporter(ipld) + pull( + source, + importer, + map(node => { + expect(node.path).to.be.eql('a') + return node + }), + collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.be.eql(1) + done() + }) + ) + + source.push({ + path: 'a', + content: values([Buffer.from('hey')]) + }) + + importer.flush((err, hash) => { + expect(err).to.not.exist() + expect(Buffer.isBuffer(hash)).to.be.true() + source.end() + }) + }) + + it('can push a nested file and flush yields parent dir', (done) => { + const source = pushable() + const importer = createImporter(ipld) + let count = 0 + pull( + source, + importer, + map(function (node) { + count++ + if (count === 1) { + expect(node.path).to.be.eql('b/c') + } else if (count === 2) { + expect(node.path).to.be.eql('b') + } + return node + }), + collect((err, files) => { + expect(err).to.not.exist() + expect(count).to.be.eql(2) + done() + }) + ) + + source.push({ + path: 'b/c', + content: values([Buffer.from('hey')]) + }) + + importer.flush((err, hash) => { + expect(err).to.not.exist() + expect(Buffer.isBuffer(hash)).to.be.true() + source.end() + }) + }) + + it('can flush many times, always coherent', (done) => { + const maxDepth = 4 + const maxEntriesPerDir = 3 + + let count = 0 + const tree = { children: {}, path: '', depth: 0, yielded: true } + let currentDir = tree + + const source = pushable() + const importer = createImporter(ipld) + + pull( + source, + importer, + map((node) => { + count++ + markDirAsYielded(node) + return node + }), + collect((err, files) => { + expect(err).to.not.exist() + expect(count).to.be.eql(2) + done() + }) + ) + + pushAndFlush() + + function pushAndFlush () { + const childCount = Object.keys(currentDir.children).length + const newDirName = childCount.toString() + const dirPath = currentDir.path + (currentDir.depth > 0 ? 
'/' : '') + newDirName + const newDir = { + children: {}, + path: dirPath, + depth: currentDir.depth + 1, + yielded: false, + parent: currentDir + } + currentDir.children[newDirName] = newDir + markAncestorsAsDirty(currentDir) + + const filePath = dirPath + '/filename' + const file = { + path: filePath, + content: values([Buffer.from('file with path ' + filePath)]) + } + source.push(file) + if (currentDir.depth === 0 || childCount + 1 === maxEntriesPerDir) { + currentDir = newDir + } + importer.flush((err, hash) => { + expect(err).to.not.exist() + expect(Buffer.isBuffer(hash)).to.be.true() + testAllYielded(tree) + if (currentDir.depth < maxDepth) { + pushAndFlush() + } else { + expect(count).to.be.eql(38) + done() + } + }) + } + + function markDirAsYielded (node) { + const dir = findDir(tree, node.path) + if (node.path === dir.path) { + expect(dir.yielded).to.be.false() + dir.yielded = true + } + } + + function findDir (tree, path) { + const pathElems = path.split('/').filter(notEmpty) + const child = tree.children[pathElems.shift()] + if (!child) { + return tree + } + if (pathElems.length) { + return findDir(child, pathElems.join('/')) + } else { + return child + } + } + + function testAllYielded (tree) { + if (tree.depth) { + expect(tree.yielded).to.be.true() + } + const childrenNames = Object.keys(tree.children) + childrenNames.forEach((childName) => { + const child = tree.children[childName] + testAllYielded(child) + }) + } + + function markAncestorsAsDirty (dir) { + dir.yielded = false + while (dir) { + dir = dir.parent + if (dir) { + dir.yielded = false + } + } + } + }) +}) + +function notEmpty (str) { + return Boolean(str) +} diff --git a/test/importer.js b/test/importer.js deleted file mode 100644 index 71c45a8..0000000 --- a/test/importer.js +++ /dev/null @@ -1,728 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const importer = require('./../src') -const exporter = require('ipfs-unixfs-exporter') - -const extend = require('deep-extend') -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const spy = require('sinon/lib/sinon/spy') -const BlockService = require('ipfs-block-service') -const pull = require('pull-stream/pull') -const once = require('pull-stream/sources/once') -const values = require('pull-stream/sources/values') -const collect = require('pull-stream/sinks/collect') -const CID = require('cids') -const Ipld = require('ipld') -const loadFixture = require('aegir/fixtures') -const each = require('async/each') -const waterfall = require('async/waterfall') -const parallel = require('async/parallel') -const UnixFs = require('ipfs-unixfs') -const collectLeafCids = require('./helpers/collect-leaf-cids') - -function stringifyMh (files) { - return files.map((file) => { - file.multihash = new CID(file.multihash).toBaseEncodedString() - return file - }) -} - -const bigFile = loadFixture('test/fixtures/1.2MiB.txt') -const smallFile = loadFixture('test/fixtures/200Bytes.txt') - -const baseFiles = { - '200Bytes.txt': { - path: '200Bytes.txt', - multihash: 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8', - size: 211, - name: '', - leafSize: 200 - }, - '1.2MiB.txt': { - path: '1.2MiB.txt', - multihash: 'QmbPN6CXXWpejfQgnRYnMQcVYkFHEntHWqLNQjbkatYCh1', - size: 1328062, - name: '', - leafSize: 1258000 - } -} - -const strategyBaseFiles = { - flat: baseFiles, - balanced: extend({}, baseFiles, { - '1.2MiB.txt': { - multihash: 'QmeEGqUisUD2T6zU96PrZnCkHfXCGuQeGWKu4UoSuaZL3d', - size: 1335420 - } - }), - trickle: extend({}, baseFiles, { - '1.2MiB.txt': 
{ - multihash: 'QmaiSohNUt1rBf2Lqz6ou54NHVPTbXbBoPuq9td4ekcBx4', - size: 1334599 - } - }) -} - -const strategies = [ - 'flat', - 'balanced', - 'trickle' -] - -const strategyOverrides = { - balanced: { - 'foo-big': { - path: 'foo-big', - multihash: 'QmQ1S6eEamaf4t948etp8QiYQ9avrKCogiJnPRgNkVreLv', - size: 1335478 - }, - pim: { - multihash: 'QmUpzaN4Jio2GB3HoPSRCMQD5EagdMWjSEGD4SGZXaCw7W', - size: 1335744 - }, - 'pam/pum': { - multihash: 'QmUpzaN4Jio2GB3HoPSRCMQD5EagdMWjSEGD4SGZXaCw7W', - size: 1335744 - }, - pam: { - multihash: 'QmVoVD4fEWFLJLjvRCg4bGrziFhgECiaezp79AUfhuLgno', - size: 2671269 - } - }, - trickle: { - 'foo-big': { - path: 'foo-big', - multihash: 'QmPh6KSS7ghTqzgWhaoCiLoHFPF7HGqUxx7q9vcM5HUN4U', - size: 1334657 - }, - pim: { - multihash: 'QmPAn3G2x2nrq4A1fu2XUpwWtpqG4D1YXFDrU615NHvJbr', - size: 1334923 - }, - 'pam/pum': { - multihash: 'QmPAn3G2x2nrq4A1fu2XUpwWtpqG4D1YXFDrU615NHvJbr', - size: 1334923 - }, - pam: { - multihash: 'QmZTJah1xpG9X33ZsPtDEi1tYSHGDqQMRHsGV5xKzAR2j4', - size: 2669627 - } - } -} - -const checkLeafNodeTypes = (ipld, options, expected, done) => { - waterfall([ - (cb) => pull( - once({ - path: '/foo', - content: Buffer.alloc(262144 + 5).fill(1) - }), - importer(ipld, options), - collect(cb) - ), - (files, cb) => ipld.get(new CID(files[0].multihash), cb), - (result, cb) => { - const node = result.value - const meta = UnixFs.unmarshal(node.data) - - expect(meta.type).to.equal('file') - expect(node.links.length).to.equal(2) - - parallel( - node.links.map(link => { - return (done) => { - waterfall([ - (next) => ipld.get(link.cid, next), - (result, next) => { - const node = result.value - const meta = UnixFs.unmarshal(node.data) - - expect(meta.type).to.equal(expected) - - next() - } - ], done) - } - }), cb) - } - ], done) -} - -const checkNodeLinks = (ipld, options, expected, done) => { - waterfall([ - (cb) => pull( - pull.once({ - path: '/foo', - content: Buffer.alloc(100).fill(1) - }), - importer(ipld, options), - collect(cb) - ), - (files, cb) => ipld.get(new CID(files[0].multihash), cb), - (result, cb) => { - const node = result.value - const meta = UnixFs.unmarshal(node.data) - - expect(meta.type).to.equal('file') - expect(node.links.length).to.equal(expected) - - cb() - } - ], done) -} - -module.exports = (repo) => { - strategies.forEach((strategy) => { - const baseFiles = strategyBaseFiles[strategy] - const defaultResults = extend({}, baseFiles, { - 'foo/bar/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], { - path: 'foo/bar/200Bytes.txt' - }), - foo: { - path: 'foo', - multihash: 'QmQrb6KKWGo8w7zKfx2JksptY6wN7B2ysSBdKZr4xMU36d', - size: 320 - }, - 'foo/bar': { - path: 'foo/bar', - multihash: 'Qmf5BQbTUyUAvd6Ewct83GYGnE1F6btiC3acLhR8MDxgkD', - size: 270 - }, - 'foo-big/1.2MiB.txt': extend({}, baseFiles['1.2MiB.txt'], { - path: 'foo-big/1.2MiB.txt' - }), - 'foo-big': { - path: 'foo-big', - multihash: 'Qma6JU3FoXU9eAzgomtmYPjzFBwVc2rRbECQpmHFiA98CJ', - size: 1328120 - }, - 'pim/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], { - path: 'pim/200Bytes.txt' - }), - 'pim/1.2MiB.txt': extend({}, baseFiles['1.2MiB.txt'], { - path: 'pim/1.2MiB.txt' - }), - pim: { - path: 'pim', - multihash: 'QmNk8VPGb3fkAQgoxctXo4Wmnr4PayFTASy4MiVXTtXqiA', - size: 1328386 - }, - 'empty-dir': { - path: 'empty-dir', - multihash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - size: 4 - }, - 'pam/pum': { - multihash: 'QmNk8VPGb3fkAQgoxctXo4Wmnr4PayFTASy4MiVXTtXqiA', - size: 1328386 - }, - pam: { - multihash: 'QmPAixYTaYnPe795fcWcuRpo6tfwHgRKNiBHpMzoomDVN6', - size: 
2656553 - }, - '200Bytes.txt with raw leaves': extend({}, baseFiles['200Bytes.txt'], { - multihash: 'zb2rhXrz1gkCv8p4nUDZRohY6MzBE9C3HVTVDP72g6Du3SD9Q', - size: 200 - }) - }, strategyOverrides[strategy]) - - const expected = extend({}, defaultResults, strategies[strategy]) - - describe('importer: ' + strategy, function () { - this.timeout(30 * 1000) - - let ipld - - const options = { - strategy: strategy, - maxChildrenPerNode: 10, - chunkerOptions: { - maxChunkSize: 1024 - } - } - - before(() => { - const bs = new BlockService(repo) - ipld = new Ipld({ blockService: bs }) - }) - - it('fails on bad input', (done) => { - pull( - values([{ - path: '200Bytes.txt', - content: 'banana' - }]), - importer(ipld, options), - pull.onEnd((err) => { - expect(err).to.exist() - done() - }) - ) - }) - - it('doesn\'t yield anything on empty source', (done) => { - pull( - pull.empty(), - importer(ipld, options), - collect((err, nodes) => { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(0) - done() - })) - }) - - it('doesn\'t yield anything on empty file', (done) => { - pull( - values([{ - path: 'emptyfile', - content: pull.empty() - }]), - importer(ipld, options), - collect((err, nodes) => { - expect(err).to.not.exist() - expect(nodes.length).to.be.eql(1) - - // always yield empty node - expect(new CID(nodes[0].multihash).toBaseEncodedString()).to.be.eql('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH') - done() - })) - }) - - it('fails on more than one root', (done) => { - pull( - values([ - { - path: '/beep/200Bytes.txt', - content: values([smallFile]) - }, - { - path: '/boop/200Bytes.txt', - content: values([bigFile]) - } - ]), - importer(ipld, options), - pull.onEnd((err) => { - expect(err).to.exist() - expect(err.message).to.be.eql('detected more than one root') - done() - }) - ) - }) - - it('small file with an escaped slash in the title', (done) => { - const filePath = `small-\\/file-${Math.random()}.txt` - - pull( - values([{ - path: filePath, - content: values([smallFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - expect(files.length).to.equal(1) - expect(files[0].path).to.equal(filePath) - done() - }) - ) - }) - - it('small file with square brackets in the title', (done) => { - const filePath = `small-[v]-file-${Math.random()}.txt` - - pull( - values([{ - path: filePath, - content: values([smallFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - expect(files.length).to.equal(1) - expect(files[0].path).to.equal(filePath) - done() - }) - ) - }) - - it('small file (smaller than a chunk)', (done) => { - pull( - values([{ - path: '200Bytes.txt', - content: values([smallFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']]) - done() - }) - ) - }) - - it('small file (smaller than a chunk) with raw leaves', (done) => { - pull( - values([{ - path: '200Bytes.txt', - content: values([smallFile]) - }]), - importer(ipld, Object.assign({}, options, { rawLeaves: true })), - collect((err, files) => { - expect(err).to.not.exist() - expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt with raw leaves']]) - done() - }) - ) - }) - - it('small file as buffer (smaller than a chunk)', (done) => { - pull( - values([{ - path: '200Bytes.txt', - content: smallFile - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - 
expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']]) - done() - }) - ) - }) - - it('small file (smaller than a chunk) inside a dir', (done) => { - pull( - values([{ - path: 'foo/bar/200Bytes.txt', - content: values([smallFile]) - }]), - importer(ipld, options), - collect(collected) - ) - - function collected (err, files) { - expect(err).to.not.exist() - expect(files.length).to.equal(3) - stringifyMh(files).forEach((file) => { - if (file.path === 'foo/bar/200Bytes.txt') { - expect(file).to.be.eql(expected['foo/bar/200Bytes.txt']) - } - if (file.path === 'foo') { - expect(file).to.be.eql(expected.foo) - } - if (file.path === 'foo/bar') { - expect(file).to.be.eql(expected['foo/bar']) - } - }) - done() - } - }) - - it('file bigger than a single chunk', function (done) { - this.timeout(60 * 1000) - pull( - values([{ - path: '1.2MiB.txt', - content: values([bigFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - expect(stringifyMh(files)).to.be.eql([expected['1.2MiB.txt']]) - done() - }) - ) - }) - - it('file bigger than a single chunk inside a dir', function (done) { - this.timeout(60 * 1000) - pull( - values([{ - path: 'foo-big/1.2MiB.txt', - content: values([bigFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - - expect(stringifyMh(files)).to.be.eql([ - expected['foo-big/1.2MiB.txt'], - expected['foo-big'] - ]) - - done() - }) - ) - }) - - it('empty directory', (done) => { - pull( - values([{ - path: 'empty-dir' - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - - expect(stringifyMh(files)).to.be.eql([expected['empty-dir']]) - - done() - }) - ) - }) - - it('directory with files', (done) => { - pull( - values([{ - path: 'pim/200Bytes.txt', - content: values([smallFile]) - }, { - path: 'pim/1.2MiB.txt', - content: values([bigFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - - expect(stringifyMh(files)).be.eql([ - expected['pim/200Bytes.txt'], - expected['pim/1.2MiB.txt'], - expected.pim] - ) - - done() - }) - ) - }) - - it('nested directory (2 levels deep)', (done) => { - pull( - values([{ - path: 'pam/pum/200Bytes.txt', - content: values([smallFile]) - }, { - path: 'pam/pum/1.2MiB.txt', - content: values([bigFile]) - }, { - path: 'pam/1.2MiB.txt', - content: values([bigFile]) - }]), - importer(ipld, options), - collect((err, files) => { - expect(err).to.not.exist() - - // need to sort as due to parallel storage the order - // can vary - stringifyMh(files).forEach(eachFile) - - done() - }) - ) - - function eachFile (file) { - if (file.path === 'pam/pum/200Bytes.txt') { - expect(file.multihash).to.be.eql(expected['200Bytes.txt'].multihash) - expect(file.size).to.be.eql(expected['200Bytes.txt'].size) - } - if (file.path === 'pam/pum/1.2MiB.txt') { - expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash) - expect(file.size).to.be.eql(expected['1.2MiB.txt'].size) - } - if (file.path === 'pam/pum') { - const dir = expected['pam/pum'] - expect(file.multihash).to.be.eql(dir.multihash) - expect(file.size).to.be.eql(dir.size) - } - if (file.path === 'pam/1.2MiB.txt') { - expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash) - expect(file.size).to.be.eql(expected['1.2MiB.txt'].size) - } - if (file.path === 'pam') { - const dir = expected.pam - expect(file.multihash).to.be.eql(dir.multihash) - expect(file.size).to.be.eql(dir.size) - } - } - }) - - it('will not write to disk if passed 
"onlyHash" option', (done) => { - const content = String(Math.random() + Date.now()) - const inputFile = { - path: content + '.txt', - content: Buffer.from(content) - } - - const options = { - onlyHash: true - } - - const onCollected = (err, files) => { - if (err) return done(err) - - const file = files[0] - expect(file).to.exist() - - ipld.get(new CID(file.multihash), (err) => { - expect(err).to.exist() - done() - }) - } - - pull( - values([inputFile]), - importer(ipld, options), - collect(onCollected) - ) - }) - - it('will call an optional progress function', (done) => { - options.progress = spy() - - pull( - values([{ - path: '1.2MiB.txt', - content: values([bigFile]) - }]), - importer(ipld, options), - collect(() => { - expect(options.progress.called).to.equal(true) - expect(options.progress.args[0][0]).to.equal(1024) - done() - }) - ) - }) - - it('will import files with CID version 1', (done) => { - const createInputFile = (path, size) => { - const name = String(Math.random() + Date.now()) - path = path[path.length - 1] === '/' ? path : path + '/' - return { - path: path + name + '.txt', - content: Buffer.alloc(size).fill(1) - } - } - - const inputFiles = [ - createInputFile('/foo', 10), - createInputFile('/foo', 60), - createInputFile('/foo/bar', 78), - createInputFile('/foo/baz', 200), - // Bigger than maxChunkSize - createInputFile('/foo', 262144 + 45), - createInputFile('/foo/bar', 262144 + 134), - createInputFile('/foo/bar', 262144 + 79), - createInputFile('/foo/bar', 262144 + 876), - createInputFile('/foo/bar', 262144 + 21) - ] - - const options = { - cidVersion: 1, - // Ensures we use DirSharded for the data below - shardSplitThreshold: 3 - } - - const onCollected = (err, files) => { - if (err) return done(err) - - const file = files[0] - expect(file).to.exist() - - each(files, (file, cb) => { - const cid = new CID(file.multihash).toV1() - const inputFile = inputFiles.find(f => f.path === file.path) - - // Just check the intermediate directory can be retrieved - if (!inputFile) { - return ipld.get(cid, cb) - } - - // Check the imported content is correct - pull( - exporter(cid, ipld), - collect((err, nodes) => { - expect(err).to.not.exist() - pull( - nodes[0].content, - collect((err, chunks) => { - expect(err).to.not.exist() - expect(Buffer.concat(chunks)).to.deep.equal(inputFile.content) - cb() - }) - ) - }) - ) - }, done) - } - - pull( - // Pass a copy of inputFiles, since the importer mutates them - values(inputFiles.map(f => Object.assign({}, f))), - importer(ipld, options), - collect(onCollected) - ) - }) - - it('imports file with raw leaf nodes when specified', (done) => { - checkLeafNodeTypes(ipld, { - leafType: 'raw' - }, 'raw', done) - }) - - it('imports file with file leaf nodes when specified', (done) => { - checkLeafNodeTypes(ipld, { - leafType: 'file' - }, 'file', done) - }) - - it('reduces file to single node when specified', (done) => { - checkNodeLinks(ipld, { - reduceSingleLeafToSelf: true - }, 0, done) - }) - - it('does not reduce file to single node when overidden by options', (done) => { - checkNodeLinks(ipld, { - reduceSingleLeafToSelf: false - }, 1, done) - }) - - it('uses raw leaf nodes when requested', (done) => { - this.timeout(60 * 1000) - - options.rawLeaves = true - - pull( - values([{ - path: '1.2MiB.txt', - content: values([bigFile]) - }]), - importer(ipld, options), - collect((error, files) => { - expect(error).to.not.exist() - - const node = files[0] - - collectLeafCids(ipld, node.multihash, (error, cids) => { - expect(error).to.be.not.ok() - - 
const rawNodes = cids - .filter(cid => cid.codec === 'raw') - - expect(rawNodes).to.not.be.empty() - - rawNodes - .forEach(cid => { - expect(cid.version).to.equal(1) - }) - - done() - }) - }) - ) - }) - }) - }) -} diff --git a/test/importer.spec.js b/test/importer.spec.js new file mode 100644 index 0000000..f92f419 --- /dev/null +++ b/test/importer.spec.js @@ -0,0 +1,729 @@ +/* eslint-env mocha */ +'use strict' + +const importer = require('../src') +const exporter = require('ipfs-unixfs-exporter') + +const extend = require('deep-extend') +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const spy = require('sinon/lib/sinon/spy') +const pull = require('pull-stream/pull') +const once = require('pull-stream/sources/once') +const values = require('pull-stream/sources/values') +const collect = require('pull-stream/sinks/collect') +const CID = require('cids') +const IPLD = require('ipld') +const loadFixture = require('aegir/fixtures') +const each = require('async/each') +const waterfall = require('async/waterfall') +const parallel = require('async/parallel') +const UnixFs = require('ipfs-unixfs') +const collectLeafCids = require('./helpers/collect-leaf-cids') + +function stringifyMh (files) { + return files.map((file) => { + file.multihash = new CID(file.multihash).toBaseEncodedString() + return file + }) +} + +const bigFile = loadFixture('test/fixtures/1.2MiB.txt') +const smallFile = loadFixture('test/fixtures/200Bytes.txt') + +const baseFiles = { + '200Bytes.txt': { + path: '200Bytes.txt', + multihash: 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8', + size: 211, + name: '', + leafSize: 200 + }, + '1.2MiB.txt': { + path: '1.2MiB.txt', + multihash: 'QmbPN6CXXWpejfQgnRYnMQcVYkFHEntHWqLNQjbkatYCh1', + size: 1328062, + name: '', + leafSize: 1258000 + } +} + +const strategyBaseFiles = { + flat: baseFiles, + balanced: extend({}, baseFiles, { + '1.2MiB.txt': { + multihash: 'QmeEGqUisUD2T6zU96PrZnCkHfXCGuQeGWKu4UoSuaZL3d', + size: 1335420 + } + }), + trickle: extend({}, baseFiles, { + '1.2MiB.txt': { + multihash: 'QmaiSohNUt1rBf2Lqz6ou54NHVPTbXbBoPuq9td4ekcBx4', + size: 1334599 + } + }) +} + +const strategies = [ + 'flat', + 'balanced', + 'trickle' +] + +const strategyOverrides = { + balanced: { + 'foo-big': { + path: 'foo-big', + multihash: 'QmQ1S6eEamaf4t948etp8QiYQ9avrKCogiJnPRgNkVreLv', + size: 1335478 + }, + pim: { + multihash: 'QmUpzaN4Jio2GB3HoPSRCMQD5EagdMWjSEGD4SGZXaCw7W', + size: 1335744 + }, + 'pam/pum': { + multihash: 'QmUpzaN4Jio2GB3HoPSRCMQD5EagdMWjSEGD4SGZXaCw7W', + size: 1335744 + }, + pam: { + multihash: 'QmVoVD4fEWFLJLjvRCg4bGrziFhgECiaezp79AUfhuLgno', + size: 2671269 + } + }, + trickle: { + 'foo-big': { + path: 'foo-big', + multihash: 'QmPh6KSS7ghTqzgWhaoCiLoHFPF7HGqUxx7q9vcM5HUN4U', + size: 1334657 + }, + pim: { + multihash: 'QmPAn3G2x2nrq4A1fu2XUpwWtpqG4D1YXFDrU615NHvJbr', + size: 1334923 + }, + 'pam/pum': { + multihash: 'QmPAn3G2x2nrq4A1fu2XUpwWtpqG4D1YXFDrU615NHvJbr', + size: 1334923 + }, + pam: { + multihash: 'QmZTJah1xpG9X33ZsPtDEi1tYSHGDqQMRHsGV5xKzAR2j4', + size: 2669627 + } + } +} + +const checkLeafNodeTypes = (ipld, options, expected, done) => { + waterfall([ + (cb) => pull( + once({ + path: '/foo', + content: Buffer.alloc(262144 + 5).fill(1) + }), + importer(ipld, options), + collect(cb) + ), + (files, cb) => ipld.get(new CID(files[0].multihash), cb), + (result, cb) => { + const node = result.value + const meta = UnixFs.unmarshal(node.data) + + expect(meta.type).to.equal('file') + expect(node.links.length).to.equal(2) + + 
parallel( + node.links.map(link => { + return (done) => { + waterfall([ + (next) => ipld.get(link.cid, next), + (result, next) => { + const node = result.value + const meta = UnixFs.unmarshal(node.data) + + expect(meta.type).to.equal(expected) + + next() + } + ], done) + } + }), cb) + } + ], done) +} + +const checkNodeLinks = (ipld, options, expected, done) => { + waterfall([ + (cb) => pull( + pull.once({ + path: '/foo', + content: Buffer.alloc(100).fill(1) + }), + importer(ipld, options), + collect(cb) + ), + (files, cb) => ipld.get(new CID(files[0].multihash), cb), + (result, cb) => { + const node = result.value + const meta = UnixFs.unmarshal(node.data) + + expect(meta.type).to.equal('file') + expect(node.links.length).to.equal(expected) + + cb() + } + ], done) +} + +strategies.forEach((strategy) => { + const baseFiles = strategyBaseFiles[strategy] + const defaultResults = extend({}, baseFiles, { + 'foo/bar/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], { + path: 'foo/bar/200Bytes.txt' + }), + foo: { + path: 'foo', + multihash: 'QmQrb6KKWGo8w7zKfx2JksptY6wN7B2ysSBdKZr4xMU36d', + size: 320 + }, + 'foo/bar': { + path: 'foo/bar', + multihash: 'Qmf5BQbTUyUAvd6Ewct83GYGnE1F6btiC3acLhR8MDxgkD', + size: 270 + }, + 'foo-big/1.2MiB.txt': extend({}, baseFiles['1.2MiB.txt'], { + path: 'foo-big/1.2MiB.txt' + }), + 'foo-big': { + path: 'foo-big', + multihash: 'Qma6JU3FoXU9eAzgomtmYPjzFBwVc2rRbECQpmHFiA98CJ', + size: 1328120 + }, + 'pim/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], { + path: 'pim/200Bytes.txt' + }), + 'pim/1.2MiB.txt': extend({}, baseFiles['1.2MiB.txt'], { + path: 'pim/1.2MiB.txt' + }), + pim: { + path: 'pim', + multihash: 'QmNk8VPGb3fkAQgoxctXo4Wmnr4PayFTASy4MiVXTtXqiA', + size: 1328386 + }, + 'empty-dir': { + path: 'empty-dir', + multihash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + size: 4 + }, + 'pam/pum': { + multihash: 'QmNk8VPGb3fkAQgoxctXo4Wmnr4PayFTASy4MiVXTtXqiA', + size: 1328386 + }, + pam: { + multihash: 'QmPAixYTaYnPe795fcWcuRpo6tfwHgRKNiBHpMzoomDVN6', + size: 2656553 + }, + '200Bytes.txt with raw leaves': extend({}, baseFiles['200Bytes.txt'], { + multihash: 'zb2rhXrz1gkCv8p4nUDZRohY6MzBE9C3HVTVDP72g6Du3SD9Q', + size: 200 + }) + }, strategyOverrides[strategy]) + + const expected = extend({}, defaultResults, strategies[strategy]) + + describe('importer: ' + strategy, function () { + this.timeout(30 * 1000) + + let ipld + const options = { + strategy: strategy, + maxChildrenPerNode: 10, + chunkerOptions: { + maxChunkSize: 1024 + } + } + + before((done) => { + IPLD.inMemory((err, resolver) => { + expect(err).to.not.exist() + + ipld = resolver + + done() + }) + }) + + it('fails on bad input', (done) => { + pull( + values([{ + path: '200Bytes.txt', + content: 'banana' + }]), + importer(ipld, options), + pull.onEnd((err) => { + expect(err).to.exist() + done() + }) + ) + }) + + it('doesn\'t yield anything on empty source', (done) => { + pull( + pull.empty(), + importer(ipld, options), + collect((err, nodes) => { + expect(err).to.not.exist() + expect(nodes.length).to.be.eql(0) + done() + })) + }) + + it('doesn\'t yield anything on empty file', (done) => { + pull( + values([{ + path: 'emptyfile', + content: pull.empty() + }]), + importer(ipld, options), + collect((err, nodes) => { + expect(err).to.not.exist() + expect(nodes.length).to.be.eql(1) + + // always yield empty node + expect(new CID(nodes[0].multihash).toBaseEncodedString()).to.be.eql('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH') + done() + })) + }) + + it('fails on more than one root', (done) 
=> { + pull( + values([ + { + path: '/beep/200Bytes.txt', + content: values([smallFile]) + }, + { + path: '/boop/200Bytes.txt', + content: values([bigFile]) + } + ]), + importer(ipld, options), + pull.onEnd((err) => { + expect(err).to.exist() + expect(err.message).to.be.eql('detected more than one root') + done() + }) + ) + }) + + it('small file with an escaped slash in the title', (done) => { + const filePath = `small-\\/file-${Math.random()}.txt` + + pull( + values([{ + path: filePath, + content: values([smallFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.equal(1) + expect(files[0].path).to.equal(filePath) + done() + }) + ) + }) + + it('small file with square brackets in the title', (done) => { + const filePath = `small-[v]-file-${Math.random()}.txt` + + pull( + values([{ + path: filePath, + content: values([smallFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.equal(1) + expect(files[0].path).to.equal(filePath) + done() + }) + ) + }) + + it('small file (smaller than a chunk)', (done) => { + pull( + values([{ + path: '200Bytes.txt', + content: values([smallFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']]) + done() + }) + ) + }) + + it('small file (smaller than a chunk) with raw leaves', (done) => { + pull( + values([{ + path: '200Bytes.txt', + content: values([smallFile]) + }]), + importer(ipld, Object.assign({}, options, { rawLeaves: true })), + collect((err, files) => { + expect(err).to.not.exist() + expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt with raw leaves']]) + done() + }) + ) + }) + + it('small file as buffer (smaller than a chunk)', (done) => { + pull( + values([{ + path: '200Bytes.txt', + content: smallFile + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']]) + done() + }) + ) + }) + + it('small file (smaller than a chunk) inside a dir', (done) => { + pull( + values([{ + path: 'foo/bar/200Bytes.txt', + content: values([smallFile]) + }]), + importer(ipld, options), + collect(collected) + ) + + function collected (err, files) { + expect(err).to.not.exist() + expect(files.length).to.equal(3) + stringifyMh(files).forEach((file) => { + if (file.path === 'foo/bar/200Bytes.txt') { + expect(file).to.be.eql(expected['foo/bar/200Bytes.txt']) + } + if (file.path === 'foo') { + expect(file).to.be.eql(expected.foo) + } + if (file.path === 'foo/bar') { + expect(file).to.be.eql(expected['foo/bar']) + } + }) + done() + } + }) + + it('file bigger than a single chunk', function (done) { + this.timeout(60 * 1000) + pull( + values([{ + path: '1.2MiB.txt', + content: values([bigFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + expect(stringifyMh(files)).to.be.eql([expected['1.2MiB.txt']]) + done() + }) + ) + }) + + it('file bigger than a single chunk inside a dir', function (done) { + this.timeout(60 * 1000) + pull( + values([{ + path: 'foo-big/1.2MiB.txt', + content: values([bigFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + + expect(stringifyMh(files)).to.be.eql([ + expected['foo-big/1.2MiB.txt'], + expected['foo-big'] + ]) + + done() + }) + ) + }) + + it('empty directory', (done) => { + pull( + values([{ + path: 
'empty-dir' + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + + expect(stringifyMh(files)).to.be.eql([expected['empty-dir']]) + + done() + }) + ) + }) + + it('directory with files', (done) => { + pull( + values([{ + path: 'pim/200Bytes.txt', + content: values([smallFile]) + }, { + path: 'pim/1.2MiB.txt', + content: values([bigFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + + expect(stringifyMh(files)).be.eql([ + expected['pim/200Bytes.txt'], + expected['pim/1.2MiB.txt'], + expected.pim] + ) + + done() + }) + ) + }) + + it('nested directory (2 levels deep)', (done) => { + pull( + values([{ + path: 'pam/pum/200Bytes.txt', + content: values([smallFile]) + }, { + path: 'pam/pum/1.2MiB.txt', + content: values([bigFile]) + }, { + path: 'pam/1.2MiB.txt', + content: values([bigFile]) + }]), + importer(ipld, options), + collect((err, files) => { + expect(err).to.not.exist() + + // need to sort as due to parallel storage the order + // can vary + stringifyMh(files).forEach(eachFile) + + done() + }) + ) + + function eachFile (file) { + if (file.path === 'pam/pum/200Bytes.txt') { + expect(file.multihash).to.be.eql(expected['200Bytes.txt'].multihash) + expect(file.size).to.be.eql(expected['200Bytes.txt'].size) + } + if (file.path === 'pam/pum/1.2MiB.txt') { + expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash) + expect(file.size).to.be.eql(expected['1.2MiB.txt'].size) + } + if (file.path === 'pam/pum') { + const dir = expected['pam/pum'] + expect(file.multihash).to.be.eql(dir.multihash) + expect(file.size).to.be.eql(dir.size) + } + if (file.path === 'pam/1.2MiB.txt') { + expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash) + expect(file.size).to.be.eql(expected['1.2MiB.txt'].size) + } + if (file.path === 'pam') { + const dir = expected.pam + expect(file.multihash).to.be.eql(dir.multihash) + expect(file.size).to.be.eql(dir.size) + } + } + }) + + it('will not write to disk if passed "onlyHash" option', (done) => { + const content = String(Math.random() + Date.now()) + const inputFile = { + path: content + '.txt', + content: Buffer.from(content) + } + + const options = { + onlyHash: true + } + + const onCollected = (err, files) => { + if (err) return done(err) + + const file = files[0] + expect(file).to.exist() + + ipld.get(new CID(file.multihash), (err) => { + expect(err).to.exist() + done() + }) + } + + pull( + values([inputFile]), + importer(ipld, options), + collect(onCollected) + ) + }) + + it('will call an optional progress function', (done) => { + options.progress = spy() + + pull( + values([{ + path: '1.2MiB.txt', + content: values([bigFile]) + }]), + importer(ipld, options), + collect(() => { + expect(options.progress.called).to.equal(true) + expect(options.progress.args[0][0]).to.equal(1024) + done() + }) + ) + }) + + it('will import files with CID version 1', (done) => { + const createInputFile = (path, size) => { + const name = String(Math.random() + Date.now()) + path = path[path.length - 1] === '/' ? 
path : path + '/' + return { + path: path + name + '.txt', + content: Buffer.alloc(size).fill(1) + } + } + + const inputFiles = [ + createInputFile('/foo', 10), + createInputFile('/foo', 60), + createInputFile('/foo/bar', 78), + createInputFile('/foo/baz', 200), + // Bigger than maxChunkSize + createInputFile('/foo', 262144 + 45), + createInputFile('/foo/bar', 262144 + 134), + createInputFile('/foo/bar', 262144 + 79), + createInputFile('/foo/bar', 262144 + 876), + createInputFile('/foo/bar', 262144 + 21) + ] + + const options = { + cidVersion: 1, + // Ensures we use DirSharded for the data below + shardSplitThreshold: 3 + } + + const onCollected = (err, files) => { + if (err) return done(err) + + const file = files[0] + expect(file).to.exist() + + each(files, (file, cb) => { + const cid = new CID(file.multihash).toV1() + const inputFile = inputFiles.find(f => f.path === file.path) + + // Just check the intermediate directory can be retrieved + if (!inputFile) { + return ipld.get(cid, cb) + } + + // Check the imported content is correct + pull( + exporter(cid, ipld), + collect((err, nodes) => { + expect(err).to.not.exist() + pull( + nodes[0].content, + collect((err, chunks) => { + expect(err).to.not.exist() + expect(Buffer.concat(chunks)).to.deep.equal(inputFile.content) + cb() + }) + ) + }) + ) + }, done) + } + + pull( + // Pass a copy of inputFiles, since the importer mutates them + values(inputFiles.map(f => Object.assign({}, f))), + importer(ipld, options), + collect(onCollected) + ) + }) + + it('imports file with raw leaf nodes when specified', (done) => { + checkLeafNodeTypes(ipld, { + leafType: 'raw' + }, 'raw', done) + }) + + it('imports file with file leaf nodes when specified', (done) => { + checkLeafNodeTypes(ipld, { + leafType: 'file' + }, 'file', done) + }) + + it('reduces file to single node when specified', (done) => { + checkNodeLinks(ipld, { + reduceSingleLeafToSelf: true + }, 0, done) + }) + + it('does not reduce file to single node when overidden by options', (done) => { + checkNodeLinks(ipld, { + reduceSingleLeafToSelf: false + }, 1, done) + }) + + it('uses raw leaf nodes when requested', (done) => { + this.timeout(60 * 1000) + + options.rawLeaves = true + + pull( + values([{ + path: '1.2MiB.txt', + content: values([bigFile]) + }]), + importer(ipld, options), + collect((error, files) => { + expect(error).to.not.exist() + + const node = files[0] + + collectLeafCids(ipld, node.multihash, (error, cids) => { + expect(error).to.be.not.ok() + + const rawNodes = cids + .filter(cid => cid.codec === 'raw') + + expect(rawNodes).to.not.be.empty() + + rawNodes + .forEach(cid => { + expect(cid.version).to.equal(1) + }) + + done() + }) + }) + ) + }) + }) +}) diff --git a/test/node.js b/test/node.js deleted file mode 100644 index 196394e..0000000 --- a/test/node.js +++ /dev/null @@ -1,61 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const ncp = require('ncp').ncp -const rimraf = require('rimraf') -const path = require('path') -const os = require('os') -const IPFSRepo = require('ipfs-repo') -const mkdirp = require('mkdirp') -const series = require('async/series') - -describe('IPFS UnixFS Engine', () => { - const repoExample = path.join(process.cwd(), 'test', 'test-repo') - const repoTests = path.join(os.tmpdir(), 'unixfs-tests-' + Date.now()) - - const repo = new IPFSRepo(repoTests) - - before((done) => { - const paths = [ - 'test-data/dir-nested/dir-another', - 'test-data/dir-nested/level-1/level-2' - ] - process.env.IPFS_PATH = repoTests - series([ - (cb) => ncp(repoExample, 
repoTests, cb),
-      (cb) => repo.open(cb),
-      (cb) => series(paths.map((p) => (cb) => {
-        mkdirp(path.join(__dirname, p), cb)
-      }), cb)
-    ], done)
-  })
-
-  after((done) => {
-    series([
-      (cb) => repo.close(cb),
-      (cb) => rimraf(repoTests, cb)
-    ], done)
-  })
-
-  // Chunkers
-  require('./chunker-fixed-size')
-  require('./chunker-rabin')
-
-  // Graph Builders
-  require('./builder')(repo)
-  require('./builder-flat')
-  require('./builder-balanced')
-  require('./builder-trickle-dag')
-  require('./builder-only-hash')(repo)
-  require('./builder-dir-sharding')(repo)
-
-  // Importer
-  require('./importer')(repo)
-  require('./importer-flush')(repo)
-
-  // Other
-  require('./import-export')(repo)
-  require('./import-export-nested-dir')(repo)
-  require('./hash-parity-with-go-ipfs')(repo)
-  require('./with-dag-api')
-})
diff --git a/test/with-dag-api.js b/test/with-dag-api.spec.js
similarity index 99%
rename from test/with-dag-api.js
rename to test/with-dag-api.spec.js
index 3f1d8e4..c1a8950 100644
--- a/test/with-dag-api.js
+++ b/test/with-dag-api.spec.js
@@ -176,8 +176,6 @@ describe('with dag-api', function () {
     }
 
   before(function (done) {
-    this.timeout(30 * 1000)
-
     IPLD.inMemory((err, resolver) => {
       if (err) {
         return done(err)