diff --git a/.travis.yml b/.travis.yml
index b5409ef4..1be64a39 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -7,7 +7,7 @@ matrix:
       env: CXX=g++-4.8
     - node_js: 6
       env:
-        - SAUCE=true
+        - SAUCE=false
         - CXX=g++-4.8
     - node_js: stable
       env: CXX=g++-4.8
@@ -34,4 +34,4 @@ addons:
     sources:
       - ubuntu-toolchain-r-test
     packages:
-      - g++-4.8
\ No newline at end of file
+      - g++-4.8
diff --git a/README.md b/README.md
index 95cc04d0..b9b747a4 100644
--- a/README.md
+++ b/README.md
@@ -12,8 +12,6 @@ IPFS unixFS Engine
 ![](https://img.shields.io/badge/npm-%3E%3D3.0.0-orange.svg?style=flat-square)
 ![](https://img.shields.io/badge/Node.js-%3E%3D4.0.0-orange.svg?style=flat-square)
 
-[![Sauce Test Status](https://saucelabs.com/browser-matrix/ipfs-unixfs-engine.svg)](https://saucelabs.com/u/ipfs-unixfs-engine)
-
 > JavaScript implementation of the layout and chunking mechanisms used by IPFS
 
 ## Table of Contents
@@ -29,20 +27,10 @@ IPFS unixFS Engine
 - [Contribute](#contribute)
 - [License](#license)
 
-## BEWARE BEWARE BEWARE there might be 🐉
-
-This module has passed through several iterations and still is far from a nice and easy understandable codebase. Currently missing features:
-
-- [ ] tar importer
-- [x] trickle dag exporter
-- [ ] sharding (HAMT)
-
 ## Install
 
-With [npm](https://npmjs.org/) installed, run
-
 ```
-$ npm install ipfs-unixfs-engine
+> npm install ipfs-unixfs-engine
 ```
 
 ## Usage
@@ -51,29 +39,16 @@ $ npm install ipfs-unixfs-engine
 ### Example Importer
 
 Let's create a little directory to import:
 
 ```sh
-$ cd /tmp
-$ mkdir foo
-$ echo 'hello' > foo/bar
-$ echo 'world' > foo/quux
+> cd /tmp
+> mkdir foo
+> echo 'hello' > foo/bar
+> echo 'world' > foo/quux
 ```
 
 And write the importing logic:
 
 ```js
-// Dependencies to create a DAG Service (where the dir will be imported into)
-const memStore = require('abstract-blob-store')
-const Repo = require('ipfs-repo')
-const Block = require('ipfs-block')
-const BlockService = require('ipfs-block-service')
-const MerkleDag = require('ipfs-merkle-dag')
-const fs = require('fs')
-
-const repo = new Repo('', { stores: memStore })
-const blockService = new BlockService(repo)
-const dagService = new ipfsMerkleDag.DAGService(blocks)
-
-
 const Importer = require('ipfs-unixfs-engine').Importer
-const filesAddStream = new Importer(dagService)
+const filesAddStream = new Importer(<dag or ipld-resolver instance>)
-filesAddStream.on('data', (info) => {
-  res.push(info)
-})
+filesAddStream.on('data', (info) => res.push(info))
 
 // The end event of the stream signals that the importer is done
-
-filesAddStream.on('end', () => {
-  console.log('Finished filesAddStreaming files!')
-})
+filesAddStream.on('end', () => console.log('Finished filesAddStreaming files!'))
 
 // Calling write on the importer to filesAddStream the file/object tuples
-
 filesAddStream.write(input)
 filesAddStream.write(input2)
 filesAddStream.end()
@@ -129,7 +96,7 @@ When run, the stat of DAG Node is outputted for each file on data event until th
 ### Importer API
 
 ```js
-const Importer = require('ipfs-unixfs-engine').importer
+const Importer = require('ipfs-unixfs-engine').Importer
 ```
 
 #### const add = new Importer(dag)
@@ -173,24 +140,11 @@ In the second argument of the importer constructor you can specify the following
 ### Example Exporter
 
 ```
-const Repo = require('ipfs-repo')
-const Block = require('ipfs-block')
-const BlockService = require('ipfs-block-service')
-const MerkleDAG = require('ipfs-merkle-dag')
-
-const repo = new Repo('', { stores: memStore })
-const blockService = new BlockService(repo)
-const dagService = new MerkleDag.DAGService(blockService)
-
 // Create an export readable object stream with the hash you want to export and a dag service
-
-const filesStream = Exporter(<hash>, dag)
+const filesStream = Exporter(<hash>, <dag or ipld-resolver instance>)
 
 // Pipe the return stream to console
-
-filesStream.on('data', (file) => {
-  file.content.pipe(process.stdout)
-}
+filesStream.on('data', (file) => file.content.pipe(process.stdout))
 ```
 
 ### Exporter: API
 
@@ -199,9 +153,9 @@ filesStream.on('data', (file) => {
 const Exporter = require('ipfs-unixfs-engine').Exporter
 ```
 
-### new Exporter(hash, dagService)
+### new Exporter(<hash>, <dag or ipld-resolver instance>)
 
-Uses the given [DAG Service][] to fetch an IPFS [UnixFS][] object(s) by their multiaddress.
+Uses the given [dag API or an ipld-resolver instance][IPLD Resolver] to fetch IPFS [UnixFS][] objects by their multihash.
 
 Creates a new readable stream in object mode that outputs objects of the form
 
@@ -215,7 +169,7 @@ Creates a new readable stream in object mode that outputs objects of the form
 Errors are received as with a normal stream, by listening on the `'error'` event to be emitted.
 
-[DAG Service]: https://github.com/vijayee/js-ipfs-merkle-dag/
+[IPLD Resolver]: https://github.com/ipld/js-ipld-resolver
 [UnixFS]: https://github.com/ipfs/specs/tree/master/unixfs
 
 ## Contribute
 
diff --git a/package.json b/package.json
index 2acabe4e..724a66ef 100644
--- a/package.json
+++ b/package.json
@@ -39,11 +39,12 @@
   },
   "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme",
   "devDependencies": {
-    "aegir": "^11.0.1",
+    "aegir": "^11.0.2",
     "chai": "^3.5.0",
     "dirty-chai": "^1.2.2",
-    "ipfs-block-service": "^0.9.0",
-    "ipfs-repo": "^0.13.0",
+    "ipfs": "^0.24.0",
+    "ipfs-block-service": "^0.9.1",
+    "ipfs-repo": "^0.13.1",
     "ncp": "^2.0.0",
     "pre-commit": "^1.2.2",
     "pull-generate": "^2.2.0",
@@ -52,27 +53,27 @@
     "split": "^1.0.0"
   },
   "dependencies": {
-    "async": "^2.1.5",
+    "async": "^2.4.1",
     "cids": "^0.5.0",
-    "deep-extend": "^0.4.1",
+    "deep-extend": "^0.5.0",
     "ipfs-unixfs": "^0.1.11",
     "ipld-dag-pb": "^0.11.0",
-    "ipld-resolver": "^0.11.0",
+    "ipld-resolver": "^0.11.1",
     "is-ipfs": "^0.3.0",
     "left-pad": "^1.1.3",
     "lodash": "^4.17.4",
     "multihashes": "^0.4.5",
     "multihashing-async": "^0.4.5",
     "pull-batch": "^1.0.0",
-    "pull-block": "^1.1.0",
+    "pull-block": "^1.2.0",
     "pull-cat": "^1.1.11",
     "pull-pair": "^1.1.0",
-    "pull-paramap": "^1.2.1",
+    "pull-paramap": "^1.2.2",
     "pull-pause": "0.0.1",
-    "pull-pushable": "^2.0.1",
-    "pull-stream": "^3.5.0",
+    "pull-pushable": "^2.1.1",
+    "pull-stream": "^3.6.0",
     "pull-traverse": "^1.0.3",
-    "pull-write": "^1.1.1",
+    "pull-write": "^1.1.2",
     "sparse-array": "^1.3.1"
   },
   "contributors": [
@@ -87,4 +88,4 @@
     "jbenet ",
     "nginnever "
   ]
-}
\ No newline at end of file
+}
diff --git a/test/node.js b/test/node.js
index fce307cb..7508b997 100644
--- a/test/node.js
+++ b/test/node.js
@@ -50,4 +50,5 @@ describe('IPFS UnixFS Engine', () => {
   require('./test-hash-parity-with-go-ipfs')(repo)
   require('./test-nested-dir-import-export')(repo)
   require('./test-dirbuilder-sharding')(repo)
+  require('./test-dag-api')
 })
diff --git a/test/test-dag-api.js b/test/test-dag-api.js
new file mode 100644
index 00000000..9ef37517
--- /dev/null
+++ b/test/test-dag-api.js
@@ -0,0 +1,426 @@
+/* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 8] */
+
+'use strict'
+
+const importer = require('./../src').importer
+
+const extend = require('deep-extend')
+const chai = require('chai')
+chai.use(require('dirty-chai'))
+const expect = chai.expect
+const pull = require('pull-stream')
+const mh = require('multihashes')
+const loadFixture = require('aegir/fixtures')
+const IPFS = require('ipfs')
+
+function stringifyMh (files) {
+  return files.map((file) => {
+    file.multihash = mh.toB58String(file.multihash)
+    return file
+  })
+}
+
+const bigFile = loadFixture(__dirname, 'fixtures/1.2MiB.txt')
+const smallFile = loadFixture(__dirname, 'fixtures/200Bytes.txt')
+
+const baseFiles = {
+  '200Bytes.txt': {
+    path: '200Bytes.txt',
+    multihash: 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8',
+    size: 211,
+    name: '',
+    leafSize: 200
+  },
+  '1.2MiB.txt': {
+    path: '1.2MiB.txt',
+    multihash: 'QmbPN6CXXWpejfQgnRYnMQcVYkFHEntHWqLNQjbkatYCh1',
+    size: 1328062,
+    name: '',
+    leafSize: 1258000
+  }
+}
+
+const strategyBaseFiles = {
+  flat: baseFiles,
+  balanced: extend({}, baseFiles, {
+    '1.2MiB.txt': {
+      multihash: 'QmeEGqUisUD2T6zU96PrZnCkHfXCGuQeGWKu4UoSuaZL3d',
+      size: 1335420
+    }
+  }),
+  trickle: extend({}, baseFiles, {
+    '1.2MiB.txt': {
+      multihash: 'QmaiSohNUt1rBf2Lqz6ou54NHVPTbXbBoPuq9td4ekcBx4',
+      size: 1334599
+    }
+  })
+}
+
+const strategies = [
+  'flat',
+  'balanced',
+  'trickle'
+]
+
+const strategyOverrides = {
+  balanced: {
+    'foo-big': {
+      path: 'foo-big',
+      multihash: 'QmQ1S6eEamaf4t948etp8QiYQ9avrKCogiJnPRgNkVreLv',
+      size: 1335478
+    },
+    pim: {
+      multihash: 'QmUpzaN4Jio2GB3HoPSRCMQD5EagdMWjSEGD4SGZXaCw7W',
+      size: 1335744
+    },
+    'pam/pum': {
+      multihash: 'QmUpzaN4Jio2GB3HoPSRCMQD5EagdMWjSEGD4SGZXaCw7W',
+      size: 1335744
+    },
+    pam: {
+      multihash: 'QmVoVD4fEWFLJLjvRCg4bGrziFhgECiaezp79AUfhuLgno',
+      size: 2671269
+    }
+  },
+  trickle: {
+    'foo-big': {
+      path: 'foo-big',
+      multihash: 'QmPh6KSS7ghTqzgWhaoCiLoHFPF7HGqUxx7q9vcM5HUN4U',
+      size: 1334657
+    },
+    pim: {
+      multihash: 'QmPAn3G2x2nrq4A1fu2XUpwWtpqG4D1YXFDrU615NHvJbr',
+      size: 1334923
+    },
+    'pam/pum': {
+      multihash: 'QmPAn3G2x2nrq4A1fu2XUpwWtpqG4D1YXFDrU615NHvJbr',
+      size: 1334923
+    },
+    pam: {
+      multihash: 'QmZTJah1xpG9X33ZsPtDEi1tYSHGDqQMRHsGV5xKzAR2j4',
+      size: 2669627
+    }
+  }
+
+}
+
+describe('with dag-api', () => {
+  strategies.forEach(strategy => {
+    const baseFiles = strategyBaseFiles[strategy]
+    const defaultResults = extend({}, baseFiles, {
+      'foo/bar/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], {
+        path: 'foo/bar/200Bytes.txt'
+      }),
+      foo: {
+        path: 'foo',
+        multihash: 'QmQrb6KKWGo8w7zKfx2JksptY6wN7B2ysSBdKZr4xMU36d',
+        size: 320
+      },
+      'foo/bar': {
+        path: 'foo/bar',
+        multihash: 'Qmf5BQbTUyUAvd6Ewct83GYGnE1F6btiC3acLhR8MDxgkD',
+        size: 270
+      },
+      'foo-big/1.2MiB.txt': extend({}, baseFiles['1.2MiB.txt'], {
+        path: 'foo-big/1.2MiB.txt'
+      }),
+      'foo-big': {
+        path: 'foo-big',
+        multihash: 'Qma6JU3FoXU9eAzgomtmYPjzFBwVc2rRbECQpmHFiA98CJ',
+        size: 1328120
+      },
+      'pim/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], {
+        path: 'pim/200Bytes.txt'
+      }),
+      'pim/1.2MiB.txt': extend({}, baseFiles['1.2MiB.txt'], {
+        path: 'pim/1.2MiB.txt'
+      }),
+      pim: {
+        path: 'pim',
+        multihash: 'QmNk8VPGb3fkAQgoxctXo4Wmnr4PayFTASy4MiVXTtXqiA',
+        size: 1328386
+      },
+      'empty-dir': {
+        path: 'empty-dir',
+        multihash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn',
+        size: 4
+      },
+      'pam/pum': {
+        multihash: 'QmNk8VPGb3fkAQgoxctXo4Wmnr4PayFTASy4MiVXTtXqiA',
+        size: 1328386
+      },
+      pam: {
+        multihash: 'QmPAixYTaYnPe795fcWcuRpo6tfwHgRKNiBHpMzoomDVN6',
+        size: 2656553
+      }
+    }, strategyOverrides[strategy])
+
+    const expected = extend({}, defaultResults, strategies[strategy])
+
+    describe(strategy + ' importer', () => {
+      let node
+
+      const options = {
+        strategy: strategy,
+        maxChildrenPerNode: 10,
+        chunkerOptions: {
+          maxChunkSize: 1024
+        }
+      }
+
+      before((done) => {
+        node = new IPFS({
+          repo: '/tmp/unixfs-test-' + Math.random(),
+          start: false
+        })
+
+        node.on('ready', done)
+      })
+
+      it('fails on bad input', (done) => {
+        pull(
+          pull.values([{
+            path: '200Bytes.txt',
+            content: 'banana'
+          }]),
+          importer(node.dag, options),
+          pull.onEnd((err) => {
+            expect(err).to.exist()
+            done()
+          })
+        )
+      })
+
+      it('doesn\'t yield anything on empty source', (done) => {
+        pull(
+          pull.empty(),
+          importer(node.dag, options),
+          pull.collect((err, nodes) => {
+            expect(err).to.not.exist()
+            expect(nodes.length).to.be.eql(0)
+            done()
+          }))
+      })
+
+      it('doesn\'t yield anything on empty file', (done) => {
+        pull(
+          pull.values([{
+            path: 'emptyfile',
+            content: pull.empty()
+          }]),
+          importer(node.dag, options),
+          pull.collect((err, nodes) => {
+            expect(err).to.not.exist()
+            expect(nodes.length).to.be.eql(1)
+            // always yield empty node
+            expect(mh.toB58String(nodes[0].multihash)).to.be.eql('QmfJMCvenrj4SKKRc48DYPxwVdS44qCUCqqtbqhJuSTWXP')
+            done()
+          }))
+      })
+
+      it('fails on more than one root', (done) => {
+        pull(
+          pull.values([
+            {
+              path: '/beep/200Bytes.txt',
+              content: pull.values([smallFile])
+            },
+            {
+              path: '/boop/200Bytes.txt',
+              content: pull.values([smallFile])
+            }
+          ]),
+          importer(node.dag, options),
+          pull.onEnd((err) => {
+            expect(err).to.exist()
+            expect(err.message).to.be.eql('detected more than one root')
+            done()
+          })
+        )
+      })
+
+      it('small file (smaller than a chunk)', (done) => {
+        pull(
+          pull.values([{
+            path: '200Bytes.txt',
+            content: pull.values([smallFile])
+          }]),
+          importer(node.dag, options),
+          pull.collect((err, files) => {
+            expect(err).to.not.exist()
+            expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']])
+            done()
+          })
+        )
+      })
+
+      it('small file as buffer (smaller than a chunk)', (done) => {
+        pull(
+          pull.values([{
+            path: '200Bytes.txt',
+            content: smallFile
+          }]),
+          importer(node.dag, options),
+          pull.collect((err, files) => {
+            expect(err).to.not.exist()
+            expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']])
+            done()
+          })
+        )
+      })
+
+      it('small file (smaller than a chunk) inside a dir', (done) => {
+        pull(
+          pull.values([{
+            path: 'foo/bar/200Bytes.txt',
+            content: pull.values([smallFile])
+          }]),
+          importer(node.dag, options),
+          pull.collect(collected)
+        )
+
+        function collected (err, files) {
+          expect(err).to.not.exist()
+          expect(files.length).to.equal(3)
+          stringifyMh(files).forEach((file) => {
+            if (file.path === 'foo/bar/200Bytes.txt') {
+              expect(file).to.be.eql(expected['foo/bar/200Bytes.txt'])
+            }
+            if (file.path === 'foo') {
+              expect(file).to.be.eql(expected.foo)
+            }
+            if (file.path === 'foo/bar') {
+              expect(file).to.be.eql(expected['foo/bar'])
+            }
+          })
+          done()
+        }
+      })
+
+      it('file bigger than a single chunk', (done) => {
+        pull(
+          pull.values([{
+            path: '1.2MiB.txt',
+            content: pull.values([bigFile])
+          }]),
+          importer(node.dag, options),
+          pull.collect((err, files) => {
+            expect(err).to.not.exist()
+            expect(stringifyMh(files)).to.be.eql([expected['1.2MiB.txt']])
+            done()
+          })
+        )
+      })
+
+      it('file bigger than a single chunk inside a dir', (done) => {
+        pull(
+          pull.values([{
+            path: 'foo-big/1.2MiB.txt',
+            content: pull.values([bigFile])
+          }]),
+          importer(node.dag, options),
+          pull.collect((err, files) => {
+            expect(err).to.not.exist()
+
+            expect(stringifyMh(files)).to.be.eql([
+              expected['foo-big/1.2MiB.txt'],
+              expected['foo-big']
+            ])
+
+            done()
+          })
+        )
+      })
+
+      it('empty directory', (done) => {
+        pull(
+          pull.values([{
+            path: 'empty-dir'
+          }]),
+          importer(node.dag, options),
+          pull.collect((err, files) => {
+            expect(err).to.not.exist()
+
+            expect(stringifyMh(files)).to.be.eql([expected['empty-dir']])
+
+            done()
+          })
+        )
+      })
+
+      it('directory with files', (done) => {
+        pull(
+          pull.values([{
+            path: 'pim/200Bytes.txt',
+            content: pull.values([smallFile])
+          }, {
+            path: 'pim/1.2MiB.txt',
+            content: pull.values([bigFile])
+          }]),
+          importer(node.dag, options),
+          pull.collect((err, files) => {
+            expect(err).to.not.exist()
+
+            expect(stringifyMh(files)).be.eql([
+              expected['pim/200Bytes.txt'],
+              expected['pim/1.2MiB.txt'],
+              expected.pim]
+            )
+
+            done()
+          })
+        )
+      })
+
+      it('nested directory (2 levels deep)', (done) => {
+        pull(
+          pull.values([{
+            path: 'pam/pum/200Bytes.txt',
+            content: pull.values([smallFile])
+          }, {
+            path: 'pam/pum/1.2MiB.txt',
+            content: pull.values([bigFile])
+          }, {
+            path: 'pam/1.2MiB.txt',
+            content: pull.values([bigFile])
+          }]),
+          importer(node.dag, options),
+          pull.collect((err, files) => {
+            expect(err).to.not.exist()
+
+            // need to sort as due to parallel storage the order can vary
+            stringifyMh(files).forEach(eachFile)
+
+            done()
+          })
+        )
+
+        function eachFile (file) {
+          if (file.path === 'pam/pum/200Bytes.txt') {
+            expect(file.multihash).to.be.eql(expected['200Bytes.txt'].multihash)
+            expect(file.size).to.be.eql(expected['200Bytes.txt'].size)
+          }
+          if (file.path === 'pam/pum/1.2MiB.txt') {
+            expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash)
+            expect(file.size).to.be.eql(expected['1.2MiB.txt'].size)
+          }
+          if (file.path === 'pam/pum') {
+            const dir = expected['pam/pum']
+            expect(file.multihash).to.be.eql(dir.multihash)
+            expect(file.size).to.be.eql(dir.size)
+          }
+          if (file.path === 'pam/1.2MiB.txt') {
+            expect(file.multihash).to.be.eql(expected['1.2MiB.txt'].multihash)
+            expect(file.size).to.be.eql(expected['1.2MiB.txt'].size)
+          }
+          if (file.path === 'pam') {
+            const dir = expected.pam
+            expect(file.multihash).to.be.eql(dir.multihash)
+            expect(file.size).to.be.eql(dir.size)
+          }
+        }
+      })
+    })
+  })
+})
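For reviewers who want to try the new code path outside the test suite, the following is a minimal sketch of the flow `test/test-dag-api.js` exercises: `{ path, content }` tuples pushed through the pull-stream `importer`, backed by a js-ipfs node's `dag` API. The repo path, file content and option values are illustrative only, and the snippet assumes the published package exposes the same `importer` function the tests require from `./../src`.

```js
// Minimal sketch of the flow the new test exercises (test/test-dag-api.js):
// { path, content } tuples -> pull-stream importer -> a js-ipfs node's dag API.
// Assumptions: the package root exposes `importer` (the tests require it from
// './../src'), and a js-ipfs node created with `start: false` is enough to back
// the dag API, exactly as in the new test.
const pull = require('pull-stream')
const mh = require('multihashes')
const IPFS = require('ipfs')
const importer = require('ipfs-unixfs-engine').importer

const node = new IPFS({
  repo: '/tmp/unixfs-example-' + Math.random(),
  start: false
})

node.on('ready', () => {
  pull(
    // one file/object tuple; content may be a Buffer or a pull-stream source
    pull.values([{
      path: 'foo/hello.txt',
      content: Buffer.from('hello unixfs\n')
    }]),
    // same options shape the test passes: strategy, fan-out and chunker settings
    importer(node.dag, {
      strategy: 'balanced',
      maxChildrenPerNode: 10,
      chunkerOptions: { maxChunkSize: 1024 }
    }),
    pull.collect((err, files) => {
      if (err) throw err
      // one entry per file plus one per wrapping directory: { path, multihash, size }
      files.forEach((file) => {
        console.log(file.path, mh.toB58String(file.multihash), file.size)
      })
    })
  )
})
```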
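The exporter side is not covered by the new test, so this second sketch is based only on the README section changed above; it assumes `Exporter` accepts a multihash plus the same dag/ipld-resolver instance used for importing and emits `{ path, content }` objects whose content can be piped.

```js
// Hedged sketch of the Exporter usage described in the README hunk above; the
// hash value and the reuse of `node.dag` are assumptions, not taken from this PR.
const Exporter = require('ipfs-unixfs-engine').Exporter

// multihash of a previously imported file (illustrative value)
const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'

// `node` is the js-ipfs instance from the importer sketch above
const filesStream = new Exporter(hash, node.dag)

filesStream.on('data', (file) => file.content.pipe(process.stdout))
filesStream.on('error', (err) => console.error('export failed', err))
```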