From d0485ef9423e6c071fa718993e2f10b39330bd8b Mon Sep 17 00:00:00 2001
From: David Dias
Date: Tue, 28 Jun 2016 09:08:20 +0100
Subject: [PATCH 1/3] update the readme

---
 README.md | 69 ++++++++++++++++++++++++++-----------------------------
 1 file changed, 33 insertions(+), 36 deletions(-)

diff --git a/README.md b/README.md
index 596ac1eb..27e91107 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,5 @@
-# IPFS unixFS Engine
+IPFS unixFS Engine
+==================
 
 [![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io)
 [![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/)
@@ -48,19 +49,19 @@ And write the importing logic:
 ```js
 // Dependencies to create a DAG Service (where the dir will be imported into)
 const memStore = require('abstract-blob-store')
-const ipfsRepo = require('ipfs-repo')
-const ipfsBlock = require('ipfs-block')
-const ipfsBlockService = require('ipfs-block-service')
-const ipfsMerkleDag = require('ipfs-merkle-dag')
+const Repo = require('ipfs-repo')
+const Block = require('ipfs-block')
+const BlockService = require('ipfs-block-service')
+const MerkleDag = require('ipfs-merkle-dag')
 const fs = require('fs')
 
-const repo = new ipfsRepo('', { stores: memStore })
-const blocks = new ipfsBlockService(repo)
-const dag = new ipfsMerkleDag.DAGService(blocks)
+const repo = new Repo('', { stores: memStore })
+const blockService = new BlockService(repo)
+const dagService = new MerkleDag.DAGService(blockService)
 
-const Importer = require('ipfs-unixfs-engine').importer
-const add = new Importer(dag)
+const Importer = require('ipfs-unixfs-engine').Importer
+const filesAddStream = new Importer(dagService)
 
 // An array to hold the return of nested file/dir info from the importer
 // A root DAG Node is received upon completion
@@ -76,26 +77,24 @@ const input2 = {path: /tmp/foo/quxx, content: rs2}
 
 // Listen for the data event from the importer stream
 
-add.on('data', (info) => {
+filesAddStream.on('data', (info) => {
   res.push(info)
 })
 
 // The end event of the stream signals that the importer is done
 
-add.on('end', () => {
-  console.log('Finished adding files!')
-  return
+filesAddStream.on('end', () => {
+  console.log('Finished adding files!')
 })
 
-// Calling write on the importer to add the file/object tuples
+// Call write on the importer to add the file/object tuples
 
-add.write(input)
-add.write(input2)
-add.end()
+filesAddStream.write(input)
+filesAddStream.write(input2)
+filesAddStream.end()
 ```
 
 When run, the stat of DAG Node is outputted for each file on data event until the root:
 
-
 ```
 {
   multihash: ,
   size: 39243,
   path: '/tmp/foo/bar' }
@@ -143,38 +142,37 @@ Nodes.
 ### Example Exporter
 
 ```
-const ipfsRepo = require('ipfs-repo')
-const ipfsBlock = require('ipfs-block')
-const ipfsBlockService = require('ipfs-block-service')
-const ipfsMerkleDag = require('ipfs-merkle-dag')
+const Repo = require('ipfs-repo')
+const Block = require('ipfs-block')
+const BlockService = require('ipfs-block-service')
+const MerkleDAG = require('ipfs-merkle-dag')
 
-const repo = new ipfsRepo('', { stores: memStore })
-const blocks = new ipfsBlockService(repo)
-const dag = new ipfsMerkleDag.DAGService(blocks)
+const repo = new Repo('', { stores: memStore })
+const blockService = new BlockService(repo)
+const dagService = new MerkleDAG.DAGService(blockService)
 
 // Create an export readable object stream with the hash you want to export and a dag service
-const exportEvent = Exporter(hash, dag)
+const filesStream = Exporter(hash, dagService)
 
 // Pipe the return stream to console
-exportEvent.on('data', (result) => {
-  result.stream.pipe(process.stdout)
+filesStream.on('data', (file) => {
+  file.content.pipe(process.stdout)
 }
 ```
 
 ### Exporter: API
+
 ```js
-const Exporter = require('ipfs-unixfs-engine').exporter
+const Exporter = require('ipfs-unixfs-engine').Exporter
 ```
 
 ### new Exporter(hash, dagService)
 
-Uses the given [DAG Service][] to fetch an IPFS [UnixFS][] object(s) by their
-multiaddress.
+Uses the given [DAG Service][] to fetch IPFS [UnixFS][] objects by their multihash.
 
-Creates a new readable stream in object mode that outputs objects of the
-form
+Creates a new readable stream in object mode that outputs objects of the form
 
 ```js
 {
@@ -183,8 +181,7 @@ form
 }
 ```
 
-Errors are received as with a normal stream, by listening on the `'error'` event
-to be emitted.
+Errors are received as with a normal stream, by listening for the `'error'` event.
 
 [DAG Service]: https://github.com/vijayee/js-ipfs-merkle-dag/
 

From 250aed41006d2bd388915eeb607d0dcd324bf0fa Mon Sep 17 00:00:00 2001
From: David Dias
Date: Tue, 28 Jun 2016 09:08:32 +0100
Subject: [PATCH 2/3] clean up source

---
 src/exporter.js | 102 ++++++++++++++++++++----------------------------
 src/importer.js |   4 +-
 2 files changed, 45 insertions(+), 61 deletions(-)

diff --git a/src/exporter.js b/src/exporter.js
index 1833cf41..afafef6c 100644
--- a/src/exporter.js
+++ b/src/exporter.js
@@ -1,10 +1,9 @@
 'use strict'
 
 const debug = require('debug')
-const log = debug('exporter')
-log.err = debug('exporter:error')
+const log = debug('unixfs')
+log.err = debug('unixfs:error')
 const isIPFS = require('is-ipfs')
-const bs58 = require('bs58')
 const UnixFS = require('ipfs-unixfs')
 const series = require('run-series')
 const Readable = require('readable-stream').Readable
@@ -21,13 +20,10 @@ function Exporter (hash, dagService, options) {
     return new Exporter(hash, dagService, options)
   }
 
-  // Sanitize hash.
+  // Sanitize hash
   if (!isIPFS.multihash(hash)) {
     throw new Error('not valid multihash')
   }
-  if (Buffer.isBuffer(hash)) {
-    hash = bs58.encode(hash)
-  }
 
   Readable.call(this, { objectMode: true })
 
@@ -36,61 +32,52 @@ function Exporter (hash, dagService, options) {
   this._read = (n) => {}
 
   let fileExporter = (node, name, done) => {
-    let init = false
+    if (!done) {
+      throw new Error('done must be set')
+    }
 
-    if (!done) throw new Error('done must be set')
+    const contentRS = new Readable()
+    contentRS._read = () => {}
 
     // Logic to export a single (possibly chunked) unixfs file.
- var rs = new Readable() if (node.links.length === 0) { const unmarshaledData = UnixFS.unmarshal(node.data) - rs._read = () => { - if (init) { - return - } - init = true - rs.push(unmarshaledData.data) - rs.push(null) - } - this.push({ content: rs, path: name }) + contentRS.push(unmarshaledData.data) + contentRS.push(null) + this.push({ content: contentRS, path: name }) done() } else { - rs._read = () => { - if (init) { - return + const array = node.links.map((link) => { + return (cb) => { + dagService.get(link.hash, (err, res) => { + if (err) { + return cb(err) + } + var unmarshaledData = UnixFS.unmarshal(res.data) + contentRS.push(unmarshaledData.data) + cb() + }) } - init = true - - const array = node.links.map((link) => { - return (cb) => { - dagService.get(link.hash, (err, res) => { - if (err) { - return cb(err) - } - var unmarshaledData = UnixFS.unmarshal(res.data) - rs.push(unmarshaledData.data) - cb() - }) - } - }) - series(array, (err, res) => { - if (err) { - rs.emit('error', err) - return - } - rs.push(null) - return - }) - } - this.push({ content: rs, path: name }) + }) + series(array, (err) => { + if (err) { + return contentRS.emit('error', err) + } + contentRS.push(null) + }) + this.push({ content: contentRS, path: name }) done() } } // Logic to export a unixfs directory. let dirExporter = (node, name, add, done) => { - if (!add) throw new Error('add must be set') - if (!done) throw new Error('done must be set') + if (!add) { + throw new Error('add must be set') + } + if (!done) { + throw new Error('done must be set') + } this.push({content: null, path: name}) @@ -104,32 +91,29 @@ function Exporter (hash, dagService, options) { } // Traverse the DAG asynchronously - var self = this - fieldtrip([{ path: hash, hash: hash }], visit, (err) => { + fieldtrip([{path: hash, hash: hash}], visit.bind(this), (err) => { if (err) { - self.emit('error', err) - return + return this.emit('error', err) } - self.push(null) + this.push(null) }) // Visit function: called once per node in the exported graph function visit (item, add, done) { - dagService.get(item.hash, (err, fetchedNode) => { + dagService.get(item.hash, (err, node) => { if (err) { - self.emit('error', err) - return + return this.emit('error', err) } - const data = UnixFS.unmarshal(fetchedNode.data) + const data = UnixFS.unmarshal(node.data) const type = data.type if (type === 'directory') { - dirExporter(fetchedNode, item.path, add, done) + dirExporter(node, item.path, add, done) } if (type === 'file') { - fileExporter(fetchedNode, item.path, done) + fileExporter(node, item.path, done) } }) } diff --git a/src/importer.js b/src/importer.js index aede8925..019f454a 100644 --- a/src/importer.js +++ b/src/importer.js @@ -1,8 +1,8 @@ 'use strict' const debug = require('debug') -const log = debug('importer') -log.err = debug('importer:error') +const log = debug('unixfs') +log.err = debug('unixfs:error') const fsc = require('./chunker-fixed-size') const through2 = require('through2') const merkleDAG = require('ipfs-merkle-dag') From cacb55cb4b09d5bfdf2e4a561de9a9d20ed1e8b9 Mon Sep 17 00:00:00 2001 From: David Dias Date: Tue, 28 Jun 2016 09:08:47 +0100 Subject: [PATCH 3/3] clean up tests --- test/test-exporter.js | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/test/test-exporter.js b/test/test-exporter.js index abb77456..b7920828 100644 --- a/test/test-exporter.js +++ b/test/test-exporter.js @@ -88,52 +88,52 @@ module.exports = function (repo) { }) 
testExport.pipe(concat((files) => { expect(files[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN') + expect(files[0].content).to.not.exist + expect(files[1].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/200Bytes.txt') + expect(files[1].content).to.exist + expect(files[2].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/dir-another') + expect(files[2].content).to.not.exist + expect(files[3].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1') + expect(files[3].content).to.not.exist + expect(files[4].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt') + expect(files[4].content).to.exist + expect(files[5].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/level-2') + expect(files[5].content).to.not.exist + done() })) }) it('returns a null stream for dir', (done) => { - const hash = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' // This hash doesn't exist in the repo + const hash = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' const bs = new BlockService(repo) const ds = new DAGService(bs) const testExport = exporter(hash, ds) + testExport.on('error', (err) => { expect(err).to.not.exist }) - testExport.on('data', (dir) => { - expect(dir.content).to.equal(null) + + testExport.on('data', (file) => { + expect(file.content).to.not.exist done() }) }) it('fails on non existent hash', (done) => { - const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKj3' // This hash doesn't exist in the repo + // This hash doesn't exist in the repo + const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKj3' const bs = new BlockService(repo) const ds = new DAGService(bs) const testExport = exporter(hash, ds) testExport.on('error', (err) => { - const error = err.toString() expect(err).to.exist - const browser = error.includes('Error: key not found:') - const node = error.includes('no such file or directory') - // the browser and node js return different errors - if (browser) { - expect(error).to.contain('Error: key not found:') - done() - } - if (node) { - expect(error).to.contain('no such file or directory') - done() - } - if (!node && !browser) { - expect(node).to.equal(true) - done() - } + done() }) }) })
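
Taken together, the patched importer and exporter examples can be exercised as a round trip: import a file, then export it again using the root multihash the importer reports. The sketch below is illustrative only and is not part of the patch series; it assumes the `Importer`/`Exporter` exports and the Repo/BlockService/DAGService wiring shown in the patched README, and it guards against the importer reporting `multihash` as a Buffer (an assumption) by base58-encoding it with `bs58`.

```js
// Illustrative round trip (not part of the patches): import one file, then
// export it again by the root multihash. API shapes follow the patched README;
// the Buffer handling for `multihash` is an assumption.
const memStore = require('abstract-blob-store')
const Repo = require('ipfs-repo')
const BlockService = require('ipfs-block-service')
const MerkleDAG = require('ipfs-merkle-dag')
const bs58 = require('bs58')
const fs = require('fs')

const Importer = require('ipfs-unixfs-engine').Importer
const Exporter = require('ipfs-unixfs-engine').Exporter

const repo = new Repo('', { stores: memStore })
const blockService = new BlockService(repo)
const dagService = new MerkleDAG.DAGService(blockService)

const filesAddStream = new Importer(dagService)
const added = []

filesAddStream.on('data', (info) => {
  // Each object carries { multihash, size, path }; the root DAG Node arrives last
  added.push(info)
})

filesAddStream.on('end', () => {
  const root = added[added.length - 1]
  // The exporter expects a multihash; base58-encode it if the importer handed us a Buffer
  const hash = Buffer.isBuffer(root.multihash) ? bs58.encode(root.multihash) : root.multihash

  const filesStream = Exporter(hash, dagService)
  filesStream.on('data', (file) => {
    // Directories come back with no content stream; files carry a Readable
    if (file.content) {
      file.content.pipe(process.stdout)
    }
  })
  filesStream.on('error', (err) => {
    console.error(err)
  })
})

filesAddStream.write({ path: '/tmp/foo/bar', content: fs.createReadStream('/tmp/foo/bar') })
filesAddStream.end()
```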