Skip to content
This repository was archived by the owner on Apr 29, 2020. It is now read-only.

feat: add fullPath option #3

Merged
merged 1 commit into from
Nov 23, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
85 changes: 85 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,91 @@ pull(
)
```

### `fullPath`

If specified, the exporter will emit an entry for every path component encountered.

```javascript
const exporter = require('ipfs-unixfs-exporter')
const pull = require('pull-stream')
const collect = require('pull-stream/sinks/collect')

pull(
exporter('QmFoo.../bar/baz.txt', ipld, {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

For the sake of greater clarity, can we s/ipld/graphService? //cc @vmx

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I would go with ipld for now until there's really something other than IPLD.

fullPath: true
  }),
collect((err, files) => {
console.info(files)

// [{
// depth: 0,
// name: 'QmFoo...',
// path: 'QmFoo...',
// size: ...
// hash: Buffer
// content: undefined
// type: 'dir'
// }, {
// depth: 1,
// name: 'bar',
// path: 'QmFoo.../bar',
// size: ...
// hash: Buffer
// content: undefined
// type: 'dir'
// }, {
// depth: 2,
// name: 'baz.txt',
// path: 'QmFoo.../bar/baz.txt',
// size: ...
// hash: Buffer
// content: <Pull stream>
// type: 'file'
// }]
//
})
)
```

### `maxDepth`

If specified, the exporter will only emit entries up to the specified depth.

```javascript
const exporter = require('ipfs-unixfs-exporter')
const pull = require('pull-stream')
const collect = require('pull-stream/sinks/collect')

pull(
exporter('QmFoo.../bar/baz.txt', ipld, {
fullPath: true,
maxDepth: 1
  }),
collect((err, files) => {
console.info(files)

// [{
// depth: 0,
// name: 'QmFoo...',
// path: 'QmFoo...',
// size: ...
// hash: Buffer
// content: undefined
// type: 'dir'
// }, {
// depth: 1,
// name: 'bar',
// path: 'QmFoo.../bar',
// size: ...
// hash: Buffer
// content: undefined
// type: 'dir'
// }]
//
})
)
```

[dag API]: https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/DAG.md
[ipld-resolver instance]: https://github.com/ipld/js-ipld-resolver
[UnixFS]: https://github.com/ipfs/specs/tree/master/unixfs
Expand Down
4 changes: 2 additions & 2 deletions src/dir-flat.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ const cat = require('pull-cat')
// Logic to export a unixfs directory.
module.exports = dirExporter

function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth) {
function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) {
const accepts = pathRest[0]

const dir = {
Expand Down Expand Up @@ -37,7 +37,7 @@ function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, paren
]

// place dir before if not specifying subtree
if (!pathRest.length) {
if (!pathRest.length || options.fullPath) {
streams.unshift(pull.values([dir]))
}

Expand Down
4 changes: 2 additions & 2 deletions src/dir-hamt-sharded.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ const cat = require('pull-cat')
// Logic to export a unixfs directory.
module.exports = shardedDirExporter

function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth) {
function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) {
let dir
if (!parent || (parent.path !== path)) {
dir = {
Expand Down Expand Up @@ -49,7 +49,7 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag
)
]

if (!pathRest.length) {
if (!pathRest.length || options.fullPath) {
streams.unshift(pull.values([dir]))
}

Expand Down
5 changes: 4 additions & 1 deletion src/file.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ const paramap = require('pull-paramap')
const extractDataFromBlock = require('./extract-data-from-block')

// Logic to export a single (possibly chunked) unixfs file.
module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, offset, length) => {
module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) => {
const accepts = pathRest[0]

if (accepts !== undefined && accepts !== path) {
Expand All @@ -24,6 +24,9 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d

const fileSize = size || file.fileSize()

let offset = options.offset
let length = options.length

if (offset < 0) {
return pull.error(new Error('Offset must be greater than or equal to 0'))
}
Expand Down
5 changes: 3 additions & 2 deletions src/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,8 @@ function pathBaseAndRest (path) {
const defaultOptions = {
maxDepth: Infinity,
offset: undefined,
length: undefined
length: undefined,
fullPath: false
}

module.exports = (path, dag, options) => {
Expand Down Expand Up @@ -70,7 +71,7 @@ module.exports = (path, dag, options) => {
return {
depth: node.depth,
name: node.name,
path: finalPathFor(node),
path: options.fullPath ? node.path : finalPathFor(node),
size: node.size,
hash: node.multihash,
content: node.content,
Expand Down
5 changes: 4 additions & 1 deletion src/raw.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ const pull = require('pull-stream')
const extractDataFromBlock = require('./extract-data-from-block')

// Logic to export a single raw block
module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, offset, length) => {
module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) => {
const accepts = pathRest[0]

if (accepts !== undefined && accepts !== path) {
Expand All @@ -13,6 +13,9 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d

size = size || node.length

let offset = options.offset
let length = options.length

if (offset < 0) {
return pull.error(new Error('Offset must be greater than or equal to 0'))
}
Expand Down
37 changes: 24 additions & 13 deletions src/resolve.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ const UnixFS = require('ipfs-unixfs')
const pull = require('pull-stream')
const paramap = require('pull-paramap')
const CID = require('cids')
const waterfall = require('async/waterfall')

const resolvers = {
directory: require('./dir-flat'),
Expand Down Expand Up @@ -34,30 +35,37 @@ function createResolver (dag, options, depth, parent) {
}

if (item.object) {
return cb(null, resolveItem(null, item.object, item, options.offset, options.length))
return cb(null, resolveItem(null, item.object, item, options))
}

const cid = new CID(item.multihash)

dag.get(cid, (err, node) => {
if (err) {
return cb(err)
}

// const name = item.fromPathRest ? item.name : item.path
cb(null, resolveItem(cid, node.value, item, options.offset, options.length))
})
waterfall([
(done) => dag.get(cid, done),
(node, done) => done(null, resolveItem(cid, node.value, item, options))
], cb)
}),
pull.flatten(),
pull.filter(Boolean),
pull.filter((node) => node.depth <= options.maxDepth)
)

function resolveItem (cid, node, item, offset, length) {
return resolve(cid, node, item.name, item.path, item.pathRest, item.size, dag, item.parent || parent, item.depth, offset, length)
function resolveItem (cid, node, item, options) {
return resolve({
cid,
node,
name: item.name,
path: item.path,
pathRest: item.pathRest,
size: item.size,
dag,
parentNode: item.parent || parent,
depth: item.depth,
options
})
}

function resolve (cid, node, name, path, pathRest, size, dag, parentNode, depth, offset, length) {
function resolve ({ cid, node, name, path, pathRest, size, dag, parentNode, depth, options }) {
let type

try {
Expand All @@ -67,11 +75,14 @@ function createResolver (dag, options, depth, parent) {
}

const nodeResolver = resolvers[type]

if (!nodeResolver) {
return pull.error(new Error('Unkown node type ' + type))
}

const resolveDeep = createResolver(dag, options, depth, node)
return nodeResolver(cid, node, name, path, pathRest, resolveDeep, size, dag, parentNode, depth, offset, length)

return nodeResolver(cid, node, name, path, pathRest, resolveDeep, size, dag, parentNode, depth, options)
}
}

Expand Down
5 changes: 3 additions & 2 deletions test/exporter-sharded.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ const randomBytes = require('./helpers/random-bytes')
const exporter = require('../src')
const importer = require('ipfs-unixfs-importer')

const SHARD_SPLIT_THRESHOLD = 1000
const SHARD_SPLIT_THRESHOLD = 10

describe('exporter sharded', () => {
let ipld
Expand Down Expand Up @@ -49,7 +49,8 @@ describe('exporter sharded', () => {
}))
),
importer(ipld, {
wrap: true
wrap: true,
shardSplitThreshold: SHARD_SPLIT_THRESHOLD
}),
pull.collect(cb)
),
Expand Down
44 changes: 44 additions & 0 deletions test/exporter-subtree.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,50 @@ describe('exporter subtree', () => {
}
], done)
})

it('exports all components of a path', (done) => {
const content = randomBytes(ONE_MEG)

waterfall([
(cb) => pull(
pull.values([{
path: './200Bytes.txt',
content: randomBytes(ONE_MEG)
}, {
path: './level-1/200Bytes.txt',
content
}, {
path: './level-1/level-2'
}, {
path: './level-1/level-2/200Bytes.txt',
content
}]),
importer(ipld),
pull.collect(cb)
),
(files, cb) => cb(null, files.pop().multihash),
(buf, cb) => cb(null, new CID(buf)),
(cid, cb) => pull(
exporter(`${cid.toBaseEncodedString()}/level-1/level-2/200Bytes.txt`, ipld, {
fullPath: true
}),
pull.collect((err, files) => cb(err, { cid, files }))
),
({ cid, files }, cb) => {
expect(files.length).to.equal(4)
expect(files[0].path).to.equal(cid.toBaseEncodedString())
expect(files[0].name).to.equal(cid.toBaseEncodedString())
expect(files[1].path).to.equal(`${cid.toBaseEncodedString()}/level-1`)
expect(files[1].name).to.equal('level-1')
expect(files[2].path).to.equal(`${cid.toBaseEncodedString()}/level-1/level-2`)
expect(files[2].name).to.equal('level-2')
expect(files[3].path).to.equal(`${cid.toBaseEncodedString()}/level-1/level-2/200Bytes.txt`)
expect(files[3].name).to.equal('200Bytes.txt')

cb()
}
], done)
})
})

function fileEql (f1, f2, done) {
Expand Down
11 changes: 6 additions & 5 deletions test/exporter.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ describe('exporter', () => {
})
}

function addTestFile ({file, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves}, cb) {
function addTestFile ({ file, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves }, cb) {
pull(
pull.values([{
path,
Expand All @@ -79,8 +79,8 @@ describe('exporter', () => {
)
}

function addAndReadTestFile ({file, offset, length, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves}, cb) {
addTestFile({file, strategy, path, maxChunkSize, rawLeaves}, (error, multihash) => {
function addAndReadTestFile ({ file, offset, length, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves }, cb) {
addTestFile({ file, strategy, path, maxChunkSize, rawLeaves }, (error, multihash) => {
if (error) {
return cb(error)
}
Expand All @@ -100,7 +100,7 @@ describe('exporter', () => {
})
}

function addTestDirectory ({directory, strategy = 'balanced', maxChunkSize}, callback) {
function addTestDirectory ({ directory, strategy = 'balanced', maxChunkSize }, callback) {
const input = push()
const dirName = path.basename(directory)

Expand Down Expand Up @@ -293,7 +293,8 @@ describe('exporter', () => {
content: randomBytes(100),
links: [
new DAGLink('', file.node.size, file.cid)
]}, cb),
]
}, cb),
(result, cb) => {
pull(
exporter(result.cid, ipld, {
Expand Down