This repository was archived by the owner on Apr 29, 2020. It is now read-only.

Commit d6c57be

Merge pull request #3 from ipfs/add-full-path-option
feat: add `fullPath` option
2 parents 107e599 + 21dd221

10 files changed: +177 −28 lines

README.md

Lines changed: 85 additions & 0 deletions

````diff
@@ -104,6 +104,91 @@ pull(
 )
 ```
 
+### `fullPath`
+
+If specified, the exporter will emit an entry for every path component encountered.
+
+```javascript
+const exporter = require('ipfs-unixfs-exporter')
+const pull = require('pull-stream')
+const collect = require('pull-stream/sinks/collect')
+
+pull(
+  exporter('QmFoo.../bar/baz.txt', ipld, {
+    fullPath: true
+  }),
+  collect((err, files) => {
+    console.info(files)
+
+    // [{
+    //   depth: 0,
+    //   name: 'QmFoo...',
+    //   path: 'QmFoo...',
+    //   size: ...,
+    //   hash: Buffer,
+    //   content: undefined,
+    //   type: 'dir'
+    // }, {
+    //   depth: 1,
+    //   name: 'bar',
+    //   path: 'QmFoo.../bar',
+    //   size: ...,
+    //   hash: Buffer,
+    //   content: undefined,
+    //   type: 'dir'
+    // }, {
+    //   depth: 2,
+    //   name: 'baz.txt',
+    //   path: 'QmFoo.../bar/baz.txt',
+    //   size: ...,
+    //   hash: Buffer,
+    //   content: <Pull stream>,
+    //   type: 'file'
+    // }]
+    //
+  })
+)
+```
+
+### `maxDepth`
+
+If specified, the exporter will only emit entries up to the specified depth.
+
+```javascript
+const exporter = require('ipfs-unixfs-exporter')
+const pull = require('pull-stream')
+const collect = require('pull-stream/sinks/collect')
+
+pull(
+  exporter('QmFoo.../bar/baz.txt', ipld, {
+    fullPath: true,
+    maxDepth: 1
+  }),
+  collect((err, files) => {
+    console.info(files)
+
+    // [{
+    //   depth: 0,
+    //   name: 'QmFoo...',
+    //   path: 'QmFoo...',
+    //   size: ...,
+    //   hash: Buffer,
+    //   content: undefined,
+    //   type: 'dir'
+    // }, {
+    //   depth: 1,
+    //   name: 'bar',
+    //   path: 'QmFoo.../bar',
+    //   size: ...,
+    //   hash: Buffer,
+    //   content: undefined,
+    //   type: 'dir'
+    // }]
+    //
+  })
+)
+```
+
 [dag API]: https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/DAG.md
 [ipld-resolver instance]: https://github.com/ipld/js-ipld-resolver
 [UnixFS]: https://github.com/ipfs/specs/tree/master/unixfs
````
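
As an aside beyond the diff itself: the entries a `fullPath` export emits compose with ordinary pull-stream operators, so callers can post-process the stream. A minimal sketch (placeholder CID; `ipld` is an [ipld-resolver instance]) that keeps only the intermediate directory entries:

```javascript
const exporter = require('ipfs-unixfs-exporter')
const pull = require('pull-stream')

// Keep only the directory entries from a fullPath export.
pull(
  exporter('QmFoo.../bar/baz.txt', ipld, { fullPath: true }),
  pull.filter((entry) => entry.type === 'dir'),
  pull.collect((err, dirs) => {
    if (err) throw err
    console.info(dirs.map((d) => d.path))
    // [ 'QmFoo...', 'QmFoo.../bar' ]
  })
)
```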

src/dir-flat.js

Lines changed: 2 additions & 2 deletions

```diff
@@ -6,7 +6,7 @@ const cat = require('pull-cat')
 // Logic to export a unixfs directory.
 module.exports = dirExporter
 
-function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth) {
+function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) {
   const accepts = pathRest[0]
 
   const dir = {
@@ -37,7 +37,7 @@ function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, paren
   ]
 
   // place dir before if not specifying subtree
-  if (!pathRest.length) {
+  if (!pathRest.length || options.fullPath) {
     streams.unshift(pull.values([dir]))
   }
```

src/dir-hamt-sharded.js

Lines changed: 2 additions & 2 deletions

```diff
@@ -6,7 +6,7 @@ const cat = require('pull-cat')
 // Logic to export a unixfs directory.
 module.exports = shardedDirExporter
 
-function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth) {
+function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) {
   let dir
   if (!parent || (parent.path !== path)) {
     dir = {
@@ -49,7 +49,7 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag
     )
   ]
 
-  if (!pathRest.length) {
+  if (!pathRest.length || options.fullPath) {
     streams.unshift(pull.values([dir]))
   }
```

src/file.js

Lines changed: 4 additions & 1 deletion

```diff
@@ -7,7 +7,7 @@ const paramap = require('pull-paramap')
 const extractDataFromBlock = require('./extract-data-from-block')
 
 // Logic to export a single (possibly chunked) unixfs file.
-module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, offset, length) => {
+module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) => {
   const accepts = pathRest[0]
 
   if (accepts !== undefined && accepts !== path) {
@@ -24,6 +24,9 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
 
   const fileSize = size || file.fileSize()
 
+  let offset = options.offset
+  let length = options.length
+
   if (offset < 0) {
     return pull.error(new Error('Offset must be greater than or equal to 0'))
   }
```
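
The hunk above moves `offset` and `length` from positional arguments into the shared options object (they already appear in `defaultOptions` in src/index.js below). A hedged sketch of how a caller would request a byte range through those options (placeholder CID; `ipld` as elsewhere):

```javascript
const exporter = require('ipfs-unixfs-exporter')
const pull = require('pull-stream')

// Export 256 bytes of a file, starting at byte 1024.
pull(
  exporter('QmFoo...', ipld, {
    offset: 1024,
    length: 256
  }),
  pull.collect((err, files) => {
    if (err) throw err

    // files[0].content is a pull stream of just the requested range
    pull(
      files[0].content,
      pull.collect((err, chunks) => {
        if (err) throw err
        console.info(Buffer.concat(chunks).length) // 256
      })
    )
  })
)
```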

src/index.js

Lines changed: 3 additions & 2 deletions

```diff
@@ -38,7 +38,8 @@ function pathBaseAndRest (path) {
 const defaultOptions = {
   maxDepth: Infinity,
   offset: undefined,
-  length: undefined
+  length: undefined,
+  fullPath: false
 }
 
 module.exports = (path, dag, options) => {
@@ -70,7 +71,7 @@ module.exports = (path, dag, options) => {
     return {
       depth: node.depth,
       name: node.name,
-      path: finalPathFor(node),
+      path: options.fullPath ? node.path : finalPathFor(node),
       size: node.size,
       hash: node.multihash,
       content: node.content,
```

src/raw.js

Lines changed: 4 additions & 1 deletion

```diff
@@ -4,7 +4,7 @@ const pull = require('pull-stream')
 const extractDataFromBlock = require('./extract-data-from-block')
 
 // Logic to export a single raw block
-module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, offset, length) => {
+module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) => {
   const accepts = pathRest[0]
 
   if (accepts !== undefined && accepts !== path) {
@@ -13,6 +13,9 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
 
   size = size || node.length
 
+  let offset = options.offset
+  let length = options.length
+
   if (offset < 0) {
     return pull.error(new Error('Offset must be greater than or equal to 0'))
   }
```

src/resolve.js

Lines changed: 24 additions & 13 deletions

```diff
@@ -4,6 +4,7 @@ const UnixFS = require('ipfs-unixfs')
 const pull = require('pull-stream')
 const paramap = require('pull-paramap')
 const CID = require('cids')
+const waterfall = require('async/waterfall')
 
 const resolvers = {
   directory: require('./dir-flat'),
@@ -34,30 +35,37 @@ function createResolver (dag, options, depth, parent) {
       }
 
       if (item.object) {
-        return cb(null, resolveItem(null, item.object, item, options.offset, options.length))
+        return cb(null, resolveItem(null, item.object, item, options))
       }
 
       const cid = new CID(item.multihash)
 
-      dag.get(cid, (err, node) => {
-        if (err) {
-          return cb(err)
-        }
-
-        // const name = item.fromPathRest ? item.name : item.path
-        cb(null, resolveItem(cid, node.value, item, options.offset, options.length))
-      })
+      waterfall([
+        (done) => dag.get(cid, done),
+        (node, done) => done(null, resolveItem(cid, node.value, item, options))
+      ], cb)
     }),
     pull.flatten(),
     pull.filter(Boolean),
     pull.filter((node) => node.depth <= options.maxDepth)
   )
 
-  function resolveItem (cid, node, item, offset, length) {
-    return resolve(cid, node, item.name, item.path, item.pathRest, item.size, dag, item.parent || parent, item.depth, offset, length)
+  function resolveItem (cid, node, item, options) {
+    return resolve({
+      cid,
+      node,
+      name: item.name,
+      path: item.path,
+      pathRest: item.pathRest,
+      size: item.size,
+      dag,
+      parentNode: item.parent || parent,
+      depth: item.depth,
+      options
+    })
   }
 
-  function resolve (cid, node, name, path, pathRest, size, dag, parentNode, depth, offset, length) {
+  function resolve ({ cid, node, name, path, pathRest, size, dag, parentNode, depth, options }) {
     let type
 
     try {
@@ -67,11 +75,14 @@ function createResolver (dag, options, depth, parent) {
     }
 
     const nodeResolver = resolvers[type]
+
     if (!nodeResolver) {
       return pull.error(new Error('Unkown node type ' + type))
     }
+
     const resolveDeep = createResolver(dag, options, depth, node)
-    return nodeResolver(cid, node, name, path, pathRest, resolveDeep, size, dag, parentNode, depth, offset, length)
+
+    return nodeResolver(cid, node, name, path, pathRest, resolveDeep, size, dag, parentNode, depth, options)
   }
 }
```
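
For readers unfamiliar with it, `async/waterfall` (introduced above to flatten the nested `dag.get` callback) runs tasks in series, passing each task's results to the next. A minimal standalone sketch of the pattern:

```javascript
const waterfall = require('async/waterfall')

// Each task receives the previous task's results plus a callback;
// the final callback gets the first error, or the last task's results.
waterfall([
  (done) => done(null, 2),
  (two, done) => done(null, two * 2)
], (err, four) => {
  if (err) throw err
  console.info(four) // 4
})
```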

test/exporter-sharded.spec.js

Lines changed: 3 additions & 2 deletions

```diff
@@ -15,7 +15,7 @@ const randomBytes = require('./helpers/random-bytes')
 const exporter = require('../src')
 const importer = require('ipfs-unixfs-importer')
 
-const SHARD_SPLIT_THRESHOLD = 1000
+const SHARD_SPLIT_THRESHOLD = 10
 
 describe('exporter sharded', function () {
   this.timeout(30000)
@@ -51,7 +51,8 @@ describe('exporter sharded', function () {
         }))
       ),
       importer(ipld, {
-        wrap: true
+        wrap: true,
+        shardSplitThreshold: SHARD_SPLIT_THRESHOLD
       }),
      pull.collect(cb)
     ),
```
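
For context, `shardSplitThreshold` sets how many entries a directory may hold before the importer stores it as a HAMT shard; the test lowers it so shards appear with only a handful of files. A hedged sketch (assuming `ipld` is set up as in the test) that would yield a sharded directory:

```javascript
const pull = require('pull-stream')
const importer = require('ipfs-unixfs-importer')

// Eleven files exceed a shardSplitThreshold of 10, so the wrapping
// directory is imported as a HAMT shard rather than a flat dir node.
pull(
  pull.values(
    new Array(11).fill(0).map((_, i) => ({
      path: `dir/file-${i}.txt`,
      content: Buffer.from(`file ${i}`)
    }))
  ),
  importer(ipld, {
    wrap: true,
    shardSplitThreshold: 10
  }),
  pull.collect((err, entries) => {
    if (err) throw err
    console.info(entries.map((e) => e.path))
  })
)
```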

test/exporter-subtree.spec.js

Lines changed: 44 additions & 0 deletions

```diff
@@ -143,6 +143,50 @@ describe('exporter subtree', () => {
       }
     ], done)
   })
+
+  it('exports all components of a path', (done) => {
+    const content = randomBytes(ONE_MEG)
+
+    waterfall([
+      (cb) => pull(
+        pull.values([{
+          path: './200Bytes.txt',
+          content: randomBytes(ONE_MEG)
+        }, {
+          path: './level-1/200Bytes.txt',
+          content
+        }, {
+          path: './level-1/level-2'
+        }, {
+          path: './level-1/level-2/200Bytes.txt',
+          content
+        }]),
+        importer(ipld),
+        pull.collect(cb)
+      ),
+      (files, cb) => cb(null, files.pop().multihash),
+      (buf, cb) => cb(null, new CID(buf)),
+      (cid, cb) => pull(
+        exporter(`${cid.toBaseEncodedString()}/level-1/level-2/200Bytes.txt`, ipld, {
+          fullPath: true
+        }),
+        pull.collect((err, files) => cb(err, { cid, files }))
+      ),
+      ({ cid, files }, cb) => {
+        expect(files.length).to.equal(4)
+        expect(files[0].path).to.equal(cid.toBaseEncodedString())
+        expect(files[0].name).to.equal(cid.toBaseEncodedString())
+        expect(files[1].path).to.equal(`${cid.toBaseEncodedString()}/level-1`)
+        expect(files[1].name).to.equal('level-1')
+        expect(files[2].path).to.equal(`${cid.toBaseEncodedString()}/level-1/level-2`)
+        expect(files[2].name).to.equal('level-2')
+        expect(files[3].path).to.equal(`${cid.toBaseEncodedString()}/level-1/level-2/200Bytes.txt`)
+        expect(files[3].name).to.equal('200Bytes.txt')
+
+        cb()
+      }
+    ], done)
+  })
 })
 
 function fileEql (f1, f2, done) {
```

test/exporter.spec.js

Lines changed: 6 additions & 5 deletions

```diff
@@ -60,7 +60,7 @@ describe('exporter', () => {
     })
   }
 
-  function addTestFile ({file, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves}, cb) {
+  function addTestFile ({ file, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves }, cb) {
     pull(
       pull.values([{
         path,
@@ -79,8 +79,8 @@ describe('exporter', () => {
     )
   }
 
-  function addAndReadTestFile ({file, offset, length, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves}, cb) {
-    addTestFile({file, strategy, path, maxChunkSize, rawLeaves}, (error, multihash) => {
+  function addAndReadTestFile ({ file, offset, length, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves }, cb) {
+    addTestFile({ file, strategy, path, maxChunkSize, rawLeaves }, (error, multihash) => {
       if (error) {
         return cb(error)
       }
@@ -100,7 +100,7 @@ describe('exporter', () => {
     })
   }
 
-  function addTestDirectory ({directory, strategy = 'balanced', maxChunkSize}, callback) {
+  function addTestDirectory ({ directory, strategy = 'balanced', maxChunkSize }, callback) {
     const input = push()
     const dirName = path.basename(directory)
 
@@ -293,7 +293,8 @@ describe('exporter', () => {
         content: randomBytes(100),
         links: [
           new DAGLink('', file.node.size, file.cid)
-        ]}, cb),
+        ]
+      }, cb),
       (result, cb) => {
         pull(
           exporter(result.cid, ipld, {
```
