Skip to content
This repository was archived by the owner on Apr 29, 2020. It is now read-only.

Commit 444a5d3

Browse files
committed
feat: add fullPath option
When exporting a path with a subtree it's sometimes useful to get information about everything in the path and not just the node at the end. This PR adds an option to return intermediate nodes in the export stream. It also documents the `maxDepth` option and lowers the sharding threshold to make the shard tests more reliable in the browser.
1 parent a37808a commit 444a5d3

File tree

10 files changed

+189
-46
lines changed

10 files changed

+189
-46
lines changed

README.md

Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -104,6 +104,91 @@ pull(
104104
)
105105
```
106106

107+
### `fullPath`
108+
109+
If specified, the exporter will emit an entry for every path component encountered.
110+
111+
```javascript
112+
const exporter = require('ipfs-unixfs-exporter')
113+
const pull = require('pull-stream')
114+
const collect = require('pull-stream/sinks/collect')
115+
116+
pull(
117+
exporter('QmFoo.../bar/baz.txt', ipld, {
118+
fullPath: true
119+
}),
120+
collect((err, files) => {
121+
console.info(files)
122+
123+
// [{
124+
// depth: 0,
125+
// name: 'QmFoo...',
126+
// path: 'QmFoo...',
127+
// size: ...
128+
// hash: Buffer
129+
// content: undefined
130+
// type: 'dir'
131+
// }, {
132+
// depth: 1,
133+
// name: 'bar',
134+
// path: 'QmFoo.../bar',
135+
// size: ...
136+
// hash: Buffer
137+
// content: undefined
138+
// type: 'dir'
139+
// }, {
140+
// depth: 2,
141+
// name: 'baz.txt',
142+
// path: 'QmFoo.../bar/baz.txt',
143+
// size: ...
144+
// hash: Buffer
145+
// content: <Pull stream>
146+
// type: 'file'
147+
// }]
148+
//
149+
})
150+
)
151+
```
152+
153+
### `maxDepth`
154+
155+
If specified, the exporter will only emit entries up to the specified depth.
156+
157+
```javascript
158+
const exporter = require('ipfs-unixfs-exporter')
159+
const pull = require('pull-stream')
160+
const collect = require('pull-stream/sinks/collect')
161+
162+
pull(
163+
exporter('QmFoo.../bar/baz.txt', ipld, {
164+
fullPath: true,
165+
maxDepth: 1
166+
}),
167+
collect((err, files) => {
168+
console.info(files)
169+
170+
// [{
171+
// depth: 0,
172+
// name: 'QmFoo...',
173+
// path: 'QmFoo...',
174+
// size: ...
175+
// hash: Buffer
176+
// content: undefined
177+
// type: 'dir'
178+
// }, {
179+
// depth: 1,
180+
// name: 'bar',
181+
// path: 'QmFoo.../bar',
182+
// size: ...
183+
// hash: Buffer
184+
// content: undefined
185+
// type: 'dir'
186+
// }]
187+
//
188+
})
189+
)
190+
```
191+
107192
[dag API]: https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/DAG.md
108193
[ipld-resolver instance]: https://github.com/ipld/js-ipld-resolver
109194
[UnixFS]: https://github.com/ipfs/specs/tree/master/unixfs

src/dir-flat.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ const cat = require('pull-cat')
66
// Logic to export a unixfs directory.
77
module.exports = dirExporter
88

9-
function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth) {
9+
function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) {
1010
const accepts = pathRest[0]
1111

1212
const dir = {
@@ -37,7 +37,7 @@ function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, paren
3737
]
3838

3939
// place dir before if not specifying subtree
40-
if (!pathRest.length) {
40+
if (!pathRest.length || options.fullPath) {
4141
streams.unshift(pull.values([dir]))
4242
}
4343

src/dir-hamt-sharded.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ const cat = require('pull-cat')
66
// Logic to export a unixfs directory.
77
module.exports = shardedDirExporter
88

9-
function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth) {
9+
function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) {
1010
let dir
1111
if (!parent || (parent.path !== path)) {
1212
dir = {
@@ -49,7 +49,7 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag
4949
)
5050
]
5151

52-
if (!pathRest.length) {
52+
if (!pathRest.length || options.fullPath) {
5353
streams.unshift(pull.values([dir]))
5454
}
5555

src/file.js

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ const paramap = require('pull-paramap')
77
const extractDataFromBlock = require('./extract-data-from-block')
88

99
// Logic to export a single (possibly chunked) unixfs file.
10-
module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, offset, length) => {
10+
module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) => {
1111
const accepts = pathRest[0]
1212

1313
if (accepts !== undefined && accepts !== path) {
@@ -24,19 +24,19 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
2424

2525
const fileSize = size || file.fileSize()
2626

27-
if (offset < 0) {
27+
if (options.offset < 0) {
2828
return pull.error(new Error('Offset must be greater than or equal to 0'))
2929
}
3030

31-
if (offset > fileSize) {
31+
if (options.offset > fileSize) {
3232
return pull.error(new Error('Offset must be less than the file size'))
3333
}
3434

35-
if (length < 0) {
35+
if (options.length < 0) {
3636
return pull.error(new Error('Length must be greater than or equal to 0'))
3737
}
3838

39-
if (length === 0) {
39+
if (options.length === 0) {
4040
return pull.once({
4141
depth: depth,
4242
content: pull.once(Buffer.alloc(0)),
@@ -48,15 +48,15 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
4848
})
4949
}
5050

51-
if (!offset) {
52-
offset = 0
51+
if (!options.offset) {
52+
options.offset = 0
5353
}
5454

55-
if (!length || (offset + length > fileSize)) {
56-
length = fileSize - offset
55+
if (!options.length || (options.offset + options.length > fileSize)) {
56+
options.length = fileSize - options.offset
5757
}
5858

59-
const content = streamBytes(dag, node, fileSize, offset, length)
59+
const content = streamBytes(dag, node, fileSize, options.offset, options.length)
6060

6161
return pull.values([{
6262
depth: depth,

src/index.js

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,8 @@ function pathBaseAndRest (path) {
3838
const defaultOptions = {
3939
maxDepth: Infinity,
4040
offset: undefined,
41-
length: undefined
41+
length: undefined,
42+
fullPath: false
4243
}
4344

4445
module.exports = (path, dag, options) => {
@@ -70,7 +71,7 @@ module.exports = (path, dag, options) => {
7071
return {
7172
depth: node.depth,
7273
name: node.name,
73-
path: finalPathFor(node),
74+
path: options.fullPath ? node.path : finalPathFor(node),
7475
size: node.size,
7576
hash: node.multihash,
7677
content: node.content,

src/raw.js

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ const pull = require('pull-stream')
44
const extractDataFromBlock = require('./extract-data-from-block')
55

66
// Logic to export a single raw block
7-
module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, offset, length) => {
7+
module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) => {
88
const accepts = pathRest[0]
99

1010
if (accepts !== undefined && accepts !== path) {
@@ -13,19 +13,19 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
1313

1414
size = size || node.length
1515

16-
if (offset < 0) {
16+
if (options.offset < 0) {
1717
return pull.error(new Error('Offset must be greater than or equal to 0'))
1818
}
1919

20-
if (offset > size) {
20+
if (options.offset > size) {
2121
return pull.error(new Error('Offset must be less than the file size'))
2222
}
2323

24-
if (length < 0) {
24+
if (options.length < 0) {
2525
return pull.error(new Error('Length must be greater than or equal to 0'))
2626
}
2727

28-
if (length === 0) {
28+
if (options.length === 0) {
2929
return pull.once({
3030
depth,
3131
content: pull.once(Buffer.alloc(0)),
@@ -37,17 +37,17 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
3737
})
3838
}
3939

40-
if (!offset) {
41-
offset = 0
40+
if (!options.offset) {
41+
options.offset = 0
4242
}
4343

44-
if (!length || (offset + length > size)) {
45-
length = size - offset
44+
if (!options.length || (options.offset + options.length > size)) {
45+
options.length = size - options.offset
4646
}
4747

4848
return pull.once({
4949
depth,
50-
content: pull.once(extractDataFromBlock(node, 0, offset, offset + length)),
50+
content: pull.once(extractDataFromBlock(node, 0, options.offset, options.offset + options.length)),
5151
hash: cid,
5252
name,
5353
path,

src/resolve.js

Lines changed: 24 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ const UnixFS = require('ipfs-unixfs')
44
const pull = require('pull-stream')
55
const paramap = require('pull-paramap')
66
const CID = require('cids')
7+
const waterfall = require('async/waterfall')
78

89
const resolvers = {
910
directory: require('./dir-flat'),
@@ -34,30 +35,37 @@ function createResolver (dag, options, depth, parent) {
3435
}
3536

3637
if (item.object) {
37-
return cb(null, resolveItem(null, item.object, item, options.offset, options.length))
38+
return cb(null, resolveItem(null, item.object, item, options))
3839
}
3940

4041
const cid = new CID(item.multihash)
4142

42-
dag.get(cid, (err, node) => {
43-
if (err) {
44-
return cb(err)
45-
}
46-
47-
// const name = item.fromPathRest ? item.name : item.path
48-
cb(null, resolveItem(cid, node.value, item, options.offset, options.length))
49-
})
43+
waterfall([
44+
(done) => dag.get(cid, done),
45+
(node, done) => done(null, resolveItem(cid, node.value, item, options))
46+
], cb)
5047
}),
5148
pull.flatten(),
5249
pull.filter(Boolean),
5350
pull.filter((node) => node.depth <= options.maxDepth)
5451
)
5552

56-
function resolveItem (cid, node, item, offset, length) {
57-
return resolve(cid, node, item.name, item.path, item.pathRest, item.size, dag, item.parent || parent, item.depth, offset, length)
53+
function resolveItem (cid, node, item, options) {
54+
return resolve({
55+
cid,
56+
node,
57+
name: item.name,
58+
path: item.path,
59+
pathRest: item.pathRest,
60+
size: item.size,
61+
dag,
62+
parentNode: item.parent || parent,
63+
depth: item.depth,
64+
options
65+
})
5866
}
5967

60-
function resolve (cid, node, name, path, pathRest, size, dag, parentNode, depth, offset, length) {
68+
function resolve ({ cid, node, name, path, pathRest, size, dag, parentNode, depth, options }) {
6169
let type
6270

6371
try {
@@ -67,11 +75,14 @@ function createResolver (dag, options, depth, parent) {
6775
}
6876

6977
const nodeResolver = resolvers[type]
78+
7079
if (!nodeResolver) {
7180
return pull.error(new Error('Unkown node type ' + type))
7281
}
82+
7383
const resolveDeep = createResolver(dag, options, depth, node)
74-
return nodeResolver(cid, node, name, path, pathRest, resolveDeep, size, dag, parentNode, depth, offset, length)
84+
85+
return nodeResolver(cid, node, name, path, pathRest, resolveDeep, size, dag, parentNode, depth, options)
7586
}
7687
}
7788

test/exporter-sharded.spec.js

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ const randomBytes = require('./helpers/random-bytes')
1515
const exporter = require('../src')
1616
const importer = require('ipfs-unixfs-importer')
1717

18-
const SHARD_SPLIT_THRESHOLD = 1000
18+
const SHARD_SPLIT_THRESHOLD = 10
1919

2020
describe('exporter sharded', () => {
2121
let ipld
@@ -49,7 +49,8 @@ describe('exporter sharded', () => {
4949
}))
5050
),
5151
importer(ipld, {
52-
wrap: true
52+
wrap: true,
53+
shardSplitThreshold: SHARD_SPLIT_THRESHOLD
5354
}),
5455
pull.collect(cb)
5556
),

0 commit comments

Comments
 (0)