This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit c7d12f6

feat: support for fetching subtree
1 parent defaf0a commit c7d12f6

File tree

9 files changed: +183 −36 lines changed
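
The change lets the exporter take a path that continues past the root hash and emit only the entries along that subpath. A minimal usage sketch, loading the exporter the way the tests do; `ipldResolver` is assumed to be an ipld-resolver instance set up as in test/test-export-subtree.js, and the hash is that test's fixture root:

const pull = require('pull-stream')
const exporter = require('./src').exporter

// Walk down two levels and stream only the nodes on that path:
// the root dir, the level-1 dir and the 200Bytes.txt file itself.
pull(
  exporter('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt', ipldResolver),
  pull.collect((err, files) => {
    // files[2].content is a pull-stream carrying the file's bytes
  })
)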

src/exporter/dir-flat.js

Lines changed: 6 additions & 2 deletions

@@ -9,7 +9,9 @@ const cat = require('pull-cat')
 // Logic to export a unixfs directory.
 module.exports = dirExporter
 
-function dirExporter (node, name, ipldResolver, resolve, parent) {
+function dirExporter (node, name, pathRest, ipldResolver, resolve, parent) {
+  const accepts = pathRest.shift()
+
   const dir = {
     path: name,
     hash: node.multihash
@@ -20,15 +22,17 @@ function dirExporter (node, name, ipldResolver, resolve, parent) {
     pull(
       pull.values(node.links),
       pull.map((link) => ({
+        linkName: link.name,
         path: path.join(name, link.name),
         hash: link.multihash
       })),
+      pull.filter((item) => accepts === undefined || item.linkName === accepts),
      paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => {
        if (err) {
          return cb(err)
        }
 
-        cb(null, resolve(n.value, item.path, ipldResolver, name, parent))
+        cb(null, resolve(n.value, item.path, pathRest, ipldResolver, name, parent))
      })),
      pull.flatten()
    )
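
The pattern above (repeated in src/exporter/dir.js below) is that each directory level consumes one segment of the remaining path: `pathRest.shift()` takes the head, and links that do not match it are filtered out before being resolved. A small illustration with hypothetical values:

const pathRest = ['level-1', '200Bytes.txt']   // as handed to a directory exporter

const accepts = pathRest.shift()
// accepts === 'level-1': only the link with that name survives pull.filter
// pathRest is now ['200Bytes.txt'] and is passed down through resolve()

// once the path is exhausted, accepts === undefined and every link is kept,
// so the full subtree below the requested node gets exported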

src/exporter/dir-hamt-sharded.js

Lines changed: 18 additions & 9 deletions

@@ -10,7 +10,7 @@ const cleanHash = require('./clean-multihash')
 // Logic to export a unixfs directory.
 module.exports = shardedDirExporter
 
-function shardedDirExporter (node, name, ipldResolver, resolve, parent) {
+function shardedDirExporter (node, name, pathRest, ipldResolver, resolve, parent) {
   let dir
   if (!parent || parent.path !== name) {
     dir = [{
@@ -25,22 +25,31 @@ function shardedDirExporter (node, name, ipldResolver, resolve, parent) {
      pull.values(node.links),
      pull.map((link) => {
        // remove the link prefix (2 chars for the bucket index)
-        let p = link.name.substring(2)
-        // another sharded dir or file?
-        p = p ? path.join(name, p) : name
+        const p = link.name.substring(2)
+        const pp = p ? path.join(name, p) : name
+        let accept = true
 
-        return {
-          name: link.name,
-          path: p,
-          hash: link.multihash
+        if (p && pathRest.length) {
+          accept = (p === pathRest[0])
+        }
+        if (accept) {
+          return {
+            name: p,
+            path: pp,
+            hash: link.multihash,
+            pathRest: p ? pathRest.slice(1) : pathRest
+          }
+        } else {
+          return ''
        }
      }),
+      pull.filter(Boolean),
      paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => {
        if (err) {
          return cb(err)
        }
 
-        cb(null, resolve(n.value, item.path, ipldResolver, (dir && dir[0]) || parent))
+        cb(null, resolve(n.value, item.path, item.pathRest, ipldResolver, (dir && dir[0]) || parent))
      })),
      pull.flatten()
    )
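
In a HAMT-sharded directory the link names carry a two-character bucket prefix ahead of the real entry name, so the code strips it before comparing against `pathRest[0]`. Links that are pure shard pointers have nothing left after the prefix and are always descended into, with the path untouched. A sketch with a hypothetical link name and parent path:

const path = require('path')

const linkName = '8Dbig'         // hypothetical: 2-char bucket index + entry name
const p = linkName.substring(2)  // 'big' -- the real entry name
const pp = p ? path.join('QmRootDir', p) : 'QmRootDir'  // 'QmRootDir' is a placeholder parent path

// p === '' would mean the link points at another shard of the same directory:
// accept stays true and pathRest is forwarded unchanged (pathRest.slice(1) is skipped)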

src/exporter/dir.js

Lines changed: 9 additions & 3 deletions

@@ -1,5 +1,7 @@
 'use strict'
 
+// TODO: REMOVE??
+
 const path = require('path')
 const pull = require('pull-stream')
 const paramap = require('pull-paramap')
@@ -12,7 +14,7 @@ const switchType = require('../util').switchType
 // Logic to export a unixfs directory.
 module.exports = dirExporter
 
-function dirExporter (node, name, ipldResolver) {
+function dirExporter (node, name, pathRest, ipldResolver) {
   // The algorithm below is as follows
   //
   // 1. Take all links from a given directory node
@@ -24,12 +26,16 @@ function dirExporter (node, name, ipldResolver) {
   // - `file`: use the fileExporter to load and return the file
   // 4. Flatten
 
+  const accepts = pathRest.shift()
+
   return pull(
     pull.values(node.links),
     pull.map((link) => ({
+      linkName: link.name,
      path: path.join(name, link.name),
      hash: link.multihash
    })),
+    pull.filter((item) => accepts === undefined || item.linkName === accepts),
    paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, result) => {
      if (err) {
        return cb(err)
@@ -44,8 +50,8 @@ function dirExporter (node, name, ipldResolver) {
 
      cb(null, switchType(
        node,
-        () => cat([pull.values([dir]), dirExporter(node, item.path, ipldResolver)]),
-        () => fileExporter(node, item.path, ipldResolver)
+        () => cat([pull.values([dir]), dirExporter(node, item.path, pathRest, ipldResolver)]),
+        () => fileExporter(node, item.path, pathRest, ipldResolver)
      ))
    })),
    pull.flatten()

src/exporter/file.js

Lines changed: 7 additions & 1 deletion

@@ -7,7 +7,7 @@ const pull = require('pull-stream')
 const paramap = require('pull-paramap')
 
 // Logic to export a single (possibly chunked) unixfs file.
-module.exports = (node, name, ipldResolver) => {
+module.exports = (node, name, pathRest, ipldResolver) => {
   function getData (node) {
     try {
       const file = UnixFS.unmarshal(node.data)
@@ -25,6 +25,12 @@ module.exports = (node, name, ipldResolver) => {
     )
   }
 
+  const accepts = pathRest.shift()
+
+  if (accepts !== undefined && accepts !== name) {
+    return pull.empty()
+  }
+
   let content = pull(
     traverse.depthFirst(node, visitor),
     pull.map(getData)
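
A file node ends the walk, so a still-pending path segment must address the file itself or nothing is emitted. A sketch of the guard's three outcomes, with hypothetical arguments (in the exporter, `name` is the node's path and `pathRest` is whatever the parent directory left over):

const pull = require('pull-stream')

function guard (name, pathRest) {
  const accepts = pathRest.shift()
  if (accepts !== undefined && accepts !== name) {
    return pull.empty()   // the path points past or beside this file: empty stream
  }
  return null             // fall through: export the file content as before
}

guard('200Bytes.txt', [])                // path exhausted -> file is exported
guard('200Bytes.txt', ['200Bytes.txt'])  // segment addresses this node -> exported
guard('200Bytes.txt', ['doesnotexist'])  // mismatch -> pull.empty()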

src/exporter/index.js

Lines changed: 22 additions & 19 deletions

@@ -7,43 +7,46 @@ const pullDefer = require('pull-defer')
 
 const resolve = require('./resolve').resolve
 
-function sanitize (path) {
+function pathBaseAndRest (path) {
   // Buffer -> raw multihash or CID in buffer
-  if (Buffer.isBuffer(path)) {
-    return new CID(path).toBaseEncodedString()
-  }
+  let pathBase = path
+  let pathRest = '/'
 
-  if (CID.isCID(path)) {
-    return path.toBaseEncodedString()
+  if (Buffer.isBuffer(path)) {
+    pathBase = (new CID(path)).toBaseEncodedString()
   }
 
-  try {
-    const cid = new CID(path)
-    return cid.toBaseEncodedString()
-  } catch (err) {} // not an isolated CID, can be a path
-
-  if (v.ipfsPath(path)) {
-    // trim that ipfs prefix
+  if (typeof path === 'string') {
     if (path.indexOf('/ipfs/') === 0) {
       path = path.substring(6)
     }
+    const subtreeStart = path.indexOf('/')
+    if (subtreeStart > 0) {
+      pathBase = path.substring(0, subtreeStart)
+      pathRest = path.substring(subtreeStart)
+    }
+  } else if (CID.isCID(pathBase)) {
+    pathBase = pathBase.toBaseEncodedString()
+  }
+
+  pathBase = (new CID(pathBase)).toBaseEncodedString()
 
-    return path
-  } else {
-    throw new Error('not valid cid or path')
+  return {
+    base: pathBase,
+    rest: pathRest.split('/').filter(Boolean)
   }
 }
 
 module.exports = (path, dag) => {
   try {
-    path = sanitize(path)
+    path = pathBaseAndRest(path)
   } catch (err) {
     return pull.error(err)
   }
 
   const d = pullDefer.source()
 
-  const cid = new CID(path)
+  const cid = new CID(path.base)
 
   dag.get(cid, (err, node) => {
     if (err) {
@@ -55,7 +58,7 @@ module.exports = (path, dag) => {
   return pull(
     d,
     pull.map((result) => result.value),
-    pull.map((node) => resolve(node, path, dag)),
+    pull.map((node) => resolve(node, path.base, path.rest, dag)),
    pull.flatten()
  )
 }
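
The renamed `pathBaseAndRest` returns both pieces at once: the base CID as a base-encoded string, plus the remaining path split into segments. Expected results, read off the code above (reusing the fixture hash from the new tests):

pathBaseAndRest('/ipfs/QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt')
// => { base: 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN',
//      rest: ['level-1', '200Bytes.txt'] }

pathBaseAndRest('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN')
// => { base: 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN', rest: [] }

// Buffers and CID instances are normalised the same way and produce rest: []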

src/exporter/resolve.js

Lines changed: 3 additions & 2 deletions

@@ -14,13 +14,14 @@ module.exports = Object.assign({
   typeOf: typeOf
 }, resolvers)
 
-function resolve (node, name, ipldResolver, parentNode) {
+function resolve (node, hash, pathRest, ipldResolver, parentNode) {
   const type = typeOf(node)
   const resolver = resolvers[type]
   if (!resolver) {
     return pull.error(new Error('Unkown node type ' + type))
   }
-  let stream = resolver(node, name, ipldResolver, resolve, parentNode)
+  // TODO: pass remaining path to filter output
+  let stream = resolver(node, hash, pathRest, ipldResolver, resolve, parentNode)
   return stream
 }

test/node.js

Lines changed: 1 addition & 0 deletions

@@ -44,6 +44,7 @@ describe('IPFS UnixFS Engine', () => {
   require('./test-consumable-hash')
   require('./test-hamt')
   require('./test-exporter')(repo)
+  require('./test-export-subtree')(repo)
   require('./test-importer')(repo)
   require('./test-importer-flush')(repo)
   require('./test-import-export')(repo)

test/test-dirbuilder-sharding.js

Lines changed: 30 additions & 0 deletions

@@ -334,6 +334,36 @@ module.exports = (repo) => {
          }
        }
      })
+
+      it('exports a big dir with subpath', (done) => {
+        const exportHash = mh.toB58String(rootHash) + '/big/big/2000'
+        const entries = {}
+        pull(
+          exporter(exportHash, ipldResolver),
+          pull.collect(collected)
+        )
+
+        function collected (err, nodes) {
+          expect(err).to.not.exist()
+          const paths = Object.keys(entries).sort()
+          expect(nodes.length).to.equal(4)
+          expect(nodes.map((node) => node.path)).to.deep.equal([
+            'QmTm3ZdKxyDLvcEePEvGfB2QReXsiAF7f39yjRcWwWrA6d',
+            'QmTm3ZdKxyDLvcEePEvGfB2QReXsiAF7f39yjRcWwWrA6d/big',
+            'QmTm3ZdKxyDLvcEePEvGfB2QReXsiAF7f39yjRcWwWrA6d/big/big',
+            'QmTm3ZdKxyDLvcEePEvGfB2QReXsiAF7f39yjRcWwWrA6d/big/big/2000'
+          ])
+          pull(
+            nodes[3].content,
+            pull.collect((err, content) => {
+              expect(err).to.not.exist()
+              expect(content.toString()).to.equal('2000')
+              done()
+            })
+          )
+        }
+      })
+
    })
  })
}

test/test-export-subtree.js

Lines changed: 87 additions & 0 deletions

@@ -0,0 +1,87 @@
+/* eslint-env mocha */
+'use strict'
+
+const chai = require('chai')
+chai.use(require('dirty-chai'))
+const expect = chai.expect
+const BlockService = require('ipfs-block-service')
+const IPLDResolver = require('ipld-resolver')
+const UnixFS = require('ipfs-unixfs')
+const bs58 = require('bs58')
+const pull = require('pull-stream')
+const zip = require('pull-zip')
+const CID = require('cids')
+const loadFixture = require('aegir/fixtures')
+const Buffer = require('safe-buffer').Buffer
+
+const unixFSEngine = require('./../src')
+const exporter = unixFSEngine.exporter
+
+const smallFile = loadFixture(__dirname, 'fixtures/200Bytes.txt')
+
+module.exports = (repo) => {
+  describe('exporter', () => {
+    let ipldResolver
+
+    before(() => {
+      const bs = new BlockService(repo)
+      ipldResolver = new IPLDResolver(bs)
+    })
+
+
+    it('export a file 2 levels down', (done) => {
+      const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt'
+
+      pull(
+        exporter(hash, ipldResolver),
+        pull.collect((err, files) => {
+          expect(err).to.not.exist()
+          expect(files.length).to.equal(3)
+          expect(files[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN')
+          expect(files[0].content).to.not.exist()
+          expect(files[1].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1')
+          expect(files[1].content).to.not.exist()
+          expect(files[2].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt')
+          fileEql(files[2], smallFile, done)
+        })
+      )
+    })
+
+    it('export a non existing file', (done) => {
+      const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/doesnotexist'
+
+      pull(
+        exporter(hash, ipldResolver),
+        pull.collect((err, files) => {
+          expect(err).to.not.exist()
+          expect(files.length).to.equal(1)
+          expect(files[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN')
+          expect(files[0].content).to.not.exist()
+          done()
+        })
+      )
+    })
+  })
+}
+
+function fileEql (f1, f2, done) {
+  pull(
+    f1.content,
+    pull.collect((err, data) => {
+      if (err) {
+        return done(err)
+      }
+
+      try {
+        if (f2) {
+          expect(Buffer.concat(data)).to.eql(f2)
+        } else {
+          expect(data).to.exist()
+        }
+      } catch (err) {
+        return done(err)
+      }
+      done()
+    })
+  )
+}
