This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit 1db4856

more fixes for merkledag changes
Parent: a9d303c

File tree

6 files changed: +115 additions, −68 deletions


package.json

Lines changed: 4 additions & 5 deletions
@@ -2,7 +2,7 @@
   "name": "ipfs-unixfs-engine",
   "version": "0.11.4",
   "description": "JavaScript implementation of the unixfs Engine used by IPFS",
-  "main": "lib/index.js",
+  "main": "src/index.js",
   "jsnext:main": "src/index.js",
   "scripts": {
     "lint": "aegir-lint",
@@ -45,10 +45,10 @@
     "pre-commit": "^1.1.3",
     "pull-zip": "^2.0.0",
     "raw-loader": "^0.5.1",
-    "rimraf": "^2.5.4",
-    "run-series": "^1.1.4"
+    "rimraf": "^2.5.4"
   },
   "dependencies": {
+    "async": "^2.0.1",
     "ipfs-merkle-dag": "^0.7.0",
     "ipfs-unixfs": "^0.1.4",
     "is-ipfs": "^0.2.0",
@@ -58,8 +58,7 @@
     "pull-pushable": "^2.0.1",
     "pull-stream": "^3.4.5",
     "pull-traverse": "^1.0.3",
-    "pull-write": "^1.1.0",
-    "run-parallel": "^1.1.6"
+    "pull-write": "^1.1.0"
   },
   "contributors": [
     "David Dias <daviddias.p@gmail.com>",

src/exporter.js

Lines changed: 5 additions & 1 deletion
@@ -11,7 +11,11 @@ const dirExporter = require('./exporters/dir')
 const fileExporter = require('./exporters/file')

 module.exports = (hash, dagService, options) => {
-  hash = cleanMultihash(hash)
+  try {
+    hash = cleanMultihash(hash)
+  } catch (err) {
+    return pull.error(err)
+  }
   options = options || {}

   function visitor (item) {
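Because the exporter hands back a pull-stream source, a synchronous throw from cleanMultihash would bypass the stream's error path entirely; returning pull.error(err) delivers the failure to the stream's consumer instead. A rough usage sketch (the bad hash is made up, and dagService is assumed to be in scope):

const pull = require('pull-stream')
const exporter = require('./exporter')

pull(
  exporter('not-a-multihash', dagService),
  pull.collect((err, files) => {
    // the cleanMultihash failure now arrives here as a stream error
    if (err) console.error('export failed:', err.message)
  })
)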

src/importer.js

Lines changed: 31 additions & 18 deletions
@@ -6,10 +6,12 @@ const assert = require('assert')
 const pull = require('pull-stream')
 const pushable = require('pull-pushable')
 const write = require('pull-write')
-const parallel = require('run-parallel')
+const parallel = require('async/parallel')
+const waterfall = require('async/waterfall')

 const fsc = require('./chunker-fixed-size')
 const createAndStoreTree = require('./tree')
+const getSizeAndHash = require('./util').getSizeAndHash

 const DAGNode = merkleDAG.DAGNode

@@ -67,15 +69,18 @@ function createAndStoreDir (item, ds, cb) {
   const n = new DAGNode()
   n.data = d.marshal()

-  ds.put(n, (err) => {
-    if (err) return cb(err)
-    cb(null, {
-      path: item.path,
-      multihash: n.multihash(),
-      size: n.size()
-      // dataSize: d.fileSize()
-    })
-  })
+  waterfall([
+    (cb) => ds.put(n, cb),
+    (cb) => getSizeAndHash(n, cb),
+    (res, cb) => {
+      cb(null, {
+        path: item.path,
+        multihash: res.multihash,
+        size: res.size
+        // dataSize: d.fileSize()
+      })
+    }
+  ], cb)
 }

 function createAndStoreFile (file, ds, cb) {
@@ -102,14 +107,17 @@ function createAndStoreFile (file, ds, cb) {
     const l = new UnixFS('file', Buffer(chunk))
     const n = new DAGNode(l.marshal())

-    ds.put(n, (err) => {
+    waterfall([
+      (cb) => ds.put(n, cb),
+      (cb) => getSizeAndHash(n, cb)
+    ], (err, stats) => {
       if (err) {
-        return cb(new Error('Failed to store chunk'))
+        return cb(err)
       }

       cb(null, {
-        Hash: n.multihash(),
-        Size: n.size(),
+        Hash: stats.multihash,
+        Size: stats.size,
         leafSize: l.fileSize(),
         Name: ''
       })
@@ -140,13 +148,18 @@ function createAndStoreFile (file, ds, cb) {
     }

     n.data = f.marshal()
-    ds.put(n, (err) => {
-      if (err) return cb(err)
+    parallel([
+      (cb) => ds.put(n, cb),
+      (cb) => getSizeAndHash(n, cb)
+    ], (err, res) => {
+      if (err) {
+        return cb(err)
+      }

       cb(null, {
         path: file.path,
-        multihash: n.multihash(),
-        size: n.size()
+        multihash: res[1].multihash,
+        size: res[1].size
         // dataSize: f.fileSize()
       })
     })
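The pattern repeated through this file is: store the node, then fetch its now callback-based multihash and size via getSizeAndHash. A condensed sketch of the waterfall variant, assuming ds.put calls back with only an error (so nothing extra is threaded into the next step):

const waterfall = require('async/waterfall')

// ds, n and getSizeAndHash as in the diff above
function storeAndStat (ds, n, cb) {
  waterfall([
    (done) => ds.put(n, done),         // 1. persist the DAGNode
    (done) => getSizeAndHash(n, done)  // 2. read back its multihash and size
  ], cb)                               // cb receives (err, { multihash, size })
}

The parallel variant at the end of the file gets its results positionally as an array, which is why it reads res[1].multihash: index 0 belongs to ds.put, index 1 to getSizeAndHash.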

src/tree.js

Lines changed: 66 additions & 43 deletions
@@ -3,6 +3,9 @@
 const mh = require('multihashes')
 const UnixFS = require('ipfs-unixfs')
 const merkleDAG = require('ipfs-merkle-dag')
+const mapValues = require('async/mapValues')
+
+const getSizeAndHash = require('./util').getSizeAndHash

 const DAGLink = merkleDAG.DAGLink
 const DAGNode = merkleDAG.DAGNode
@@ -75,55 +78,75 @@ module.exports = (files, dagService, source, cb) => {

   function traverse (tree, path, done) {
     const keys = Object.keys(tree)
-    let tmpTree = tree
-    keys.map((key) => {
-      if (typeof tmpTree[key] === 'object' &&
-        !Buffer.isBuffer(tmpTree[key])) {
-        tmpTree[key] = traverse.call(this, tmpTree[key], path ? path + '/' + key : key, done)
-      }
-    })
-
-    // at this stage, all keys are multihashes
-    // create a dir node
-    // add all the multihashes as links
-    // return this new node multihash
-
-    const d = new UnixFS('directory')
-    const n = new DAGNode()
-
-    keys.forEach((key) => {
-      const b58mh = mh.toB58String(tmpTree[key])
-      const l = new DAGLink(
-        key, mhIndex[b58mh].size, tmpTree[key])
-      n.addRawLink(l)
-    })
-
-    n.data = d.marshal()

-    pendingWrites++
-    dagService.put(n, (err) => {
-      pendingWrites--
+    mapValues(tree, (node, key, cb) => {
+      if (typeof node === 'object' && !Buffer.isBuffer(node)) {
+        traverse.call(this, node, path ? `${path}/${key}` : key, cb)
+      } else {
+        cb(null, node)
+      }
+    }, (err, tmpTree) => {
       if (err) {
-        source.push(new Error('failed to store dirNode'))
-      } else if (path) {
-        source.push({
-          path: path,
-          multihash: n.multihash(),
-          size: n.size()
-        })
+        return done(err)
       }

-      if (pendingWrites <= 0) {
-        done()
+      // at this stage, all keys are multihashes
+      // create a dir node
+      // add all the multihashes as links
+      // return this new node multihash
+
+      const d = new UnixFS('directory')
+      const n = new DAGNode()
+
+      keys.forEach((key) => {
+        const b58mh = mh.toB58String(tmpTree[key])
+        const l = new DAGLink(
+          key, mhIndex[b58mh].size, tmpTree[key])
+        n.addRawLink(l)
+      })
+
+      n.data = d.marshal()
+
+      pendingWrites++
+      dagService.put(n, (err) => {
+        pendingWrites--
+        if (err) {
+          source.push(new Error('failed to store dirNode'))
+        } else if (path) {
+          getSizeAndHash(n, (err, stats) => {
+            if (err) {
+              return source.push(err)
+            }
+            source.push({
+              path: path,
+              multihash: stats.multihash,
+              size: stats.size
+            })
+          })
+        }
+
+        if (pendingWrites <= 0) {
+          finish()
+        }
+      })
+
+      function finish () {
+        if (!path) {
+          return done()
+        }
+
+        getSizeAndHash(n, (err, stats) => {
+          if (err) {
+            return done(err)
+          }
+
+          mhIndex[mh.toB58String(stats.multihash)] = {
+            size: stats.size
+          }
+          done(null, stats.multihash)
+        })
       }
     })
-
-    if (!path) {
-      return
-    }
-
-    mhIndex[mh.toB58String(n.multihash())] = { size: n.size() }
-    return n.multihash()
   }

   traverse(fileTree, null, cb)
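The old traverse recursed synchronously through keys.map and returned multihashes directly; that breaks once multihash() and size() take callbacks. async/mapValues preserves the shape of the recursion: it maps every entry of the tree object through an async iteratee and calls back with an object under the same keys. A toy sketch of just that mapping step (the tree contents are invented):

const mapValues = require('async/mapValues')

// Buffers stand in for multihashes of already-stored files;
// nested plain objects are subdirectories still to be resolved
const tree = {
  'a.txt': Buffer.from('...'),
  docs: { 'b.txt': Buffer.from('...') }
}

mapValues(tree, (node, key, cb) => {
  if (typeof node === 'object' && !Buffer.isBuffer(node)) {
    // the real code recurses via traverse(); fake the result here
    return cb(null, Buffer.from('dir-multihash'))
  }
  cb(null, node)  // leaf: pass the multihash through unchanged
}, (err, resolved) => {
  // resolved has the same keys, every value now a multihash Buffer
})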

src/util.js

Lines changed: 8 additions & 0 deletions
@@ -4,6 +4,7 @@ const UnixFS = require('ipfs-unixfs')
 const pull = require('pull-stream')
 const mh = require('multihashes')
 const isIPFS = require('is-ipfs')
+const parallel = require('async/parallel')

 exports.switchType = (node, dirHandler, fileHandler) => {
   const data = UnixFS.unmarshal(node.data)
@@ -26,3 +27,10 @@ exports.cleanMultihash = (multihash) => {

   return multihash
 }
+
+exports.getSizeAndHash = (n, cb) => {
+  parallel({
+    multihash: (cb) => n.multihash(cb),
+    size: (cb) => n.size(cb)
+  }, cb)
+}
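getSizeAndHash bundles the two now-asynchronous DAGNode accessors into one call; because async/parallel is given an object of tasks, the results arrive under the matching keys. Usage, assuming n is a DAGNode in scope:

const getSizeAndHash = require('./util').getSizeAndHash

getSizeAndHash(n, (err, stats) => {
  if (err) throw err
  console.log(stats.multihash)  // result of n.multihash(cb)
  console.log(stats.size)       // result of n.size(cb)
})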

test/node.js

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@ const path = require('path')
 const IPFSRepo = require('ipfs-repo')
 const Store = require('fs-pull-blob-store')
 const mkdirp = require('mkdirp')
-const series = require('run-series')
+const series = require('async/series')

 describe('core', () => {
   const repoExample = path.join(process.cwd(), '/test/repo-example')

0 comments