
Commit 2796f53

for testing purposes
1 parent 154bc60 commit 2796f53

2 files changed: 39 additions, 29 deletions

package.json

Lines changed: 2 additions & 2 deletions
@@ -2,7 +2,7 @@
   "name": "ipfs-repo",
   "version": "0.9.1",
   "description": "IPFS Repo implementation",
-  "main": "lib/index.js",
+  "main": "src/index.js",
   "jsnext:main": "src/index.js",
   "scripts": {
     "test": "aegir-test",
@@ -67,4 +67,4 @@
     "nginnever <ginneversource@gmail.com>",
     "npmcdn-to-unpkg-bot <npmcdn-to-unpkg-bot@users.noreply.github.com>"
   ]
-}
+}
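
With "main" now pointing at src/index.js, a plain require('ipfs-repo') resolves to the untranspiled source rather than a lib/ build, matching the existing "jsnext:main" entry. A quick check from a hypothetical consumer project:

// Node resolves require('ipfs-repo') via the "main" field in package.json
console.log(require.resolve('ipfs-repo'))
// => .../node_modules/ipfs-repo/src/index.js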

src/stores/blockstore.js

Lines changed: 37 additions & 27 deletions
@@ -1,23 +1,23 @@
 'use strict'

 const Block = require('ipfs-block')
-const pull = require('pull-stream')
 const Lock = require('lock')
 const base32 = require('base32.js')
 const path = require('path')
-const pullWrite = require('pull-write')
 const parallel = require('run-parallel')
+const pull = require('pull-stream')
+const pullWrite = require('pull-write')
 const pullDefer = require('pull-defer/source')

 const PREFIX_LENGTH = 5
+const EXTENSION = 'data'

 exports = module.exports

 function multihashToPath (multihash) {
-  const extension = 'data'
   const encoder = new base32.Encoder()
   const hash = encoder.write(multihash).finalize()
-  const filename = `${hash}.${extension}`
+  const filename = `${hash}.${EXTENSION}`
   const folder = filename.slice(0, PREFIX_LENGTH)

   return path.join(folder, filename)
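
With the extension hoisted into the EXTENSION constant, multihashToPath still shards each block into a folder named after the first PREFIX_LENGTH characters of its base32-encoded multihash. A small illustration of the resulting layout (the encoded hash below is made up):

const path = require('path')

const PREFIX_LENGTH = 5
const EXTENSION = 'data'

const hash = 'CIQBEMVNMYGRB73Q7WGXGZ52HIGHUXYZ' // hypothetical base32-encoded multihash
const filename = `${hash}.${EXTENSION}`
const folder = filename.slice(0, PREFIX_LENGTH)

console.log(path.join(folder, filename))
// => CIQBE/CIQBEMVNMYGRB73Q7WGXGZ52HIGHUXYZ.data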
@@ -87,7 +87,7 @@ exports.setUp = (basePath, BlobStore, locks) => {
     },

     /*
-     * returns a pull-stream to write blockBlob into
+     * returns a pull-stream that expects blockBlobs
      * NOTE: blockBlob is a { data: <>, key: <> } and not a
      * ipfs-block instance. This is because Block instances support
      * several types of hashing and it is up to the BlockService
@@ -101,26 +101,7 @@ exports.setUp = (basePath, BlobStore, locks) => {
       let push = null

       const sink = pullWrite((blockBlobs, cb) => {
-        const tasks = blockBlobs.map((blockBlob) => {
-          return (cb) => {
-            writeBlock(blockBlob, (err, meta) => {
-              if (err) {
-                return cb(err)
-              }
-
-              if (push) {
-                const read = push
-                push = null
-                read(null, meta)
-                return cb()
-              }
-
-              written.push(meta)
-              cb()
-            })
-          }
-        })
-
+        const tasks = writeTasks(blockBlobs)
         parallel(tasks, cb)
       }, null, 100, (err) => {
         ended = err || true
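
Each batch that pullWrite hands to the sink (up to 100 blockBlobs at a time) is fanned out through run-parallel, which invokes every task and calls back once all of them finish. A self-contained sketch of that task shape, with a stand-in for writeBlock:

const parallel = require('run-parallel')

// Each task is a function of the form (cb) => ..., the same shape
// that writeTasks produces in the next hunk.
const tasks = ['a', 'b', 'c'].map((id) => {
  return (cb) => {
    // hypothetical async write standing in for writeBlock()
    setImmediate(() => cb(null, { key: id }))
  }
})

parallel(tasks, (err, metas) => {
  if (err) throw err
  console.log(metas) // => [ { key: 'a' }, { key: 'b' }, { key: 'c' } ]
})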
@@ -129,7 +110,7 @@ exports.setUp = (basePath, BlobStore, locks) => {
         }
       })

-      // TODO ??Why does a putStream need to be a source as well??
+      // TODO ?? Why does a putStream need to be a source as well??
       const source = (end, cb) => {
         if (end) {
           ended = end
@@ -145,7 +126,36 @@ exports.setUp = (basePath, BlobStore, locks) => {
         push = cb
       }

-      return { source: source, sink: sink }
+      /*
+       * Creates individual tasks to write each block blob that can be
+       * executed in parallel
+       */
+      function writeTasks (blockBlobs) {
+        return blockBlobs.map((blockBlob) => {
+          return (cb) => {
+            writeBlock(blockBlob, (err, meta) => {
+              if (err) {
+                return cb(err)
+              }
+
+              if (push) {
+                const read = push
+                push = null
+                read(null, meta)
+                return cb()
+              }
+
+              written.push(meta)
+              cb()
+            })
+          }
+        })
+      }
+
+      return {
+        source: source,
+        sink: sink
+      }
     },

     has (key, callback) {
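
The refactored putStream stays a duplex: the sink consumes { data, key } blockBlobs while the source emits the write metadata back to whoever reads it, which is one practical answer to the TODO above. A usage sketch, assuming store is the object returned by exports.setUp(...) and someMultihash is a valid multihash buffer:

const pull = require('pull-stream')

// store assumed to come from exports.setUp(basePath, BlobStore, locks)
const writer = store.putStream()

// Feed blockBlobs into the sink...
pull(
  pull.values([{ data: Buffer.from('hello'), key: someMultihash }]), // hypothetical values
  writer.sink
)

// ...and collect the write metadata from the source.
pull(
  writer.source,
  pull.collect((err, metas) => {
    if (err) throw err
    console.log('wrote %d blocks', metas.length)
  })
)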
