Skip to content

Commit f7e4047

Browse files
committed
feat: blockstore gets blockBlobs instead of blocks (the difference is that now it receives the key in which it should store it)
1 parent 311551a commit f7e4047

File tree

3 files changed

+75
-54
lines changed

3 files changed

+75
-54
lines changed

package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@
4444
"dependencies": {
4545
"babel-runtime": "^6.11.6",
4646
"base32.js": "^0.1.0",
47-
"ipfs-block": "^0.3.0",
47+
"ipfs-block": "^0.4.0",
4848
"lock": "^0.1.3",
4949
"multihashes": "^0.2.2",
5050
"pull-defer": "^0.2.2",
@@ -67,4 +67,4 @@
6767
"nginnever <ginneversource@gmail.com>",
6868
"npmcdn-to-unpkg-bot <npmcdn-to-unpkg-bot@users.noreply.github.com>"
6969
]
70-
}
70+
}

src/stores/blockstore.js

Lines changed: 61 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -1,23 +1,23 @@
11
'use strict'
22

33
const Block = require('ipfs-block')
4-
const pull = require('pull-stream')
54
const Lock = require('lock')
65
const base32 = require('base32.js')
76
const path = require('path')
8-
const pullWrite = require('pull-write')
97
const parallel = require('run-parallel')
8+
const pull = require('pull-stream')
9+
const pullWrite = require('pull-write')
1010
const pullDefer = require('pull-defer/source')
1111

1212
const PREFIX_LENGTH = 5
13+
const EXTENSION = 'data'
1314

1415
exports = module.exports
1516

1617
function multihashToPath (multihash) {
17-
const extension = 'data'
1818
const encoder = new base32.Encoder()
1919
const hash = encoder.write(multihash).finalize()
20-
const filename = `${hash}.${extension}`
20+
const filename = `${hash}.${EXTENSION}`
2121
const folder = filename.slice(0, PREFIX_LENGTH)
2222

2323
return path.join(folder, filename)
@@ -27,17 +27,19 @@ exports.setUp = (basePath, BlobStore, locks) => {
2727
const store = new BlobStore(basePath + '/blocks')
2828
const lock = new Lock()
2929

30-
function writeBlock (block, callback) {
31-
if (!block || !block.data) {
30+
// blockBlob is an object with:
31+
// { data: <>, key: <> }
32+
function writeBlock (blockBlob, callback) {
33+
if (!blockBlob || !blockBlob.data) {
3234
return callback(new Error('Invalid block'))
3335
}
3436

35-
const key = multihashToPath(block.key())
37+
const key = multihashToPath(blockBlob.key)
3638

3739
lock(key, (release) => {
3840
pull(
3941
pull.values([
40-
block.data
42+
blockBlob.data
4143
]),
4244
store.write(key, release(released))
4345
)
@@ -84,35 +86,32 @@ exports.setUp = (basePath, BlobStore, locks) => {
8486
return deferred
8587
},
8688

87-
// returns a pull-stream to write blocks into
88-
// TODO use a more explicit name, given that getStream is just for
89-
// one block, multiple blocks should have different naming
89+
/*
90+
* putStream - write multiple blocks
91+
*
92+
* returns a pull-stream that expects blockBlobs
93+
*
94+
* NOTE: blockBlob is a { data: <>, key: <> } and not a
95+
* ipfs-block instance. This is because Block instances support
96+
* several types of hashing and it is up to the BlockService
97+
* to understand the right one to use (given the CID)
98+
*/
99+
// TODO
100+
// consider using a more explicit name, this can cause some confusion
101+
// since the natural association is
102+
// getStream - createReadStream - read one
103+
// putStream - createWriteStream - write one
104+
// where in fact it is:
105+
// getStream - createReadStream - read one (the same)
106+
// putStream - createFilesWriteStream - write several
107+
//
90108
putStream () {
91109
let ended = false
92110
let written = []
93111
let push = null
94112

95-
const sink = pullWrite((blocks, cb) => {
96-
const tasks = blocks.map((block) => {
97-
return (cb) => {
98-
writeBlock(block, (err, meta) => {
99-
if (err) {
100-
return cb(err)
101-
}
102-
103-
if (push) {
104-
const read = push
105-
push = null
106-
read(null, meta)
107-
return cb()
108-
}
109-
110-
written.push(meta)
111-
cb()
112-
})
113-
}
114-
})
115-
113+
const sink = pullWrite((blockBlobs, cb) => {
114+
const tasks = writeTasks(blockBlobs)
116115
parallel(tasks, cb)
117116
}, null, 100, (err) => {
118117
ended = err || true
@@ -121,7 +120,6 @@ exports.setUp = (basePath, BlobStore, locks) => {
121120
}
122121
})
123122

124-
// TODO ??Why does a putStream need to be a source as well??
125123
const source = (end, cb) => {
126124
if (end) {
127125
ended = end
@@ -137,7 +135,36 @@ exports.setUp = (basePath, BlobStore, locks) => {
137135
push = cb
138136
}
139137

140-
return { source: source, sink: sink }
138+
/*
139+
* Creates individual tasks to write each block blob that can be
140+
* executed in parallel
141+
*/
142+
function writeTasks (blockBlobs) {
143+
return blockBlobs.map((blockBlob) => {
144+
return (cb) => {
145+
writeBlock(blockBlob, (err, meta) => {
146+
if (err) {
147+
return cb(err)
148+
}
149+
150+
if (push) {
151+
const read = push
152+
push = null
153+
read(null, meta)
154+
return cb()
155+
}
156+
157+
written.push(meta)
158+
cb()
159+
})
160+
}
161+
})
162+
}
163+
164+
return {
165+
source: source,
166+
sink: sink
167+
}
141168
},
142169

143170
has (key, callback) {

test/blockstore-test.js

Lines changed: 12 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -12,15 +12,15 @@ module.exports = (repo) => {
1212
describe('blockstore', () => {
1313
const helloKey = 'CIQLS/CIQLSTJHXGJU2PQIUUXFFV62PWV7VREE57RXUU4A52IIR55M4LX432I.data'
1414

15-
const helloIpldKey = 'CIQO2/CIQO2EUTF47PSTAHSL54KUTDS2AAN2DH4URM7H5KRATUGQFCM4OUIQI.data'
16-
1715
const blockCollection = _.range(100).map((i) => new Block(new Buffer(`hello-${i}-${Math.random()}`)))
1816

1917
describe('.putStream', () => {
2018
it('simple', (done) => {
2119
const b = new Block('hello world')
2220
pull(
23-
pull.values([b]),
21+
pull.values([
22+
{ data: b.data, key: b.key() }
23+
]),
2424
repo.blockstore.putStream(),
2525
pull.collect((err, meta) => {
2626
expect(err).to.not.exist
@@ -43,13 +43,17 @@ module.exports = (repo) => {
4343
}
4444

4545
pull(
46-
pull.values([b]),
46+
pull.values([
47+
{ data: b.data, key: b.key() }
48+
]),
4749
repo.blockstore.putStream(),
4850
pull.collect(finish)
4951
)
5052

5153
pull(
52-
pull.values([b]),
54+
pull.values([
55+
{ data: b.data, key: b.key() }
56+
]),
5357
repo.blockstore.putStream(),
5458
pull.collect(finish)
5559
)
@@ -59,6 +63,9 @@ module.exports = (repo) => {
5963
parallel(_.range(50).map(() => (cb) => {
6064
pull(
6165
pull.values(blockCollection),
66+
pull.map((b) => {
67+
return { data: b.data, key: b.key() }
68+
}),
6269
repo.blockstore.putStream(),
6370
pull.collect((err, meta) => {
6471
expect(err).to.not.exist
@@ -69,19 +76,6 @@ module.exports = (repo) => {
6976
}), done)
7077
})
7178

72-
it('custom extension', function (done) {
73-
const b = new Block('hello world 2')
74-
pull(
75-
pull.values([b]),
76-
repo.blockstore.putStream(),
77-
pull.collect((err, meta) => {
78-
expect(err).to.not.exist
79-
expect(meta[0].key).to.be.eql(helloIpldKey)
80-
done()
81-
})
82-
)
83-
})
84-
8579
it('returns an error on invalid block', (done) => {
8680
pull(
8781
pull.values(['hello']),

0 commit comments

Comments
 (0)