1
- # IPFS unixFS Engine
1
+ IPFS unixFS Engine
2
+ ==================
2
3
3
4
[ ![ ] ( https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square )] ( http://ipn.io )
4
5
[ ![ ] ( https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square )] ( http://ipfs.io/ )
@@ -48,19 +49,19 @@ And write the importing logic:
48
49
``` js
49
50
// Dependencies to create a DAG Service (where the dir will be imported into)
50
51
const memStore = require (' abstract-blob-store' )
51
- const ipfsRepo = require (' ipfs-repo' )
52
- const ipfsBlock = require (' ipfs-block' )
53
- const ipfsBlockService = require (' ipfs-block-service' )
54
- const ipfsMerkleDag = require (' ipfs-merkle-dag' )
52
+ const Repo = require (' ipfs-repo' )
53
+ const Block = require (' ipfs-block' )
54
+ const BlockService = require (' ipfs-block-service' )
55
+ const MerkleDag = require (' ipfs-merkle-dag' )
55
56
const fs = require (' fs' )
56
57
57
- const repo = new ipfsRepo (' ' , { stores: memStore })
58
- const blocks = new ipfsBlockService (repo)
59
- const dag = new ipfsMerkleDag.DAGService (blocks)
58
+ const repo = new Repo (' ' , { stores: memStore })
59
+ const blockService = new BlockService (repo)
60
+ const dagService = new MerkleDag.DAGService (blockService)
60
61
61
62
62
- const Importer = require (' ipfs-unixfs-engine' ).importer
63
- const add = new Importer (dag )
63
+ const Importer = require (' ipfs-unixfs-engine' ).Importer
64
+ const filesAddStream = new Importer (dagService )
64
65
65
66
// An array to hold the return of nested file/dir info from the importer
66
67
// A root DAG Node is received upon completion
@@ -76,26 +77,24 @@ const input2 = {path: /tmp/foo/quxx, content: rs2}
76
77
77
78
// Listen for the data event from the importer stream
78
79
79
- add .on (' data' , (info ) => {
80
+ filesAddStream .on (' data' , (info ) => {
80
81
res .push (info)
81
82
})
82
83
83
84
// The end event of the stream signals that the importer is done
84
85
85
- add .on (' end' , () => {
86
- console .log (' Finished adding files!' )
87
- return
86
+ filesAddStream .on (' end' , () => {
87
+ console .log (' Finished adding files!' )
88
88
})
89
89
90
- // Calling write on the importer to add the file/object tuples
90
+ // Calling write on the importer to add the file/object tuples
91
91
92
- add .write (input)
93
- add .write (input2)
94
- add .end ()
92
+ filesAddStream .write (input)
93
+ filesAddStream .write (input2)
94
+ filesAddStream .end ()
95
95
```
96
96
97
97
When run, the stat of DAG Node is outputted for each file on data event until the root:
98
-
99
98
```
100
99
{ multihash: <Buffer 12 20 bd e2 2b 57 3f 6f bd 7c cc 5a 11 7f 28 6c a2 9a 9f c0 90 e1 d4 16 d0 5f 42 81 ec 0c 2a 7f 7f 93>,
101
100
size: 39243,
@@ -143,38 +142,37 @@ Nodes.
143
142
### Example Exporter
144
143
145
144
```
146
- const ipfsRepo = require('ipfs-repo')
147
- const ipfsBlock = require('ipfs-block')
148
- const ipfsBlockService = require('ipfs-block-service')
149
- const ipfsMerkleDag = require('ipfs-merkle-dag')
145
+ const Repo = require('ipfs-repo')
146
+ const Block = require('ipfs-block')
147
+ const BlockService = require('ipfs-block-service')
148
+ const MerkleDag = require('ipfs-merkle-dag')
150
149
151
- const repo = new ipfsRepo ('', { stores: memStore })
152
- const blocks = new ipfsBlockService (repo)
153
- const dag = new ipfsMerkleDag .DAGService(blocks )
150
+ const repo = new Repo ('', { stores: memStore })
151
+ const blockService = new BlockService (repo)
152
+ const dagService = new MerkleDag .DAGService(blockService )
154
153
155
154
// Create an export readable object stream with the hash you want to export and a dag service
156
155
157
- const exportEvent = Exporter(hash , dag)
156
+ const filesStream = Exporter(<multihash> , dagService)
158
157
159
158
// Pipe the return stream to console
160
159
161
- exportEvent .on('data', (result ) => {
162
- result.stream .pipe(process.stdout)
160
+ filesStream .on('data', (file ) => {
161
+ file.content .pipe(process.stdout)
163
162
}
164
163
```
165
164
166
165
### Exporter: API
166
+
167
167
``` js
168
- const Exporter = require (' ipfs-unixfs-engine' ).exporter
168
+ const Exporter = require (' ipfs-unixfs-engine' ).Exporter
169
169
```
170
170
171
171
### new Exporter(hash, dagService)
172
172
173
- Uses the given [ DAG Service] [ ] to fetch an IPFS [ UnixFS] [ ] object(s) by their
174
- multiaddress.
173
+ Uses the given [ DAG Service] [ ] to fetch an IPFS [ UnixFS] [ ] object(s) by their multihash.
175
174
176
- Creates a new readable stream in object mode that outputs objects of the
177
- form
175
+ Creates a new readable stream in object mode that outputs objects of the form
178
176
179
177
``` js
180
178
{
183
181
}
184
182
```
185
183
186
- Errors are received as with a normal stream, by listening on the ` 'error' ` event
187
- to be emitted.
184
+ Errors are received as with a normal stream, by listening on the ` 'error' ` event to be emitted.
188
185
189
186
190
187
[ DAG Service ] : https://github.com/vijayee/js-ipfs-merkle-dag/
0 commit comments