@@ -8,7 +8,9 @@ const StreamConcat = require('stream-concat')
 const boom = require('boom')
 const pump = require('pump')
 const glob = require('fast-glob')
-const multipart = require('ipfs-multipart')
+const Joi = require('joi')
+const content = require('content')
+const { Parser } = require('ipfs-multipart')
 const toPull = require('stream-to-pull-stream')
 const toStream = require('pull-stream-to-stream')
 const pull = require('pull-stream')
@@ -20,12 +22,24 @@ const resources = require('./../resources')
 
 const filesDir = tempy.directory()
 
-const createMultipartReply = (readStream, boundary, ipfs, query, reply, cb) => {
-  const fileAdder = pushable()
-  let parser = null
+const parseChunkedInput = (request) => {
+  const input = request.headers['x-chunked-input']
+  const regex = /^uuid="([^"]+)";\s*index=(\d*)/i
+
+  if (!input) {
+    return null
+  }
+  const match = input.match(regex)
 
-  // use the other multipart factory for non chunked to get the boundary
-  parser = new multipart.Parser({ boundary: boundary })
+  return [match[1], Number(match[2])]
+}
+
+const createMultipartReply = (readStream, request, reply, cb) => {
+  const fileAdder = pushable()
+  const boundary = content.type(request.headers['content-type']).boundary
+  const ipfs = request.server.app.ipfs
+  const query = request.query
+  const parser = new Parser({ boundary: boundary })
 
   readStream.pipe(parser)
 
   parser.on('file', (fileName, fileStream) => {
@@ -46,7 +60,7 @@ const createMultipartReply = (readStream, boundary, ipfs, query, reply, cb) => {
     fileAdder.end()
   })
 
-  // TODO: handle multipart errors
+  parser.on('error', err => cb(err))
 
   const pushStream = pushable()
   const abortStream = abortable()
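
For orientation, here is a small sketch of what the two helpers introduced above produce. The header values are made up; `content` is hapi's content-type parser:

// Illustration only — hypothetical values, not part of the diff.
const content = require('content')

content.type('multipart/form-data; boundary=----WebKitFormBoundaryAbc')
// -> { mime: 'multipart/form-data', boundary: '----WebKitFormBoundaryAbc' }

parseChunkedInput({ headers: { 'x-chunked-input': 'uuid="de305d54"; index=7' } })
// -> ['de305d54', 7]  (group uuid plus numeric chunk index)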
@@ -136,7 +150,7 @@ module.exports = (server) => {
     config: {
       payload: {
         parse: false,
-        output: 'stream',
+        // output: 'stream',
         maxBytes: 10048576
       },
       handler: resources.files.add.handler,
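
A note on the payload settings in this hunk, stated as assumptions about hapi's (v16-era) semantics rather than facts shown in the diff:

// parse: false     -> hapi does not parse the body; the handler gets raw bytes
// output: 'data'   -> raw payload delivered as a Buffer (hapi's default)
// output: 'stream' -> raw payload delivered as a readable stream
// Commenting out output: 'stream' therefore falls back to the buffered default.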
@@ -154,48 +168,49 @@ module.exports = (server) => {
         maxBytes: 1000 * 1024 * 1024
         // maxBytes: 10485760
       },
+      validate: {
+        headers: {
+          'content-range': Joi.string().regex(/(\d+)-(\d+)\/(\d+|\*)/),
+          'x-chunked-input': Joi.string().regex(/^uuid="([^"]+)";\s*index=(\d*)/i)
+        },
+        options: {
+          allowUnknown: true
+        }
+      },
       handler: (request, reply) => {
-        // console.log('received')
-        // console.log(request.headers['content-range'])
-        // console.log(request.headers['x-ipfs-chunk-index'])
-        // console.log(request.headers['x-ipfs-chunk-group-uuid'])
-        const id = request.headers['x-ipfs-chunk-group-uuid']
-        const boundary = request.headers['x-ipfs-chunk-boundary']
-        const ipfs = request.server.app.ipfs
+        const chunkedInput = parseChunkedInput(request)
 
-        // non chunked
+        if (boom.isBoom(chunkedInput)) {
+          return reply(chunkedInput)
+        }
 
-        if (!id) {
+        // non chunked
+        if (!chunkedInput) {
           createMultipartReply(
             request.payload,
-            boundary,
-            ipfs,
-            request.query,
+            request,
             reply,
-            () => {
+            (err) => {
+              if (err) {
+                return reply(err)
+              }
               console.log('Finished adding')
             }
           )
 
           return
         }
-        const index = Number(request.headers['x-ipfs-chunk-index'])
-        const file = path.join(filesDir, id) + '-' + index
-        const match = request.headers['content-range'].match(/(\d+)-(\d+)\/(\d+|\*)/)
-
-        if (!match || !match[1] || !match[2] || !match[3]) {
-          return boom.badRequest('malformed content-range header')
-        }
 
-        const start = parseInt(match[1])
-        const end = parseInt(match[2])
-        const total = parseInt(match[3])
+        // chunked
+        const [uuid, index] = chunkedInput
+        const [, start, , total] = request.headers['content-range'].match(/(\d+)-(\d+)\/(\d+|\*)/)
+        const file = path.join(filesDir, uuid) + '-' + index
 
         // TODO validate duplicates, missing chunks
 
         if (start === total) {
           /* all chunks have been received */
-          const base = path.join(filesDir, id) + '-'
+          const base = path.join(filesDir, uuid) + '-'
           const pattern = base + '*'
           const files = glob.sync(pattern)
 
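The chunked branch above leans on two conventions worth making explicit. A small sketch, with hypothetical values:

// Illustration only — how the chunked headers map onto on-disk chunk files.
const [uuid, index] = parseChunkedInput({
  headers: { 'x-chunked-input': 'uuid="de305d54"; index=2' }
}) // -> ['de305d54', 2]

const range = '2097152-2097152/2097152' // final request: start === total
const [, start, , total] = range.match(/(\d+)-(\d+)\/(\d+|\*)/)
// Note: match groups are strings, so start === total is a string comparison;
// it holds here because both groups capture the same digits.

// Each chunk body is written to `${filesDir}/${uuid}-${index}`; once
// start === total, every `${uuid}-*` file is globbed and re-streamed
// through StreamConcat into the multipart parser.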
@@ -207,26 +222,29 @@ module.exports = (server) => {
           const nextStream = () => fileIndex === files.length ? null : fs.createReadStream(files[fileIndex++])
           createMultipartReply(
             new StreamConcat(nextStream),
-            boundary,
-            ipfs,
-            request.query,
+            request,
             reply,
-            () => {
+            (err) => {
+              if (err) {
+                return reply(err)
+              }
+
               console.log('Finished adding')
-              del(pattern, { force: true })
-                .then(paths => {
-                  console.log('Deleted files and folders:\n', paths.join('\n'))
-                })
-                .catch(console.error)
+              // del(pattern, { force: true })
+              //   .then(paths => {
+              //     console.log('Deleted files and folders:\n', paths.join('\n'))
+              //   })
+              //   .catch(console.error)
             }
           )
         } else {
+          console.log(file)
           pump(
             request.payload,
             fs.createWriteStream(file),
             (err) => {
               if (err) {
-                reply(err)
+                return reply(err)
               }
               reply({ Bytes: total })
             }
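
Finally, a minimal client-side sketch of the chunked protocol this route implies. The URL and the use of Node's global fetch are assumptions (neither appears in this diff); only the two header formats and the "range start equals total" completion signal come from the code above. buf is assumed to be a complete multipart/form-data payload whose boundary matches the content-type header, since the server re-parses the concatenated chunks as multipart:

// Hypothetical client — not part of the diff.
const crypto = require('crypto')

async function upload (buf, boundary, chunkSize, url) {
  const uuid = crypto.randomBytes(16).toString('hex')
  const total = buf.length
  let index = 0

  const send = (body, start, end) => fetch(url, {
    method: 'POST',
    headers: {
      'content-type': `multipart/form-data; boundary=${boundary}`,
      'x-chunked-input': `uuid="${uuid}"; index=${index++}`,
      'content-range': `${start}-${end}/${total}`
    },
    body
  })

  for (let start = 0; start < total; start += chunkSize) {
    const end = Math.min(start + chunkSize, total)
    await send(buf.subarray(start, end), start, end)
  }

  // A final request whose range start equals the total tells the server all
  // chunks are on disk; it then globs, concatenates and parses them.
  return send(Buffer.alloc(0), total, total)
}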