
Commit cd3d25e

fix: new header and more error handling
1 parent 0d4581a commit cd3d25e

File tree: 2 files changed (+62 / -44 lines)


src/http/api/routes/files.js

Lines changed: 60 additions & 42 deletions
@@ -8,7 +8,9 @@ const StreamConcat = require('stream-concat')
 const boom = require('boom')
 const pump = require('pump')
 const glob = require('fast-glob')
-const multipart = require('ipfs-multipart')
+const Joi = require('joi')
+const content = require('content')
+const { Parser } = require('ipfs-multipart')
 const toPull = require('stream-to-pull-stream')
 const toStream = require('pull-stream-to-stream')
 const pull = require('pull-stream')
@@ -20,12 +22,24 @@ const resources = require('./../resources')
 
 const filesDir = tempy.directory()
 
-const createMultipartReply = (readStream, boundary, ipfs, query, reply, cb) => {
-  const fileAdder = pushable()
-  let parser = null
+const parseChunkedInput = (request) => {
+  const input = request.headers['x-chunked-input']
+  const regex = /^uuid="([^"]+)";\s*index=(\d*)/i
+
+  if (!input) {
+    return null
+  }
+  const match = input.match(regex)
 
-  // use the other multipart factory for non chunked to get the boundary
-  parser = new multipart.Parser({ boundary: boundary })
+  return [match[1], Number(match[2])]
+}
+
+const createMultipartReply = (readStream, request, reply, cb) => {
+  const fileAdder = pushable()
+  const boundary = content.type(request.headers['content-type']).boundary
+  const ipfs = request.server.app.ipfs
+  const query = request.query
+  const parser = new Parser({ boundary: boundary })
   readStream.pipe(parser)
 
   parser.on('file', (fileName, fileStream) => {
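
For reference, a minimal sketch (not part of the commit) of how the new parseChunkedInput helper and the content.type() boundary lookup are expected to behave; the UUID, index and boundary values below are made up for illustration.

// Illustration only: hypothetical request headers.
const content = require('content')

const fakeRequest = {
  headers: {
    'x-chunked-input': 'uuid="3f1b2c4d";index=2',
    'content-type': 'multipart/form-data; boundary=----WebKitFormBoundaryABC123'
  }
}

// With the regex above, parseChunkedInput(fakeRequest) would return ['3f1b2c4d', 2].
const boundary = content.type(fakeRequest.headers['content-type']).boundary
// boundary === '----WebKitFormBoundaryABC123', the value createMultipartReply
// hands to the ipfs-multipart Parser.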
@@ -46,7 +60,7 @@ const createMultipartReply = (readStream, boundary, ipfs, query, reply, cb) => {
     fileAdder.end()
   })
 
-  // TODO: handle multipart errors
+  parser.on('error', err => cb(err))
 
   const pushStream = pushable()
   const abortStream = abortable()
@@ -136,7 +150,7 @@ module.exports = (server) => {
     config: {
       payload: {
         parse: false,
-        output: 'stream',
+        // output: 'stream',
         maxBytes: 10048576
       },
       handler: resources.files.add.handler,
@@ -154,48 +168,49 @@ module.exports = (server) => {
         maxBytes: 1000 * 1024 * 1024
         // maxBytes: 10485760
       },
+      validate: {
+        headers: {
+          'content-range': Joi.string().regex(/(\d+)-(\d+)\/(\d+|\*)/),
+          'x-chunked-input': Joi.string().regex(/^uuid="([^"]+)";\s*index=(\d*)/i)
+        },
+        options: {
+          allowUnknown: true
+        }
+      },
       handler: (request, reply) => {
-        // console.log('received')
-        // console.log(request.headers['content-range'])
-        // console.log(request.headers['x-ipfs-chunk-index'])
-        // console.log(request.headers['x-ipfs-chunk-group-uuid'])
-        const id = request.headers['x-ipfs-chunk-group-uuid']
-        const boundary = request.headers['x-ipfs-chunk-boundary']
-        const ipfs = request.server.app.ipfs
+        const chunkedInput = parseChunkedInput(request)
 
-        // non chunked
+        if (boom.isBoom(chunkedInput)) {
+          return reply(chunkedInput)
+        }
 
-        if (!id) {
+        // non chunked
+        if (!chunkedInput) {
           createMultipartReply(
             request.payload,
-            boundary,
-            ipfs,
-            request.query,
+            request,
             reply,
-            () => {
+            (err) => {
+              if (err) {
+                return reply(err)
+              }
              console.log('Finished adding')
            }
          )
 
          return
        }
-        const index = Number(request.headers['x-ipfs-chunk-index'])
-        const file = path.join(filesDir, id) + '-' + index
-        const match = request.headers['content-range'].match(/(\d+)-(\d+)\/(\d+|\*)/)
-
-        if (!match || !match[1] || !match[2] || !match[3]) {
-          return boom.badRequest('malformed content-range header')
-        }
 
-        const start = parseInt(match[1])
-        const end = parseInt(match[2])
-        const total = parseInt(match[3])
+        // chunked
+        const [uuid, index] = chunkedInput
+        const [, start, , total] = request.headers['content-range'].match(/(\d+)-(\d+)\/(\d+|\*)/)
+        const file = path.join(filesDir, uuid) + '-' + index
 
        // TODO validate duplicates, missing chunks
 
        if (start === total) {
          /* all chunks have been received */
-          const base = path.join(filesDir, id) + '-'
+          const base = path.join(filesDir, uuid) + '-'
          const pattern = base + '*'
          const files = glob.sync(pattern)
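
A small sketch (illustration only, with a made-up Content-Range value) of what the destructuring in the chunked branch yields:

// Illustration only: hypothetical Content-Range header value.
const match = '1048576-1048576/1048576'.match(/(\d+)-(\d+)\/(\d+|\*)/)
// match is ['1048576-1048576/1048576', '1048576', '1048576', '1048576']
const [, start, , total] = match
// start and total are both the string '1048576', so the `start === total`
// check above compares the raw string values taken from the header.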

@@ -207,26 +222,29 @@ module.exports = (server) => {
          const nextStream = () => fileIndex === files.length ? null : fs.createReadStream(files[fileIndex++])
          createMultipartReply(
            new StreamConcat(nextStream),
-            boundary,
-            ipfs,
-            request.query,
+            request,
            reply,
-            () => {
+            (err) => {
+              if (err) {
+                return reply(err)
+              }
+
              console.log('Finished adding')
-              del(pattern, { force: true })
-                .then(paths => {
-                  console.log('Deleted files and folders:\n', paths.join('\n'))
-                })
-                .catch(console.error)
+              // del(pattern, { force: true })
+              //   .then(paths => {
+              //     console.log('Deleted files and folders:\n', paths.join('\n'))
+              //   })
+              //   .catch(console.error)
            }
          )
        } else {
+          console.log(file)
          pump(
            request.payload,
            fs.createWriteStream(file),
            (err) => {
              if (err) {
-                reply(err)
+                return reply(err)
              }
              reply({ Bytes: total })
            }
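
As an aside, a minimal sketch (not from the commit, with made-up paths) of the glob-and-concat reassembly used above; stream-concat keeps asking for the next stream until the callback returns null.

// Illustration only: hypothetical chunk files written as <uuid>-<index> by the handler.
const fs = require('fs')
const StreamConcat = require('stream-concat')

const files = ['/tmp/files/3f1b2c4d-0', '/tmp/files/3f1b2c4d-1', '/tmp/files/3f1b2c4d-2']
let fileIndex = 0
const nextStream = () => fileIndex === files.length ? null : fs.createReadStream(files[fileIndex++])

// The concatenated stream is what createMultipartReply receives as readStream.
const concatenated = new StreamConcat(nextStream)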

src/http/index.js

Lines changed: 2 additions & 2 deletions
@@ -105,8 +105,8 @@ function HttpApi (repo, config, cliArgs) {
     routes: {
       cors: {
         origin: ['*'],
-        additionalHeaders: ['X-Stream-Output, X-Chunked-Output, X-Content-Length', 'Content-Type', 'Content-Range', 'x-ipfs-chunk-index', 'x-ipfs-chunk-group-uuid', 'x-ipfs-chunk-boundary'],
-        additionalExposedHeaders: ['X-Stream-Output, X-Chunked-Output, X-Content-Length']
+        additionalHeaders: ['X-Stream-Output, X-Chunked-Output, X-Content-Length', 'Content-Type', 'Content-Range', 'X-Chunked-Input'],
+        additionalExposedHeaders: ['X-Stream-Output, X-Chunked-Output, X-Content-Length', 'X-Chunked-Input']
       }
     }
   },
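
For context, a minimal sketch (illustration only) of the headers a client would now send with each chunk; the endpoint path and all values are hypothetical and only need to satisfy the Joi patterns and the CORS allow-list above.

// Illustration only: one chunk of a hypothetical chunked upload.
const headers = {
  'Content-Type': 'application/octet-stream',
  'Content-Range': '0-262143/1048576',
  'X-Chunked-Input': 'uuid="3f1b2c4d";index=0'
}

// e.g. fetch('/api/v0/add' /* hypothetical path */, { method: 'POST', headers, body: chunk })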
