Skip to content
This repository was archived by the owner on Feb 12, 2024. It is now read-only.

Commit 24fb89f

Browse files
committed
feat: support chunked add requests
1 parent ccad1c8 commit 24fb89f

File tree

4 files changed

+156
-12
lines changed

4 files changed

+156
-12
lines changed

package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -170,6 +170,7 @@
170170
"stream-to-pull-stream": "^1.7.2",
171171
"tar-stream": "^1.6.1",
172172
"temp": "~0.8.3",
173+
"tempy": "^0.2.1",
173174
"through2": "^2.0.3",
174175
"update-notifier": "^2.5.0",
175176
"yargs": "^12.0.1",

src/http/api/resources/files.js

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -165,6 +165,7 @@ exports.add = {
165165
},
166166

167167
handler: (request, reply) => {
168+
console.log('yoo')
168169
if (!request.payload) {
169170
return reply({
170171
Message: 'Array, Buffer, or String is required.',
@@ -182,6 +183,11 @@ exports.add = {
182183

183184
parser.on('file', (fileName, fileStream) => {
184185
fileName = decodeURIComponent(fileName)
186+
187+
console.log('file', fileName)
188+
fileStream.on('data', (d) => {
189+
console.log(d.byteLength)
190+
})
185191
const filePair = {
186192
path: fileName,
187193
content: toPull(fileStream)
@@ -192,7 +198,7 @@ exports.add = {
192198

193199
parser.on('directory', (directory) => {
194200
directory = decodeURIComponent(directory)
195-
201+
console.log('directory', directory)
196202
fileAdder.push({
197203
path: directory,
198204
content: ''
@@ -220,7 +226,7 @@ exports.add = {
220226
rawLeaves: request.query['raw-leaves'],
221227
progress: request.query.progress ? progressHandler : null,
222228
onlyHash: request.query['only-hash'],
223-
hashAlg: request.query['hash'],
229+
hashAlg: request.query.hash,
224230
wrapWithDirectory: request.query['wrap-with-directory'],
225231
pin: request.query.pin,
226232
chunker: request.query.chunker

src/http/api/routes/files.js

Lines changed: 142 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,66 @@
22

33
const resources = require('./../resources')
44
const mfs = require('ipfs-mfs/http')
5+
const fs = require('fs')
6+
const path = require('path')
7+
const tempy = require('tempy')
8+
const multipart = require('ipfs-multipart')
9+
const pull = require('pull-stream')
10+
const toPull = require('stream-to-pull-stream')
11+
const pushable = require('pull-pushable')
512

13+
const streams = []
14+
const filesDir = tempy.directory()
15+
16+
/**
 * Parse a multipart payload from `readStream` and add the contained
 * files/directories to IPFS.
 *
 * @param {stream.Readable} readStream - raw multipart body (e.g. the
 *   reassembled chunk file).
 * @param {string} boundary - multipart boundary string from the request
 *   headers.
 * @param {Object} ipfs - IPFS node exposing `files.addPullStream()`.
 * @param {function(Error, Array=)} cb - called once with the added entries
 *   (`{ Name, Hash, Size }`) or an error.
 * @returns {multipart.Parser} the parser (already piped), so callers can
 *   attach further listeners if needed.
 */
const createMultipartStream = (readStream, boundary, ipfs, cb) => {
  const parser = new multipart.Parser({ boundary: boundary })
  readStream.pipe(parser)

  // Bridge parser events into a pull-stream source consumed by the adder.
  const fileAdder = pushable()

  parser.on('file', (fileName, fileStream) => {
    fileName = decodeURIComponent(fileName)
    fileAdder.push({
      path: fileName,
      content: toPull(fileStream)
    })
  })

  parser.on('directory', (directory) => {
    directory = decodeURIComponent(directory)
    // Empty content marks a directory entry for the adder.
    fileAdder.push({
      path: directory,
      content: ''
    })
  })

  parser.on('end', () => {
    fileAdder.end()
  })

  pull(
    fileAdder,
    ipfs.files.addPullStream(),
    pull.map((file) => {
      return {
        Name: file.path, // addPullStream already turned this into a hash if it wanted to
        Hash: file.hash,
        Size: file.size
      }
    }),
    pull.collect((err, files) => {
      if (err) {
        cb(err)
        return
      }
      cb(null, files)
    })
  )

  return parser
}
665
module.exports = (server) => {
766
const api = server.select('API')
867

@@ -37,13 +96,95 @@ module.exports = (server) => {
3796
config: {
3897
payload: {
3998
parse: false,
40-
output: 'stream'
99+
output: 'stream',
100+
maxBytes: 10048576
41101
},
42102
handler: resources.files.add.handler,
43103
validate: resources.files.add.validate
44104
}
45105
})
46106

107+
api.route({
  // TODO fix method
  method: 'POST',
  path: '/api/v0/add-chunked',
  config: {
    payload: {
      parse: false,
      maxBytes: 10048576
    },
    // Receives one chunk of an upload per request and appends it to a
    // temp file keyed by the `ipfs-chunk-name` header. The terminating
    // request (ipfs-chunk-id === -1) closes the file, re-parses the
    // assembled multipart payload and adds it to IPFS.
    handler: (request, reply) => {
      const boundary = request.headers['ipfs-chunk-boundary']
      const id = request.headers['ipfs-chunk-name'] // TODO: rename header to id
      const index = Number(request.headers['ipfs-chunk-id'])
      const file = path.join(filesDir, id)
      const contentRange = request.headers['content-range'] || ''
      const match = contentRange.match(/(\d+)-(\d+)\/(\d+|\*)/)
      const ipfs = request.server.app.ipfs

      if (!match) {
        // Malformed or missing content-range header — without it we cannot
        // place the chunk (previously this crashed reading match[1]).
        return reply('Bad Request').code(400)
      }

      const start = parseInt(match[1], 10)
      const end = parseInt(match[2], 10)

      let stream = streams[id]
      if (!stream) {
        // First chunk for this upload: open the temp file in append mode.
        stream = fs.createWriteStream(file, { flags: 'a+' })
        streams[id] = stream
      }

      // Bytes already persisted for this upload.
      let size = 0
      if (fs.existsSync(file)) {
        size = fs.statSync(file).size
      }

      if ((end + 1) === size) {
        // Duplicate chunk (client retry) — already appended, acknowledge
        // without writing it again.
        return reply('Created').code(201)
      }

      if (start !== size) {
        // Out-of-order chunk: a preceding chunk is missing.
        return reply('Bad Request').code(400)
      }

      if (index === -1) {
        // Terminating chunk: all data received — flush the file, then add
        // the assembled multipart payload to IPFS.
        stream.on('finish', () => {
          const readStream = fs.createReadStream(file)
          createMultipartStream(readStream, boundary, ipfs, (err, files) => {
            delete streams[id] // release the finished upload's stream entry
            if (err) {
              return reply(err)
            }
            reply({ files })
          })
        })
        stream.end()
      } else {
        stream.write(request.payload)
        // Chunk appended successfully.
        reply({ Bytes: request.payload.length })
      }
    }
  }
})
187+
47188
api.route({
48189
// TODO fix method
49190
method: '*',

src/http/index.js

Lines changed: 5 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,11 @@ function HttpApi (repo, config, cliArgs) {
103103
this.server = new Hapi.Server({
104104
connections: {
105105
routes: {
106-
cors: true
106+
cors: {
107+
origin: ['*'],
108+
additionalHeaders: ['X-Stream-Output, X-Chunked-Output, X-Content-Length', 'Content-Type', 'Content-Range', 'IPFS-CHUNK-NAME', 'IPFS-CHUNK-ID', 'ipfs-chunk-boundary'],
109+
additionalExposedHeaders: ['X-Stream-Output, X-Chunked-Output, X-Content-Length']
110+
}
107111
}
108112
},
109113
debug: process.env.DEBUG ? {
@@ -137,14 +141,6 @@ function HttpApi (repo, config, cliArgs) {
137141
// load gateway routes
138142
require('./gateway/routes')(this.server)
139143

140-
// Set default headers
141-
setHeader(this.server,
142-
'Access-Control-Allow-Headers',
143-
'X-Stream-Output, X-Chunked-Output, X-Content-Length')
144-
setHeader(this.server,
145-
'Access-Control-Expose-Headers',
146-
'X-Stream-Output, X-Chunked-Output, X-Content-Length')
147-
148144
this.server.start(cb)
149145
})
150146
},

0 commit comments

Comments
 (0)