|
2 | 2 |
|
3 | 3 | const resources = require('./../resources')
|
4 | 4 | const mfs = require('ipfs-mfs/http')
|
| 5 | +const fs = require('fs') |
| 6 | +const path = require('path') |
| 7 | +const tempy = require('tempy') |
| 8 | +const multipart = require('ipfs-multipart') |
| 9 | +const pull = require('pull-stream') |
| 10 | +const toPull = require('stream-to-pull-stream') |
| 11 | +const pushable = require('pull-pushable') |
5 | 12 |
|
// In-progress chunked uploads, keyed by the client-supplied upload id
// (the 'ipfs-chunk-name' header). A plain object rather than an array:
// the keys are arbitrary strings, not numeric indices, so an array would
// only ever grow sparse expando properties.
const streams = {}
// Scratch directory where partial uploads are appended until complete.
const filesDir = tempy.directory()
/**
 * Pipe a multipart-encoded read stream through the multipart parser and
 * add every file/directory entry it emits to IPFS.
 *
 * @param {stream.Readable} readStream - stream carrying the full multipart body
 * @param {string} boundary - multipart boundary taken from the request headers
 * @param {Object} ipfs - IPFS node exposing files.addPullStream()
 * @param {Function} cb - callback(err, files); on success `files` is an array
 *   of { Name, Hash, Size } entries produced by the add operation
 * @returns {multipart.Parser} the parser, so callers can observe its events
 */
const createMultipartStream = (readStream, boundary, ipfs, cb) => {
  const parser = new multipart.Parser({ boundary: boundary })
  readStream.pipe(parser)

  // Pushable pull-stream buffering entries as the parser emits them.
  const fileAdder = pushable()

  parser.on('file', (fileName, fileStream) => {
    // File names arrive URI-encoded from the multipart headers.
    fileAdder.push({
      path: decodeURIComponent(fileName),
      content: toPull(fileStream)
    })
  })

  parser.on('directory', (directory) => {
    // Directories carry no content; an empty string marks them for the adder.
    fileAdder.push({
      path: decodeURIComponent(directory),
      content: ''
    })
  })

  parser.on('end', () => {
    fileAdder.end()
  })

  pull(
    fileAdder,
    ipfs.files.addPullStream(),
    pull.map((file) => ({
      Name: file.path, // addPullStream already turned this into a hash if it wanted to
      Hash: file.hash,
      Size: file.size
    })),
    pull.collect((err, files) => {
      if (err) {
        cb(err)
        return
      }
      cb(null, files)
    })
  )

  return parser
}
6 | 65 | module.exports = (server) => {
|
7 | 66 | const api = server.select('API')
|
8 | 67 |
|
@@ -37,13 +96,95 @@ module.exports = (server) => {
|
37 | 96 | config: {
|
38 | 97 | payload: {
|
39 | 98 | parse: false,
|
40 |
| - output: 'stream' |
| 99 | + output: 'stream', |
| 100 | + maxBytes: 10048576 |
41 | 101 | },
|
42 | 102 | handler: resources.files.add.handler,
|
43 | 103 | validate: resources.files.add.validate
|
44 | 104 | }
|
45 | 105 | })
|
46 | 106 |
|
| 107 | + api.route({ |
| 108 | + // TODO fix method |
| 109 | + method: 'POST', |
| 110 | + path: '/api/v0/add-chunked', |
| 111 | + config: { |
| 112 | + payload: { |
| 113 | + parse: false, |
| 114 | + maxBytes: 10048576 |
| 115 | + }, |
| 116 | + handler: (request, reply) => { |
| 117 | + console.log('received') |
| 118 | + console.log(request.headers['content-range']) |
| 119 | + console.log(request.headers['ipfs-chunk-id']) |
| 120 | + console.log(request.headers['ipfs-chunk-name']) |
| 121 | + const boundary = request.headers['ipfs-chunk-boundary'] |
| 122 | + const id = request.headers['ipfs-chunk-name'] // change name to id |
| 123 | + const index = Number(request.headers['ipfs-chunk-id']) |
| 124 | + const file = path.join(filesDir, id) |
| 125 | + const match = request.headers['content-range'].match(/(\d+)-(\d+)\/(\d+|\*)/) |
| 126 | + const ipfs = request.server.app.ipfs |
| 127 | + // if (!match || !match[1] || !match[2] || !match[3]) { |
| 128 | + /* malformed content-range header */ |
| 129 | + // res.send('Bad Request', 400) |
| 130 | + // return; |
| 131 | + // } |
| 132 | + |
| 133 | + const start = parseInt(match[1]) |
| 134 | + const end = parseInt(match[2]) |
| 135 | + const total = parseInt(match[3]) |
| 136 | + // console.log(start, end, total, index, boundary) |
| 137 | + |
| 138 | + let stream = streams[id] |
| 139 | + if (!stream) { |
| 140 | + console.log('create new stream', file) |
| 141 | + stream = fs.createWriteStream(file, {flags: 'a+'}) |
| 142 | + streams[id] = stream |
| 143 | + } |
| 144 | + |
| 145 | + console.log('stream', file) |
| 146 | + let size = 0 |
| 147 | + if (fs.existsSync(file)) { |
| 148 | + size = fs.statSync(file).size |
| 149 | + } |
| 150 | + |
| 151 | + if ((end + 1) === size) { |
| 152 | + /* duplicate chunk */ |
| 153 | + // res.send('Created', 201) |
| 154 | + // return; |
| 155 | + } |
| 156 | + |
| 157 | + if (start !== size) { |
| 158 | + /* missing chunk */ |
| 159 | + // res.send('Bad Request', 400) |
| 160 | + // return; |
| 161 | + } |
| 162 | + |
| 163 | + if (index === -1) { |
| 164 | + // check if size + payload.length === total |
| 165 | + /* all chunks have been received */ |
| 166 | + stream.on('finish', function () { |
| 167 | + console.log('add to ipfs from the file') |
| 168 | + var readStream = fs.createReadStream(file) |
| 169 | + createMultipartStream(readStream, boundary, ipfs, (err, files) => { |
| 170 | + if (err) { |
| 171 | + console.error(err) |
| 172 | + } |
| 173 | + console.log('finished adding to ipfs', files) |
| 174 | + reply({files}) |
| 175 | + }) |
| 176 | + }) |
| 177 | + |
| 178 | + stream.end() |
| 179 | + } else { |
| 180 | + stream.write(request.payload) |
| 181 | + /* this chunk has been processed successfully */ |
| 182 | + reply({ Bytes: request.payload.length }) |
| 183 | + } |
| 184 | + } |
| 185 | + } |
| 186 | + }) |
| 187 | + |
47 | 188 | api.route({
|
48 | 189 | // TODO fix method
|
49 | 190 | method: '*',
|
|
0 commit comments