From d4826e1075039e9ca4cb4bb8feb3919c3df50c4f Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 12:59:47 -0500 Subject: [PATCH 01/20] Logging incoming requests --- .gitignore | 4 + packages/ipfs/package.json | 2 +- packages/ipfs/src/http/api/resources/dag.js | 319 ++++++------- .../src/http/api/resources/files-regular.js | 418 ++++++++++-------- 4 files changed, 409 insertions(+), 334 deletions(-) diff --git a/.gitignore b/.gitignore index a589b3fa49..4d0e24e5fd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,7 @@ +.DS_Store +.connect-deps-cache/ +.connect-deps.json + # Dependency tools package-lock.json yarn.lock diff --git a/packages/ipfs/package.json b/packages/ipfs/package.json index f36ba126e8..311a509316 100644 --- a/packages/ipfs/package.json +++ b/packages/ipfs/package.json @@ -98,7 +98,7 @@ "ipfs-http-client": "^42.0.0", "ipfs-http-response": "^0.5.0", "ipfs-mfs": "^1.0.0", - "ipfs-multipart": "^0.3.0", + "ipfs-multipart": "file:/Users/paulcowgill/Code/contract-work/3box/js-ipfs/packages/ipfs/.connect-deps-cache/ipfs-multipart-0.3.0-1583254040397.tgz", "ipfs-repo": "^0.30.1", "ipfs-unixfs": "^0.3.0", "ipfs-unixfs-exporter": "^0.41.0", diff --git a/packages/ipfs/src/http/api/resources/dag.js b/packages/ipfs/src/http/api/resources/dag.js index 8b6ff198ce..c32a93a919 100644 --- a/packages/ipfs/src/http/api/resources/dag.js +++ b/packages/ipfs/src/http/api/resources/dag.js @@ -1,314 +1,327 @@ -'use strict' - -const CID = require('cids') -const multipart = require('ipfs-multipart') -const mh = require('multihashes') -const Joi = require('@hapi/joi') -const multibase = require('multibase') -const multicodec = require('multicodec') -const Boom = require('@hapi/boom') -const debug = require('debug') -const { - cidToString -} = require('../../../utils/cid') -const all = require('it-all') -const log = debug('ipfs:http-api:dag') -log.error = debug('ipfs:http-api:dag:error') +"use strict"; + +const CID = require("cids"); +const multipart = require("ipfs-multipart"); +const mh = require("multihashes"); +const Joi = require("@hapi/joi"); +const multibase = require("multibase"); +const multicodec = require("multicodec"); +const Boom = require("@hapi/boom"); +const debug = require("debug"); +const { cidToString } = require("../../../utils/cid"); +const all = require("it-all"); +const log = debug("ipfs:http-api:dag"); +log.error = debug("ipfs:http-api:dag:error"); const IpldFormats = { - get [multicodec.RAW] () { - return require('ipld-raw') + get [multicodec.RAW]() { + return require("ipld-raw"); }, - get [multicodec.DAG_PB] () { - return require('ipld-dag-pb') + get [multicodec.DAG_PB]() { + return require("ipld-dag-pb"); }, - get [multicodec.DAG_CBOR] () { - return require('ipld-dag-cbor') + get [multicodec.DAG_CBOR]() { + return require("ipld-dag-cbor"); }, - get [multicodec.BITCOIN_BLOCK] () { - return require('ipld-bitcoin') + get [multicodec.BITCOIN_BLOCK]() { + return require("ipld-bitcoin"); }, - get [multicodec.ETH_ACCOUNT_SNAPSHOT] () { - return require('ipld-ethereum').ethAccountSnapshot + get [multicodec.ETH_ACCOUNT_SNAPSHOT]() { + return require("ipld-ethereum").ethAccountSnapshot; }, - get [multicodec.ETH_BLOCK] () { - return require('ipld-ethereum').ethBlock + get [multicodec.ETH_BLOCK]() { + return require("ipld-ethereum").ethBlock; }, - get [multicodec.ETH_BLOCK_LIST] () { - return require('ipld-ethereum').ethBlockList + get [multicodec.ETH_BLOCK_LIST]() { + return require("ipld-ethereum").ethBlockList; }, - get [multicodec.ETH_STATE_TRIE] () { - return 
require('ipld-ethereum').ethStateTrie + get [multicodec.ETH_STATE_TRIE]() { + return require("ipld-ethereum").ethStateTrie; }, - get [multicodec.ETH_STORAGE_TRIE] () { - return require('ipld-ethereum').ethStorageTrie + get [multicodec.ETH_STORAGE_TRIE]() { + return require("ipld-ethereum").ethStorageTrie; }, - get [multicodec.ETH_TX] () { - return require('ipld-ethereum').ethTx + get [multicodec.ETH_TX]() { + return require("ipld-ethereum").ethTx; }, - get [multicodec.ETH_TX_TRIE] () { - return require('ipld-ethereum').ethTxTrie + get [multicodec.ETH_TX_TRIE]() { + return require("ipld-ethereum").ethTxTrie; }, - get [multicodec.GIT_RAW] () { - return require('ipld-git') + get [multicodec.GIT_RAW]() { + return require("ipld-git"); }, - get [multicodec.ZCASH_BLOCK] () { - return require('ipld-zcash') + get [multicodec.ZCASH_BLOCK]() { + return require("ipld-zcash"); } -} +}; // common pre request handler that parses the args and returns `key` which is assigned to `request.pre.args` -exports.parseKey = (argument = 'Argument', name = 'key', quote = "'") => { - return (request) => { +exports.parseKey = (argument = "Argument", name = "key", quote = "'") => { + return request => { if (!request.query.arg) { // for compatibility with go error messages - throw Boom.badRequest(`${argument} ${quote}${name}${quote} is required`) + throw Boom.badRequest(`${argument} ${quote}${name}${quote} is required`); } - let key = request.query.arg.trim() - let path + let key = request.query.arg.trim(); + let path; - if (key.startsWith('/ipfs')) { - key = key.substring(5) + if (key.startsWith("/ipfs")) { + key = key.substring(5); } - const parts = key.split('/') + const parts = key.split("/"); if (parts.length > 1) { - key = parts.shift() - path = `${parts.join('/')}` + key = parts.shift(); + path = `${parts.join("/")}`; } - if (path && path.endsWith('/')) { - path = path.substring(0, path.length - 1) + if (path && path.endsWith("/")) { + path = path.substring(0, path.length - 1); } try { return { [name]: new CID(key), path - } + }; } catch (err) { - log.error(err) - throw Boom.badRequest("invalid 'ipfs ref' path") + log.error(err); + throw Boom.badRequest("invalid 'ipfs ref' path"); } - } -} + }; +}; const encodeBufferKeys = (obj, encoding) => { if (!obj) { - return obj + return obj; } if (Buffer.isBuffer(obj)) { - return obj.toString(encoding) + return obj.toString(encoding); } Object.keys(obj).forEach(key => { if (Buffer.isBuffer(obj)) { - obj[key] = obj[key].toString(encoding) + obj[key] = obj[key].toString(encoding); - return + return; } - if (typeof obj[key] === 'object') { - obj[key] = encodeBufferKeys(obj[key], encoding) + if (typeof obj[key] === "object") { + obj[key] = encodeBufferKeys(obj[key], encoding); } - }) + }); - return obj -} + return obj; +}; exports.get = { validate: { - query: Joi.object().keys({ - 'data-encoding': Joi.string().valid('text', 'base64', 'hex').default('text'), - 'cid-base': Joi.string().valid(...multibase.names) - }).unknown() + query: Joi.object() + .keys({ + "data-encoding": Joi.string() + .valid("text", "base64", "hex") + .default("text"), + "cid-base": Joi.string().valid(...multibase.names) + }) + .unknown() }, // uses common parseKey method that returns a `key` parseArgs: exports.parseKey(), // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler (request, h) { - const { - key, - path - } = request.pre.args - const { ipfs } = request.server.app + async handler(request, h) { + const { key, path } = request.pre.args; + 
const { ipfs } = request.server.app; - let dataEncoding = request.query['data-encoding'] + let dataEncoding = request.query["data-encoding"]; - if (dataEncoding === 'text') { - dataEncoding = 'utf8' + if (dataEncoding === "text") { + dataEncoding = "utf8"; } - let result + let result; try { - result = await ipfs.dag.get(key, path) + result = await ipfs.dag.get(key, path); } catch (err) { - throw Boom.badRequest(err) + throw Boom.badRequest(err); } - let value = result.value + let value = result.value; if (!Buffer.isBuffer(result.value) && result.value.toJSON) { - value = result.value.toJSON() + value = result.value.toJSON(); } try { - result.value = encodeBufferKeys(value, dataEncoding) + result.value = encodeBufferKeys(value, dataEncoding); } catch (err) { - throw Boom.boomify(err) + throw Boom.boomify(err); } - return h.response(result.value) + return h.response(result.value); } -} +}; exports.put = { validate: { - query: Joi.object().keys({ - format: Joi.string().default('cbor'), - 'input-enc': Joi.string().default('json'), - pin: Joi.boolean(), - hash: Joi.string().valid(...Object.keys(mh.names)).default('sha2-256'), - 'cid-base': Joi.string().valid(...multibase.names) - }).unknown() + query: Joi.object() + .keys({ + format: Joi.string().default("cbor"), + "input-enc": Joi.string().default("json"), + pin: Joi.boolean(), + hash: Joi.string() + .valid(...Object.keys(mh.names)) + .default("sha2-256"), + "cid-base": Joi.string().valid(...multibase.names) + }) + .unknown() }, // pre request handler that parses the args and returns `node` // which is assigned to `request.pre.args` - async parseArgs (request, h) { + async parseArgs(request, h) { if (!request.payload) { - throw Boom.badRequest("File argument 'object data' is required") + throw Boom.badRequest("File argument 'object data' is required"); } - const enc = request.query['input-enc'] + const enc = request.query["input-enc"]; - if (!request.headers['content-type']) { - throw Boom.badRequest("File argument 'object data' is required") + if (!request.headers["content-type"]) { + throw Boom.badRequest("File argument 'object data' is required"); } - let data + let data; for await (const part of multipart(request)) { - if (part.type !== 'file') { - continue + if (part.type !== "file") { + continue; } - data = Buffer.concat(await all(part.content)) + data = Buffer.concat(await all(part.content)); } if (!data) { - throw Boom.badRequest("File argument 'object data' is required") + throw Boom.badRequest("File argument 'object data' is required"); } - let format = request.query.format + let format = request.query.format; - if (format === 'cbor') { - format = 'dag-cbor' + if (format === "cbor") { + format = "dag-cbor"; } - let node + let node; - if (format === 'raw') { - node = data - } else if (enc === 'json') { + if (format === "raw") { + node = data; + } else if (enc === "json") { try { - node = JSON.parse(data.toString()) + node = JSON.parse(data.toString()); } catch (err) { - throw Boom.badRequest('Failed to parse the JSON: ' + err) + throw Boom.badRequest("Failed to parse the JSON: " + err); } } else { - const codec = multicodec[format.toUpperCase().replace(/-/g, '_')] - if (!IpldFormats[codec]) throw new Error(`Missing IPLD format "${codec}"`) - node = await IpldFormats[codec].util.deserialize(data) + const codec = multicodec[format.toUpperCase().replace(/-/g, "_")]; + if (!IpldFormats[codec]) + throw new Error(`Missing IPLD format "${codec}"`); + node = await IpldFormats[codec].util.deserialize(data); } return { node, format, hashAlg: 
request.query.hash - } + }; }, // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler (request, h) { - const { ipfs } = request.server.app - const { node, format, hashAlg } = request.pre.args - - let cid + async handler(request, h) { + console.log("========"); + console.log("========"); + console.log("========"); + console.log("========"); + console.log("========"); + console.log("Incoming request!"); + console.log("========"); + const { ipfs } = request.server.app; + const { node, format, hashAlg } = request.pre.args; + + let cid; try { cid = await ipfs.dag.put(node, { format: format, hashAlg: hashAlg - }) + }); } catch (err) { - throw Boom.boomify(err, { message: 'Failed to put node' }) + throw Boom.boomify(err, { message: "Failed to put node" }); } if (request.query.pin) { - await ipfs.pin.add(cid) + await ipfs.pin.add(cid); } return h.response({ Cid: { - '/': cidToString(cid, { - base: request.query['cid-base'] + "/": cidToString(cid, { + base: request.query["cid-base"] }) } - }) + }); } -} +}; exports.resolve = { validate: { - query: Joi.object().keys({ - 'cid-base': Joi.string().valid(...multibase.names) - }).unknown() + query: Joi.object() + .keys({ + "cid-base": Joi.string().valid(...multibase.names) + }) + .unknown() }, // uses common parseKey method that returns a `key` - parseArgs: exports.parseKey('argument', 'ref', '"'), + parseArgs: exports.parseKey("argument", "ref", '"'), // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler (request, h) { - const { ref, path } = request.pre.args - const { ipfs } = request.server.app + async handler(request, h) { + const { ref, path } = request.pre.args; + const { ipfs } = request.server.app; // to be consistent with go we need to return the CID to the last node we've traversed // along with the path inside that node as the remainder path try { - let lastCid = ref - let lastRemainderPath = path + let lastCid = ref; + let lastRemainderPath = path; if (path) { - const result = ipfs.dag.resolve(lastCid, path) + const result = ipfs.dag.resolve(lastCid, path); while (true) { - const resolveResult = (await result.next()).value + const resolveResult = (await result.next()).value; if (!CID.isCID(resolveResult.value)) { - break + break; } - lastRemainderPath = resolveResult.remainderPath - lastCid = resolveResult.value + lastRemainderPath = resolveResult.remainderPath; + lastCid = resolveResult.value; } } return h.response({ Cid: { - '/': cidToString(lastCid, { - base: request.query['cid-base'] + "/": cidToString(lastCid, { + base: request.query["cid-base"] }) }, - RemPath: lastRemainderPath || '' - }) + RemPath: lastRemainderPath || "" + }); } catch (err) { - throw Boom.boomify(err) + throw Boom.boomify(err); } } -} +}; diff --git a/packages/ipfs/src/http/api/resources/files-regular.js b/packages/ipfs/src/http/api/resources/files-regular.js index 5d4e728e5f..e96dce7fb0 100644 --- a/packages/ipfs/src/http/api/resources/files-regular.js +++ b/packages/ipfs/src/http/api/resources/files-regular.js @@ -1,128 +1,148 @@ -'use strict' - -const multipart = require('ipfs-multipart') -const debug = require('debug') -const tar = require('it-tar') -const log = debug('ipfs:http-api:files') -log.error = debug('ipfs:http-api:files:error') -const toIterable = require('stream-to-it') -const Joi = require('@hapi/joi') -const Boom = require('@hapi/boom') -const { PassThrough } = require('stream') -const multibase = require('multibase') -const isIpfs 
= require('is-ipfs') -const { cidToString } = require('../../../utils/cid') -const { Format } = require('../../../core/components/refs') -const pipe = require('it-pipe') -const all = require('it-all') -const ndjson = require('iterable-ndjson') -const { map } = require('streaming-iterables') -const streamResponse = require('../../utils/stream-response') - -const toBuffer = async function * (source) { +"use strict"; + +const multipart = require("ipfs-multipart"); +const debug = require("debug"); +const tar = require("it-tar"); +const log = debug("ipfs:http-api:files"); +log.error = debug("ipfs:http-api:files:error"); +const toIterable = require("stream-to-it"); +const Joi = require("@hapi/joi"); +const Boom = require("@hapi/boom"); +const { PassThrough } = require("stream"); +const multibase = require("multibase"); +const isIpfs = require("is-ipfs"); +const { cidToString } = require("../../../utils/cid"); +const { Format } = require("../../../core/components/refs"); +const pipe = require("it-pipe"); +const all = require("it-all"); +const ndjson = require("iterable-ndjson"); +const { map } = require("streaming-iterables"); +const streamResponse = require("../../utils/stream-response"); + +const toBuffer = async function*(source) { for await (const chunk of source) { - yield chunk.slice() + yield chunk.slice(); } -} +}; -function numberFromQuery (query, key) { +function numberFromQuery(query, key) { if (query && query[key] !== undefined) { - const value = parseInt(query[key], 10) + const value = parseInt(query[key], 10); if (isNaN(value)) { - return undefined + return undefined; } - return value + return value; } } // common pre request handler that parses the args and returns `key` which is assigned to `request.pre.args` exports.parseKey = (request, h) => { - const { arg } = request.query + const { arg } = request.query; if (!arg) { - throw Boom.badRequest("Argument 'key' is required") + throw Boom.badRequest("Argument 'key' is required"); } - const isArray = Array.isArray(arg) - const args = isArray ? arg : [arg] + const isArray = Array.isArray(arg); + const args = isArray ? arg : [arg]; for (const arg of args) { - if (!isIpfs.ipfsPath(arg) && !isIpfs.cid(arg) && !isIpfs.ipfsPath('/ipfs/' + arg)) { - throw Boom.badRequest(`invalid ipfs ref path '${arg}'`) + if ( + !isIpfs.ipfsPath(arg) && + !isIpfs.cid(arg) && + !isIpfs.ipfsPath("/ipfs/" + arg) + ) { + throw Boom.badRequest(`invalid ipfs ref path '${arg}'`); } } return { key: isArray ? args : arg, options: { - offset: numberFromQuery(request.query, 'offset'), - length: numberFromQuery(request.query, 'length') + offset: numberFromQuery(request.query, "offset"), + length: numberFromQuery(request.query, "length") } - } -} + }; +}; exports.cat = { // uses common parseKey method that returns a `key` parseArgs: exports.parseKey, // main route handler which is called after the above `parseArgs`, but only if the args were valid - handler (request, h) { - const { ipfs } = request.server.app - const { key, options } = request.pre.args + handler(request, h) { + const { ipfs } = request.server.app; + const { key, options } = request.pre.args; return streamResponse(request, h, () => ipfs.cat(key, options), { - onError (err) { - err.message = err.message === 'file does not exist' - ? err.message - : 'Failed to cat file: ' + err.message + onError(err) { + err.message = + err.message === "file does not exist" + ? 
err.message + : "Failed to cat file: " + err.message; } - }) + }); } -} +}; exports.get = { // uses common parseKey method that returns a `key` parseArgs: exports.parseKey, // main route handler which is called after the above `parseArgs`, but only if the args were valid - handler (request, h) { - const { ipfs } = request.server.app - const { key } = request.pre.args - - return streamResponse(request, h, () => pipe( - ipfs.get(key), - async function * (source) { - for await (const file of source) { - const header = { - name: file.path - } - - if (file.content) { - yield { header: { ...header, size: file.size }, body: toBuffer(file.content) } - } else { - yield { header: { ...header, type: 'directory' } } + handler(request, h) { + const { ipfs } = request.server.app; + const { key } = request.pre.args; + + return streamResponse(request, h, () => + pipe( + ipfs.get(key), + async function*(source) { + for await (const file of source) { + const header = { + name: file.path + }; + + if (file.content) { + yield { + header: { ...header, size: file.size }, + body: toBuffer(file.content) + }; + } else { + yield { header: { ...header, type: "directory" } }; + } } - } - }, - tar.pack(), - toBuffer - )) + }, + tar.pack(), + toBuffer + ) + ); } -} +}; exports.add = { validate: { query: Joi.object() .keys({ - 'cid-version': Joi.number().integer().min(0).max(1).default(0), - 'cid-base': Joi.string().valid(...multibase.names), - 'raw-leaves': Joi.boolean(), - 'only-hash': Joi.boolean(), + "cid-version": Joi.number() + .integer() + .min(0) + .max(1) + .default(0), + "cid-base": Joi.string().valid(...multibase.names), + "raw-leaves": Joi.boolean(), + "only-hash": Joi.boolean(), pin: Joi.boolean().default(true), - 'wrap-with-directory': Joi.boolean(), - 'file-import-concurrency': Joi.number().integer().min(0).default(50), - 'block-write-concurrency': Joi.number().integer().min(0).default(10), + "wrap-with-directory": Joi.boolean(), + "file-import-concurrency": Joi.number() + .integer() + .min(0) + .default(50), + "block-write-concurrency": Joi.number() + .integer() + .min(0) + .default(10), chunker: Joi.string(), trickle: Joi.boolean(), preload: Joi.boolean().default(true) @@ -131,58 +151,78 @@ exports.add = { .options({ allowUnknown: true }) }, - handler (request, h) { + handler(request, h) { if (!request.payload) { - throw Boom.badRequest('Array, Buffer, or String is required.') + throw Boom.badRequest("Array, Buffer, or String is required."); } - - const { ipfs } = request.server.app - let filesParsed = false - let currentFileName - const output = new PassThrough() + console.log("========"); + console.log("========"); + console.log("========"); + console.log("========"); + console.log("========"); + console.log("Incoming request!"); + console.log("========"); + // console.log(request.payload); + + const { ipfs } = request.server.app; + let filesParsed = false; + let currentFileName; + const output = new PassThrough(); const progressHandler = bytes => { - output.write(JSON.stringify({ - Name: currentFileName, - Bytes: bytes - }) + '\n') - } + output.write( + JSON.stringify({ + Name: currentFileName, + Bytes: bytes + }) + "\n" + ); + }; pipe( multipart(request), - async function * (source) { + async function*(source) { + console.log("-------"); + console.log({ source }); for await (const entry of source) { - currentFileName = entry.name || 'unknown' + console.log("-------"); + // console.log({ entry }); + currentFileName = entry.name || "unknown"; + + if (entry.type === "file") { + filesParsed = true; - if 
(entry.type === 'file') { - filesParsed = true + // Just for logging + for await (const chunk of entry.content) { + console.log({ chunk }); + } + // end of extra logging code yield { path: entry.name, content: entry.content, mode: entry.mode, mtime: entry.mtime - } + }; } - if (entry.type === 'directory') { - filesParsed = true + if (entry.type === "directory") { + filesParsed = true; yield { path: entry.name, mode: entry.mode, mtime: entry.mtime - } + }; } } }, - function (source) { + function(source) { return ipfs.add(source, { - cidVersion: request.query['cid-version'], - rawLeaves: request.query['raw-leaves'], + cidVersion: request.query["cid-version"], + rawLeaves: request.query["raw-leaves"], progress: request.query.progress ? progressHandler : () => {}, - onlyHash: request.query['only-hash'], + onlyHash: request.query["only-hash"], hashAlg: request.query.hash, - wrapWithDirectory: request.query['wrap-with-directory'], + wrapWithDirectory: request.query["wrap-with-directory"], pin: request.query.pin, chunker: request.query.chunker, trickle: request.query.trickle, @@ -192,72 +232,78 @@ exports.add = { // at a time from a http request and we have to consume it completely // before we can read the next file fileImportConcurrency: 1, - blockWriteConcurrency: request.query['block-write-concurrency'] - }) + blockWriteConcurrency: request.query["block-write-concurrency"] + }); }, map(file => { const entry = { Name: file.path, - Hash: cidToString(file.cid, { base: request.query['cid-base'] }), + Hash: cidToString(file.cid, { base: request.query["cid-base"] }), Size: file.size, - Mode: file.mode === undefined ? undefined : file.mode.toString(8).padStart(4, '0') - } + Mode: + file.mode === undefined + ? undefined + : file.mode.toString(8).padStart(4, "0") + }; if (file.mtime) { - entry.Mtime = file.mtime.secs - entry.MtimeNsecs = file.mtime.nsecs + entry.Mtime = file.mtime.secs; + entry.MtimeNsecs = file.mtime.nsecs; } - return entry + return entry; }), ndjson.stringify, toIterable.sink(output) ) .then(() => { if (!filesParsed) { - throw new Error("File argument 'data' is required.") + throw new Error("File argument 'data' is required."); } }) .catch(err => { if (!filesParsed) { - output.write(' ') + output.write(" "); } request.raw.res.addTrailers({ - 'X-Stream-Error': JSON.stringify({ + "X-Stream-Error": JSON.stringify({ Message: err.message, Code: 0 }) - }) + }); }) .then(() => { - output.end() - }) - - return h.response(output) - .header('x-chunked-output', '1') - .header('content-type', 'application/json') - .header('Trailer', 'X-Stream-Error') + output.end(); + }); + + return h + .response(output) + .header("x-chunked-output", "1") + .header("content-type", "application/json") + .header("Trailer", "X-Stream-Error"); } -} +}; exports.ls = { validate: { - query: Joi.object().keys({ - 'cid-base': Joi.string().valid(...multibase.names), - stream: Joi.boolean() - }).unknown() + query: Joi.object() + .keys({ + "cid-base": Joi.string().valid(...multibase.names), + stream: Joi.boolean() + }) + .unknown() }, // uses common parseKey method that returns a `key` parseArgs: exports.parseKey, // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler (request, h) { - const { ipfs } = request.server.app - const { key } = request.pre.args - const recursive = request.query && request.query.recursive === 'true' - const cidBase = request.query['cid-base'] + async handler(request, h) { + const { ipfs } = request.server.app; + const { key } = 
request.pre.args; + const recursive = request.query && request.query.recursive === "true"; + const cidBase = request.query["cid-base"]; const mapLink = link => { const output = { @@ -266,94 +312,106 @@ exports.ls = { Size: link.size, Type: toTypeCode(link.type), Depth: link.depth, - Mode: link.mode.toString(8).padStart(4, '0') - } + Mode: link.mode.toString(8).padStart(4, "0") + }; if (link.mtime) { - output.Mtime = link.mtime.secs + output.Mtime = link.mtime.secs; if (link.mtime.nsecs !== null && link.mtime.nsecs !== undefined) { - output.MtimeNsecs = link.mtime.nsecs + output.MtimeNsecs = link.mtime.nsecs; } } - return output - } + return output; + }; if (!request.query.stream) { - let links + let links; try { - links = await all(ipfs.ls(key, { recursive })) + links = await all(ipfs.ls(key, { recursive })); } catch (err) { - throw Boom.boomify(err, { message: 'Failed to list dir' }) + throw Boom.boomify(err, { message: "Failed to list dir" }); } - return h.response({ Objects: [{ Hash: key, Links: links.map(mapLink) }] }) + return h.response({ + Objects: [{ Hash: key, Links: links.map(mapLink) }] + }); } - return streamResponse(request, h, () => pipe( - ipfs.ls(key, { recursive }), - map(link => ({ Objects: [{ Hash: key, Links: [mapLink(link)] }] })), - ndjson.stringify - )) + return streamResponse(request, h, () => + pipe( + ipfs.ls(key, { recursive }), + map(link => ({ Objects: [{ Hash: key, Links: [mapLink(link)] }] })), + ndjson.stringify + ) + ); } -} +}; -function toTypeCode (type) { +function toTypeCode(type) { switch (type) { - case 'dir': - return 1 - case 'file': - return 2 + case "dir": + return 1; + case "file": + return 2; default: - return 0 + return 0; } } exports.refs = { validate: { - query: Joi.object().keys({ - recursive: Joi.boolean().default(false), - format: Joi.string().default(Format.default), - edges: Joi.boolean().default(false), - unique: Joi.boolean().default(false), - 'max-depth': Joi.number().integer().min(-1) - }).unknown() + query: Joi.object() + .keys({ + recursive: Joi.boolean().default(false), + format: Joi.string().default(Format.default), + edges: Joi.boolean().default(false), + unique: Joi.boolean().default(false), + "max-depth": Joi.number() + .integer() + .min(-1) + }) + .unknown() }, // uses common parseKey method that returns a `key` parseArgs: exports.parseKey, // main route handler which is called after the above `parseArgs`, but only if the args were valid - handler (request, h) { - const { ipfs } = request.server.app - const { key } = request.pre.args + handler(request, h) { + const { ipfs } = request.server.app; + const { key } = request.pre.args; const options = { recursive: request.query.recursive, format: request.query.format, edges: request.query.edges, unique: request.query.unique, - maxDepth: request.query['max-depth'] - } - - return streamResponse(request, h, () => pipe( - ipfs.refs(key, options), - map(({ ref, err }) => ({ Ref: ref, Err: err })), - ndjson.stringify - )) + maxDepth: request.query["max-depth"] + }; + + return streamResponse(request, h, () => + pipe( + ipfs.refs(key, options), + map(({ ref, err }) => ({ Ref: ref, Err: err })), + ndjson.stringify + ) + ); } -} +}; exports.refs.local = { // main route handler - handler (request, h) { - const { ipfs } = request.server.app - - return streamResponse(request, h, () => pipe( - ipfs.refs.local(), - map(({ ref, err }) => ({ Ref: ref, Err: err })), - ndjson.stringify - )) + handler(request, h) { + const { ipfs } = request.server.app; + + return streamResponse(request, h, () => + 
pipe( + ipfs.refs.local(), + map(({ ref, err }) => ({ Ref: ref, Err: err })), + ndjson.stringify + ) + ); } -} +}; From 8f44ff13fcf161001834102022d74d8573aceffc Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 13:30:07 -0500 Subject: [PATCH 02/20] Formatting fixes --- .gitignore | 1 + examples/browser-parceljs/package.json | 48 +- examples/custom-libp2p/package.json | 38 +- package.json | 578 ++++++++----- packages/ipfs/src/http/api/resources/dag.js | 568 ++++++------- .../src/http/api/resources/files-regular.js | 776 +++++++++--------- 6 files changed, 1081 insertions(+), 928 deletions(-) diff --git a/.gitignore b/.gitignore index 4d0e24e5fd..40b58eb613 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ .DS_Store .connect-deps-cache/ .connect-deps.json +prettier.config.js # Dependency tools package-lock.json diff --git a/examples/browser-parceljs/package.json b/examples/browser-parceljs/package.json index 9a2aefbb4d..d8c693d8f1 100644 --- a/examples/browser-parceljs/package.json +++ b/examples/browser-parceljs/package.json @@ -1,34 +1,18 @@ { - "name": "example-browser-parceljs", - "version": "1.0.0", - "description": "", - "main": "index.js", - "private": true, - "browserslist": [ - "last 2 Chrome versions" - ], - "scripts": { - "clean": "rm -rf ./dist", - "lint": "standard public/**/*.js", - "start": "parcel public/index.html", - "build": "parcel build public/index.html --public-url ./", - "test": "test-ipfs-example" - }, - "keywords": [], - "author": "", - "license": "ISC", - "dependencies": { - "ipfs": "^0.41.0" - }, - "devDependencies": { - "@babel/cli": "^7.1.5", - "@babel/core": "^7.1.6", - "@babel/preset-env": "^7.1.6", - "babel-plugin-syntax-async-functions": "^6.13.0", - "babel-plugin-transform-regenerator": "^6.26.0", - "babel-polyfill": "^6.26.0", - "parcel-bundler": "^1.10.3", - "standard": "^13.1.0", - "test-ipfs-example": "^1.0.0" - } + "name": "example-browser-parceljs", + "version": "1.0.0", + "description": "", + "main": "index.js", + "private": true, + "browserslist": [ + "last 2 Chrome versions" + ], + "keywords": [], + "author": "", + "license": "ISC", + "devDependencies": { + "@babel/core": "^7.1.6", + "@babel/preset-env": "^7.1.6", + "parcel-bundler": "^1.10.3" + } } diff --git a/examples/custom-libp2p/package.json b/examples/custom-libp2p/package.json index be3e3dce17..534517d043 100644 --- a/examples/custom-libp2p/package.json +++ b/examples/custom-libp2p/package.json @@ -1,27 +1,15 @@ { - "name": "example-custom-libp2p", - "version": "0.1.0", - "description": "Customizing your libp2p node", - "main": "index.js", - "private": true, - "scripts": { - "start": "node index.js", - "test": "test-ipfs-example" - }, - "license": "MIT", - "dependencies": { - "ipfs": "^0.41.0", - "libp2p": "^0.27.0-rc.0", - "libp2p-bootstrap": "^0.10.3", - "libp2p-kad-dht": "^0.18.3", - "libp2p-mdns": "^0.13.1", - "libp2p-mplex": "^0.9.3", - "libp2p-secio": "^0.12.2", - "libp2p-spdy": "^0.13.3", - "libp2p-tcp": "^0.14.3" - }, - "devDependencies": { - "execa": "^3.2.0", - "test-ipfs-example": "^1.0.0" - } + "name": "example-custom-libp2p", + "version": "0.1.0", + "description": "Customizing your libp2p node", + "main": "index.js", + "private": true, + "license": "MIT", + "dependencies": { + "libp2p": "^0.27.0-rc.0", + "libp2p-bootstrap": "^0.10.3", + "libp2p-mdns": "^0.13.1", + "libp2p-secio": "^0.12.2", + "libp2p-tcp": "^0.14.3" + } } diff --git a/package.json b/package.json index 3b6f085636..fd0861cc6c 100644 --- a/package.json +++ b/package.json @@ -1,207 +1,375 @@ 
{ - "name": "js-ipfs", - "version": "1.0.0", - "description": "JavaScript implementation of the IPFS specification", - "scripts": { - "postinstall": "lerna bootstrap", - "reset": "lerna run clean && rm -rf examples/*/node_modules && rm -rf packages/*/node_modules node_modules", - "test": "lerna run test", - "test:node": "lerna run test:node", - "test:browser": "lerna run test:browser", - "test:webworker": "lerna run test:webworker", - "test:electron": "lerna run test:electron", - "test:electron-main": "lerna run test:electron-main", - "test:electron-renderer": "lerna run test:electron-renderer", - "test:cli": "lerna run test:cli", - "test:interop:node": "lerna run test:interop:node", - "test:interop:browser": "lerna run test:interop:browser", - "test:interop:electron-main": "lerna run test:interop:electron-main", - "test:interop:electron-renderer": "lerna run test:interop:electron-renderer", - "coverage": "lerna run coverage", - "build": "lerna run build", - "clean": "lerna run clean", - "lint": "lerna run lint", - "dep-check": "lerna run dep-check", - "release": "npm run update-contributors && lerna publish", - "release:rc": "lerna publish --canary --preid rc --dist-tag next", - "update-contributors": "aegir release --lint=false --test=false --bump=false --build=false --changelog=false --commit=false --tag=false --push=false --ghrelease=false --docs=false --publish=false" - }, - "devDependencies": { - "lerna": "^3.20.2" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/ipfs/js-ipfs.git" - }, - "private": true, - "contributors": [ - "David Dias ", - "Alan Shaw ", - "achingbrain ", - "Friedel Ziegelmayer ", - "Juan Batiz-Benet ", - "Hugo Dias ", - "Vasco Santos ", - "Henrique Dias ", - "Volker Mische ", - "ᴠɪᴄᴛᴏʀ ʙᴊᴇʟᴋʜᴏʟᴍ ", - "Stephen Whitmore ", - "Francisco Baio Dias ", - "Matt Bell ", - "Richard Schneider ", - "Jacob Heun ", - "Marcin Rataj ", - "Pedro Teixeira ", - "Travis Person ", - "Kristoffer Ström ", - "Dmitriy Ryajov ", - "nginnever ", - "Richard Littauer ", - "Oli Evans ", - "dirkmc ", - "Diogo Silva ", - "Connor Keenan ", - "Pedro Santos ", - "Harlan T Wood ", - "Pascal Precht ", - "Maciej Krüger ", - "Michael Garvin ", - "Steven Allen ", - "Michael Muré ", - "João Antunes ", - "Enrico Marino ", - "Christian Couder ", - "Rob Brackett ", - "Mithgol ", - "Prabhakar Poudel ", - "Sangwon Hong ", - "Jonathan ", - "Gavin McDermott ", - "Mikeal Rogers ", - "Dzmitry Das ", - "Andrew de Andrade ", - "Paulo Rodrigues ", - "haad ", - "Andrew Nesbitt ", - "Marius Darila ", - "Yahya ", - "Alex Mingoia ", - "RasmusErik Voel Jensen ", - "Ryan Bell ", - "Jeromy ", - "Gorka Ludlow ", - "Dan Ordille ", - "Matt Ober ", - "samuli ", - "Maxime Lathuilière ", - "Andrey ", - "shunkin ", - "Jim Pick ", - "tcme ", - "Jonybang ", - "Raoul Millais ", - "Georgios Rassias ", - "Portia Burton ", - "Antonio Tenorio-Fornés ", - "Kevin Simper ", - "Kevin Wang ", - "Irakli Gozalishvili ", - "David Gilbertson ", - "Максим Ильин ", - "Nuno Nogueira ", - "Sid Harder ", - "seungwon-kang ", - "Arpit Agarwal <93arpit@gmail.com>", - "kevingzhang ", - "leekt216 ", - "noah the goodra ", - "priecint ", - "ron litzenberger ", - "sarthak khandelwal ", - "Paul Cowgill ", - "Adam Uhlíř ", - "Alex North ", - "André Cruz ", - "Ayush Mahajan ", - "Bernard Mordan ", - "Brian Vander Schaaf ", - "Bruno Barbieri ", - "Bruno Zell ", - "CHEVALAY JOSSELIN ", - "Caio Gondim ", - "Chance Hudson ", - "Dafeng ", - "Daniel Buchner ", - "Daniel Constantin ", - "Daniel J. 
O'Quinn ", - "Daniela Borges Matos de Carvalho ", - "Danny ", - "David ", - "David Braun ", - "David da Silva ", - "Davide Icardi ", - "Dietrich Ayala ", - "Dmitry Nikulin ", - "Dominic Della Valle ", - "Donatas Stundys ", - "Faheel Ahmad ", - "Felix Yan ", - "Fil ", - "Filip Š ", - "Gabriel Garrido Calvo ", - "Giles ", - "Grant Herman ", - "Henry Rodrick ", - "Heo Sangmin ", - "Holodisc ", - "Jacob Karlsson ", - "Jade Meskill ", - "James Halliday ", - "Jason Carver ", - "Jessica Schilling ", - "Joe Turgeon ", - "Joel Gustafson ", - "Johannes Wikner ", - "Jon Schlinkert ", - "Jorropo ", - "Lars Gierth ", - "Lukas Drgon ", - "Marcus Bernales ", - "Mark Robert Henderson ", - "Mat Kelly ", - "Matt Zumwalt ", - "Michael Bradley ", - "Michelle Lee ", - "Mitar ", - "Mohamed Abdulaziz ", - "Mounish Sai ", - "Nick Poulden ", - "Nicolás Santángelo ", - "Níckolas Goline ", - "Orie Steele ", - "Oskar Nyberg ", - "Pau Ramon Revilla ", - "0xflotus <0xflotus@gmail.com>", - "Pete Thomas ", - "Rod Keys ", - "Roman Khafizianov ", - "SidHarder ", - "Tapasweni Pathak ", - "Tara Vancil ", - "Terence Pae ", - "Thiago Delgado ", - "Uroš Jurglič ", - "Victor Bjelkholm ", - "Vincent Martin ", - "Vutsal Singhal ", - "Yole ", - "Zhiyuan Lin ", - "bitspill ", - "datafatmunger ", - "dmitriy ryajov ", - "elsehow ", - "ethers ", - "hapsody ", - "isan_rivkin " - ] + "name": "js-ipfs", + "version": "1.0.0", + "description": "JavaScript implementation of the IPFS specification", + "devDependencies": { + "lerna": "^3.20.2" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/ipfs/js-ipfs.git" + }, + "private": true, + "contributors": [ + "David Dias ", + "Alan Shaw ", + "achingbrain ", + "Friedel Ziegelmayer ", + "Juan Batiz-Benet ", + "Hugo Dias ", + "Vasco Santos ", + "Henrique Dias ", + "Volker Mische ", + "ᴠɪᴄᴛᴏʀ ʙᴊᴇʟᴋʜᴏʟᴍ ", + "Stephen Whitmore ", + "Francisco Baio Dias ", + "Matt Bell ", + "Richard Schneider ", + "Jacob Heun ", + "Marcin Rataj ", + "Pedro Teixeira ", + "Travis Person ", + "Kristoffer Ström ", + "Dmitriy Ryajov ", + "nginnever ", + "Richard Littauer ", + "Oli Evans ", + "dirkmc ", + "Diogo Silva ", + "Connor Keenan ", + "Pedro Santos ", + "Harlan T Wood ", + "Pascal Precht ", + "Maciej Krüger ", + "Michael Garvin ", + "Steven Allen ", + "Michael Muré ", + "João Antunes ", + "Enrico Marino ", + "Christian Couder ", + "Rob Brackett ", + "Mithgol ", + "Prabhakar Poudel ", + "Sangwon Hong ", + "Jonathan ", + "Gavin McDermott ", + "Mikeal Rogers ", + "Dzmitry Das ", + "Andrew de Andrade ", + "Paulo Rodrigues ", + "haad ", + "Andrew Nesbitt ", + "Marius Darila ", + "Yahya ", + "Alex Mingoia ", + "RasmusErik Voel Jensen ", + "Ryan Bell ", + "Jeromy ", + "Gorka Ludlow ", + "Dan Ordille ", + "Matt Ober ", + "samuli ", + "Maxime Lathuilière ", + "Andrey ", + "shunkin ", + "Jim Pick ", + "tcme ", + "Jonybang ", + "Raoul Millais ", + "Georgios Rassias ", + "Portia Burton ", + "Antonio Tenorio-Fornés ", + "Kevin Simper ", + "Kevin Wang ", + "Irakli Gozalishvili ", + "David Gilbertson ", + "Максим Ильин ", + "Nuno Nogueira ", + "Sid Harder ", + "seungwon-kang ", + "Arpit Agarwal <93arpit@gmail.com>", + "kevingzhang ", + "leekt216 ", + "noah the goodra ", + "priecint ", + "ron litzenberger ", + "sarthak khandelwal ", + "Paul Cowgill ", + "Adam Uhlíř ", + "Alex North ", + "André Cruz ", + "Ayush Mahajan ", + "Bernard Mordan ", + "Brian Vander Schaaf ", + "Bruno Barbieri ", + "Bruno Zell ", + "CHEVALAY JOSSELIN ", + "Caio Gondim ", + "Chance Hudson ", + "Dafeng ", + "Daniel Buchner ", + "Daniel 
Constantin ", + "Daniel J. O'Quinn ", + "Daniela Borges Matos de Carvalho ", + "Danny ", + "David ", + "David Braun ", + "David da Silva ", + "Davide Icardi ", + "Dietrich Ayala ", + "Dmitry Nikulin ", + "Dominic Della Valle ", + "Donatas Stundys ", + "Faheel Ahmad ", + "Felix Yan ", + "Fil ", + "Filip Š ", + "Gabriel Garrido Calvo ", + "Giles ", + "Grant Herman ", + "Henry Rodrick ", + "Heo Sangmin ", + "Holodisc ", + "Jacob Karlsson ", + "Jade Meskill ", + "James Halliday ", + "Jason Carver ", + "Jessica Schilling ", + "Joe Turgeon ", + "Joel Gustafson ", + "Johannes Wikner ", + "Jon Schlinkert ", + "Jorropo ", + "Lars Gierth ", + "Lukas Drgon ", + "Marcus Bernales ", + "Mark Robert Henderson ", + "Mat Kelly ", + "Matt Zumwalt ", + "Michael Bradley ", + "Michelle Lee ", + "Mitar ", + "Mohamed Abdulaziz ", + "Mounish Sai ", + "Nick Poulden ", + "Nicolás Santángelo ", + "Níckolas Goline ", + "Orie Steele ", + "Oskar Nyberg ", + "Pau Ramon Revilla ", + "0xflotus <0xflotus@gmail.com>", + "Pete Thomas ", + "Rod Keys ", + "Roman Khafizianov ", + "SidHarder ", + "Tapasweni Pathak ", + "Tara Vancil ", + "Terence Pae ", + "Thiago Delgado ", + "Uroš Jurglič ", + "Victor Bjelkholm ", + "Vincent Martin ", + "Vutsal Singhal ", + "Yole ", + "Zhiyuan Lin ", + "bitspill ", + "datafatmunger ", + "dmitriy ryajov ", + "elsehow ", + "ethers ", + "hapsody ", + "isan_rivkin " + ], + "dependencies": { + "@babel/cli": "^7.1.5", + "@babel/core": "^7.2.2", + "@babel/preset-env": "^7.2.3", + "@babel/preset-react": "^7.0.0", + "@hapi/ammo": "^3.1.2", + "@hapi/boom": "^7.4.3", + "@hapi/content": "^4.1.0", + "@hapi/hapi": "^18.4.0", + "@hapi/joi": "^15.1.0", + "@vue/cli-plugin-babel": "^3.9.0", + "@vue/cli-plugin-eslint": "^3.9.0", + "@vue/cli-service": "^3.9.0", + "abort-controller": "^3.0.0", + "aegir": "^21.3.0", + "any-signal": "^1.1.0", + "array-shuffle": "^1.0.1", + "async": "^3.1.0", + "babel-eslint": "^10.0.1", + "babel-loader": "^8.0.5", + "babel-plugin-syntax-async-functions": "^6.13.0", + "babel-plugin-transform-regenerator": "^6.26.0", + "babel-polyfill": "^6.26.0", + "base64url": "^3.0.1", + "bignumber.js": "^9.0.0", + "binary-querystring": "^0.1.2", + "bl": "^4.0.0", + "browser-process-platform": "^0.1.1", + "browserify": "^16.2.3", + "bs58": "^4.0.1", + "buffer": "^5.4.2", + "byteman": "^1.3.5", + "chai": "^4.2.0", + "chai-as-promised": "^7.1.1", + "chai-things": "^0.2.0", + "chromedriver": "^79.0.0", + "cid-tool": "^0.4.0", + "cids": "^0.7.3", + "class-is": "^1.1.0", + "clear-module": "^4.0.0", + "concat-stream": "^2.0.0", + "copy-webpack-plugin": "^5.0.4", + "core-js": "^2.6.5", + "cross-env": "^7.0.0", + "dag-cbor-links": "^1.3.2", + "datastore-core": "^0.7.0", + "datastore-fs": "^0.9.1", + "datastore-level": "^0.14.1", + "datastore-pubsub": "^0.3.0", + "debug": "^4.1.0", + "delay": "^4.3.0", + "detect-node": "^2.0.4", + "detect-webworker": "^1.0.0", + "dirty-chai": "^2.0.1", + "dlv": "^1.1.3", + "dot-prop": "^5.0.0", + "electron": "^6.0.0", + "electron-rebuild": "^1.8.4", + "err-code": "^2.0.0", + "eslint": "^5.16.0", + "eslint-plugin-vue": "^5.0.0", + "execa": "^3.2.0", + "file-type": "^12.0.1", + "fnv1a": "^1.0.1", + "form-data": "^3.0.0", + "fs-extra": "^8.1.0", + "get-folder-size": "^2.0.0", + "go-ipfs-dep": "0.4.23-3", + "hamt-sharding": "^1.0.0", + "hapi-pino": "^6.1.0", + "hashlru": "^2.3.0", + "hat": "0.0.3", + "html-webpack-plugin": "^3.2.0", + "http-server": "^0.11.1", + "interface-datastore": "^0.8.0", + "ipfs-bitswap": "^0.27.1", + "ipfs-block": "^0.8.1", + "ipfs-block-service": 
"^0.16.0", + "ipfs-http-response": "^0.5.0", + "ipfs-multipart": "file:/Users/paulcowgill/Code/contract-work/3box/js-ipfs/packages/ipfs/.connect-deps-cache/ipfs-multipart-0.3.0-1583254040397.tgz", + "ipfs-pubsub-room": "^2.0.1", + "ipfs-repo": "^0.30.1", + "ipfs-unixfs": "^1.0.0", + "ipfs-unixfs-exporter": "^1.0.1", + "ipfs-unixfs-importer": "^1.0.1", + "ipfsd-ctl": "^3.0.0", + "ipld": "^0.25.0", + "ipld-bitcoin": "^0.3.0", + "ipld-dag-cbor": "^0.15.1", + "ipld-dag-pb": "^0.18.2", + "ipld-ethereum": "^4.0.0", + "ipld-git": "^0.5.0", + "ipld-raw": "^4.0.1", + "ipld-zcash": "^0.4.0", + "ipns": "^0.7.0", + "is-domain-name": "^1.0.1", + "is-electron": "^2.2.0", + "is-ipfs": "^0.6.1", + "it-all": "^1.0.1", + "it-concat": "^1.0.0", + "it-drain": "^1.0.0", + "it-first": "^1.0.1", + "it-glob": "0.0.7", + "it-last": "^1.0.1", + "it-multipart": "^1.0.1", + "it-pipe": "^1.1.0", + "it-pushable": "^1.3.1", + "it-tar": "^1.2.1", + "it-to-stream": "^0.1.1", + "iterable-ndjson": "^1.1.0", + "joi-browser": "^13.4.0", + "jsondiffpatch": "^0.3.11", + "just-safe-set": "^2.1.0", + "ky": "^0.15.0", + "ky-universal": "^0.3.0", + "libp2p": "^0.27.2", + "libp2p-bootstrap": "^0.10.2", + "libp2p-crypto": "^0.17.1", + "libp2p-delegated-content-routing": "^0.4.3", + "libp2p-delegated-peer-routing": "^0.4.1", + "libp2p-floodsub": "^0.20.0", + "libp2p-gossipsub": "^0.2.3", + "libp2p-kad-dht": "^0.18.3", + "libp2p-keychain": "^0.6.0", + "libp2p-mdns": "^0.13.0", + "libp2p-mplex": "^0.9.3", + "libp2p-record": "^0.7.0", + "libp2p-secio": "^0.12.1", + "libp2p-spdy": "^0.13.3", + "libp2p-tcp": "^0.14.2", + "libp2p-webrtc-star": "^0.17.6", + "libp2p-websockets": "^0.13.3", + "mafmt": "^7.0.0", + "memdown": "^5.1.0", + "merge-options": "^2.0.0", + "mime-sniffer": "~0.0.3", + "mortice": "^2.0.0", + "multiaddr": "^7.2.1", + "multiaddr-to-uri": "^5.1.0", + "multibase": "^0.6.0", + "multicodec": "^1.0.0", + "multihashes": "^0.4.14", + "multihashing-async": "^0.8.0", + "ncp": "^2.0.0", + "nightwatch": "^1.2.4", + "nock": "^11.7.2", + "nyc": "^15.0.0", + "p-defer": "^3.0.0", + "p-event": "^4.1.0", + "p-map": "^3.0.0", + "p-queue": "^6.1.0", + "parcel-bundler": "^1.6.2", + "parse-duration": "^0.1.2", + "peer-id": "^0.13.5", + "peer-info": "^0.17.0", + "pretty-bytes": "^5.3.0", + "progress": "^2.0.1", + "prom-client": "^11.5.3", + "prometheus-gc-stats": "^0.6.0", + "protons": "^1.0.1", + "qs": "^6.5.2", + "react": "^16.7.0", + "react-dom": "^16.7.0", + "react-hot-loader": "^4.8.8", + "react-scripts": "^3.2.0", + "readable-stream": "^3.4.0", + "request": "^2.88.0", + "rimraf": "^3.0.0", + "semver": "^7.1.2", + "sinon": "^8.0.4", + "standard": "^13.1.0", + "stream-to-it": "^0.2.0", + "stream-to-promise": "^2.2.0", + "streaming-iterables": "^4.1.1", + "string-argv": "^0.3.1", + "temp": "^0.9.0", + "temp-write": "^4.0.0", + "terser-webpack-plugin": "^1.2.1", + "timeout-abort-controller": "^1.1.0", + "update-notifier": "^4.0.0", + "uri-to-multiaddr": "^3.0.2", + "varint": "^5.0.0", + "videostream": "^3.2.0", + "vue": "^2.6.10", + "vue-template-compiler": "^2.6.10", + "webpack": "^4.28.4", + "webpack-cli": "^3.0.8", + "webpack-dev-server": "^3.1.14", + "which": "^2.0.1", + "yargs": "^15.1.0", + "yargs-promise": "^1.1.0" + } } diff --git a/packages/ipfs/src/http/api/resources/dag.js b/packages/ipfs/src/http/api/resources/dag.js index c32a93a919..13e554b996 100644 --- a/packages/ipfs/src/http/api/resources/dag.js +++ b/packages/ipfs/src/http/api/resources/dag.js @@ -1,327 +1,329 @@ -"use strict"; - -const CID = require("cids"); -const 
multipart = require("ipfs-multipart"); -const mh = require("multihashes"); -const Joi = require("@hapi/joi"); -const multibase = require("multibase"); -const multicodec = require("multicodec"); -const Boom = require("@hapi/boom"); -const debug = require("debug"); -const { cidToString } = require("../../../utils/cid"); -const all = require("it-all"); -const log = debug("ipfs:http-api:dag"); -log.error = debug("ipfs:http-api:dag:error"); +'use strict' + +const CID = require('cids') +const multipart = require('ipfs-multipart') +const mh = require('multihashes') +const Joi = require('@hapi/joi') +const multibase = require('multibase') +const multicodec = require('multicodec') +const Boom = require('@hapi/boom') +const debug = require('debug') +const { cidToString } = require('../../../utils/cid') +const all = require('it-all') +const log = debug('ipfs:http-api:dag') +log.error = debug('ipfs:http-api:dag:error') const IpldFormats = { - get [multicodec.RAW]() { - return require("ipld-raw"); - }, - get [multicodec.DAG_PB]() { - return require("ipld-dag-pb"); - }, - get [multicodec.DAG_CBOR]() { - return require("ipld-dag-cbor"); - }, - get [multicodec.BITCOIN_BLOCK]() { - return require("ipld-bitcoin"); - }, - get [multicodec.ETH_ACCOUNT_SNAPSHOT]() { - return require("ipld-ethereum").ethAccountSnapshot; - }, - get [multicodec.ETH_BLOCK]() { - return require("ipld-ethereum").ethBlock; - }, - get [multicodec.ETH_BLOCK_LIST]() { - return require("ipld-ethereum").ethBlockList; - }, - get [multicodec.ETH_STATE_TRIE]() { - return require("ipld-ethereum").ethStateTrie; - }, - get [multicodec.ETH_STORAGE_TRIE]() { - return require("ipld-ethereum").ethStorageTrie; - }, - get [multicodec.ETH_TX]() { - return require("ipld-ethereum").ethTx; - }, - get [multicodec.ETH_TX_TRIE]() { - return require("ipld-ethereum").ethTxTrie; - }, - get [multicodec.GIT_RAW]() { - return require("ipld-git"); - }, - get [multicodec.ZCASH_BLOCK]() { - return require("ipld-zcash"); - } -}; + get [multicodec.RAW]() { + return require('ipld-raw') + }, + get [multicodec.DAG_PB]() { + return require('ipld-dag-pb') + }, + get [multicodec.DAG_CBOR]() { + return require('ipld-dag-cbor') + }, + get [multicodec.BITCOIN_BLOCK]() { + return require('ipld-bitcoin') + }, + get [multicodec.ETH_ACCOUNT_SNAPSHOT]() { + return require('ipld-ethereum').ethAccountSnapshot + }, + get [multicodec.ETH_BLOCK]() { + return require('ipld-ethereum').ethBlock + }, + get [multicodec.ETH_BLOCK_LIST]() { + return require('ipld-ethereum').ethBlockList + }, + get [multicodec.ETH_STATE_TRIE]() { + return require('ipld-ethereum').ethStateTrie + }, + get [multicodec.ETH_STORAGE_TRIE]() { + return require('ipld-ethereum').ethStorageTrie + }, + get [multicodec.ETH_TX]() { + return require('ipld-ethereum').ethTx + }, + get [multicodec.ETH_TX_TRIE]() { + return require('ipld-ethereum').ethTxTrie + }, + get [multicodec.GIT_RAW]() { + return require('ipld-git') + }, + get [multicodec.ZCASH_BLOCK]() { + return require('ipld-zcash') + }, +} // common pre request handler that parses the args and returns `key` which is assigned to `request.pre.args` -exports.parseKey = (argument = "Argument", name = "key", quote = "'") => { - return request => { - if (!request.query.arg) { - // for compatibility with go error messages - throw Boom.badRequest(`${argument} ${quote}${name}${quote} is required`); - } +exports.parseKey = (argument = 'Argument', name = 'key', quote = "'") => { + return request => { + if (!request.query.arg) { + // for compatibility with go error messages + throw 
Boom.badRequest( + `${argument} ${quote}${name}${quote} is required` + ) + } - let key = request.query.arg.trim(); - let path; + let key = request.query.arg.trim() + let path - if (key.startsWith("/ipfs")) { - key = key.substring(5); - } + if (key.startsWith('/ipfs')) { + key = key.substring(5) + } - const parts = key.split("/"); + const parts = key.split('/') - if (parts.length > 1) { - key = parts.shift(); - path = `${parts.join("/")}`; - } + if (parts.length > 1) { + key = parts.shift() + path = `${parts.join('/')}` + } - if (path && path.endsWith("/")) { - path = path.substring(0, path.length - 1); - } + if (path && path.endsWith('/')) { + path = path.substring(0, path.length - 1) + } - try { - return { - [name]: new CID(key), - path - }; - } catch (err) { - log.error(err); - throw Boom.badRequest("invalid 'ipfs ref' path"); + try { + return { + [name]: new CID(key), + path, + } + } catch (err) { + log.error(err) + throw Boom.badRequest("invalid 'ipfs ref' path") + } } - }; -}; +} const encodeBufferKeys = (obj, encoding) => { - if (!obj) { - return obj; - } - - if (Buffer.isBuffer(obj)) { - return obj.toString(encoding); - } + if (!obj) { + return obj + } - Object.keys(obj).forEach(key => { if (Buffer.isBuffer(obj)) { - obj[key] = obj[key].toString(encoding); - - return; + return obj.toString(encoding) } - if (typeof obj[key] === "object") { - obj[key] = encodeBufferKeys(obj[key], encoding); - } - }); + Object.keys(obj).forEach(key => { + if (Buffer.isBuffer(obj)) { + obj[key] = obj[key].toString(encoding) + + return + } + + if (typeof obj[key] === 'object') { + obj[key] = encodeBufferKeys(obj[key], encoding) + } + }) - return obj; -}; + return obj +} exports.get = { - validate: { - query: Joi.object() - .keys({ - "data-encoding": Joi.string() - .valid("text", "base64", "hex") - .default("text"), - "cid-base": Joi.string().valid(...multibase.names) - }) - .unknown() - }, - - // uses common parseKey method that returns a `key` - parseArgs: exports.parseKey(), - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler(request, h) { - const { key, path } = request.pre.args; - const { ipfs } = request.server.app; - - let dataEncoding = request.query["data-encoding"]; - - if (dataEncoding === "text") { - dataEncoding = "utf8"; - } + validate: { + query: Joi.object() + .keys({ + 'data-encoding': Joi.string() + .valid('text', 'base64', 'hex') + .default('text'), + 'cid-base': Joi.string().valid(...multibase.names), + }) + .unknown(), + }, + + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey(), + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + async handler(request, h) { + const { key, path } = request.pre.args + const { ipfs } = request.server.app + + let dataEncoding = request.query['data-encoding'] + + if (dataEncoding === 'text') { + dataEncoding = 'utf8' + } - let result; + let result - try { - result = await ipfs.dag.get(key, path); - } catch (err) { - throw Boom.badRequest(err); - } + try { + result = await ipfs.dag.get(key, path) + } catch (err) { + throw Boom.badRequest(err) + } - let value = result.value; + let value = result.value - if (!Buffer.isBuffer(result.value) && result.value.toJSON) { - value = result.value.toJSON(); - } + if (!Buffer.isBuffer(result.value) && result.value.toJSON) { + value = result.value.toJSON() + } - try { - result.value = encodeBufferKeys(value, dataEncoding); - } catch (err) { - throw Boom.boomify(err); - } 
+ try { + result.value = encodeBufferKeys(value, dataEncoding) + } catch (err) { + throw Boom.boomify(err) + } - return h.response(result.value); - } -}; + return h.response(result.value) + }, +} exports.put = { - validate: { - query: Joi.object() - .keys({ - format: Joi.string().default("cbor"), - "input-enc": Joi.string().default("json"), - pin: Joi.boolean(), - hash: Joi.string() - .valid(...Object.keys(mh.names)) - .default("sha2-256"), - "cid-base": Joi.string().valid(...multibase.names) - }) - .unknown() - }, - - // pre request handler that parses the args and returns `node` - // which is assigned to `request.pre.args` - async parseArgs(request, h) { - if (!request.payload) { - throw Boom.badRequest("File argument 'object data' is required"); - } + validate: { + query: Joi.object() + .keys({ + format: Joi.string().default('cbor'), + 'input-enc': Joi.string().default('json'), + pin: Joi.boolean(), + hash: Joi.string() + .valid(...Object.keys(mh.names)) + .default('sha2-256'), + 'cid-base': Joi.string().valid(...multibase.names), + }) + .unknown(), + }, + + // pre request handler that parses the args and returns `node` + // which is assigned to `request.pre.args` + async parseArgs(request, h) { + if (!request.payload) { + throw Boom.badRequest("File argument 'object data' is required") + } - const enc = request.query["input-enc"]; + const enc = request.query['input-enc'] - if (!request.headers["content-type"]) { - throw Boom.badRequest("File argument 'object data' is required"); - } + if (!request.headers['content-type']) { + throw Boom.badRequest("File argument 'object data' is required") + } - let data; + let data - for await (const part of multipart(request)) { - if (part.type !== "file") { - continue; - } + for await (const part of multipart(request)) { + if (part.type !== 'file') { + continue + } - data = Buffer.concat(await all(part.content)); - } + data = Buffer.concat(await all(part.content)) + } - if (!data) { - throw Boom.badRequest("File argument 'object data' is required"); - } + if (!data) { + throw Boom.badRequest("File argument 'object data' is required") + } - let format = request.query.format; + let format = request.query.format - if (format === "cbor") { - format = "dag-cbor"; - } + if (format === 'cbor') { + format = 'dag-cbor' + } - let node; - - if (format === "raw") { - node = data; - } else if (enc === "json") { - try { - node = JSON.parse(data.toString()); - } catch (err) { - throw Boom.badRequest("Failed to parse the JSON: " + err); - } - } else { - const codec = multicodec[format.toUpperCase().replace(/-/g, "_")]; - if (!IpldFormats[codec]) - throw new Error(`Missing IPLD format "${codec}"`); - node = await IpldFormats[codec].util.deserialize(data); - } + let node + + if (format === 'raw') { + node = data + } else if (enc === 'json') { + try { + node = JSON.parse(data.toString()) + } catch (err) { + throw Boom.badRequest('Failed to parse the JSON: ' + err) + } + } else { + const codec = multicodec[format.toUpperCase().replace(/-/g, '_')] + if (!IpldFormats[codec]) + throw new Error(`Missing IPLD format "${codec}"`) + node = await IpldFormats[codec].util.deserialize(data) + } - return { - node, - format, - hashAlg: request.query.hash - }; - }, - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler(request, h) { - console.log("========"); - console.log("========"); - console.log("========"); - console.log("========"); - console.log("========"); - console.log("Incoming request!"); - 
console.log("========"); - const { ipfs } = request.server.app; - const { node, format, hashAlg } = request.pre.args; - - let cid; - - try { - cid = await ipfs.dag.put(node, { - format: format, - hashAlg: hashAlg - }); - } catch (err) { - throw Boom.boomify(err, { message: "Failed to put node" }); - } + return { + node, + format, + hashAlg: request.query.hash, + } + }, + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + async handler(request, h) { + console.log('========') + console.log('========') + console.log('========') + console.log('========') + console.log('========') + console.log('Incoming request!') + console.log('========') + const { ipfs } = request.server.app + const { node, format, hashAlg } = request.pre.args + + let cid + + try { + cid = await ipfs.dag.put(node, { + format: format, + hashAlg: hashAlg, + }) + } catch (err) { + throw Boom.boomify(err, { message: 'Failed to put node' }) + } - if (request.query.pin) { - await ipfs.pin.add(cid); - } + if (request.query.pin) { + await ipfs.pin.add(cid) + } - return h.response({ - Cid: { - "/": cidToString(cid, { - base: request.query["cid-base"] + return h.response({ + Cid: { + '/': cidToString(cid, { + base: request.query['cid-base'], + }), + }, }) - } - }); - } -}; + }, +} exports.resolve = { - validate: { - query: Joi.object() - .keys({ - "cid-base": Joi.string().valid(...multibase.names) - }) - .unknown() - }, - - // uses common parseKey method that returns a `key` - parseArgs: exports.parseKey("argument", "ref", '"'), - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler(request, h) { - const { ref, path } = request.pre.args; - const { ipfs } = request.server.app; - - // to be consistent with go we need to return the CID to the last node we've traversed - // along with the path inside that node as the remainder path - try { - let lastCid = ref; - let lastRemainderPath = path; - - if (path) { - const result = ipfs.dag.resolve(lastCid, path); - while (true) { - const resolveResult = (await result.next()).value; - if (!CID.isCID(resolveResult.value)) { - break; - } - - lastRemainderPath = resolveResult.remainderPath; - lastCid = resolveResult.value; + validate: { + query: Joi.object() + .keys({ + 'cid-base': Joi.string().valid(...multibase.names), + }) + .unknown(), + }, + + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey('argument', 'ref', '"'), + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + async handler(request, h) { + const { ref, path } = request.pre.args + const { ipfs } = request.server.app + + // to be consistent with go we need to return the CID to the last node we've traversed + // along with the path inside that node as the remainder path + try { + let lastCid = ref + let lastRemainderPath = path + + if (path) { + const result = ipfs.dag.resolve(lastCid, path) + while (true) { + const resolveResult = (await result.next()).value + if (!CID.isCID(resolveResult.value)) { + break + } + + lastRemainderPath = resolveResult.remainderPath + lastCid = resolveResult.value + } + } + + return h.response({ + Cid: { + '/': cidToString(lastCid, { + base: request.query['cid-base'], + }), + }, + RemPath: lastRemainderPath || '', + }) + } catch (err) { + throw Boom.boomify(err) } - } - - return h.response({ - Cid: { - "/": cidToString(lastCid, { - base: request.query["cid-base"] - }) - }, - RemPath: lastRemainderPath || "" - }); 
- } catch (err) { - throw Boom.boomify(err); - } - } -}; + }, +} diff --git a/packages/ipfs/src/http/api/resources/files-regular.js b/packages/ipfs/src/http/api/resources/files-regular.js index e96dce7fb0..0a3770db3e 100644 --- a/packages/ipfs/src/http/api/resources/files-regular.js +++ b/packages/ipfs/src/http/api/resources/files-regular.js @@ -1,417 +1,427 @@ -"use strict"; - -const multipart = require("ipfs-multipart"); -const debug = require("debug"); -const tar = require("it-tar"); -const log = debug("ipfs:http-api:files"); -log.error = debug("ipfs:http-api:files:error"); -const toIterable = require("stream-to-it"); -const Joi = require("@hapi/joi"); -const Boom = require("@hapi/boom"); -const { PassThrough } = require("stream"); -const multibase = require("multibase"); -const isIpfs = require("is-ipfs"); -const { cidToString } = require("../../../utils/cid"); -const { Format } = require("../../../core/components/refs"); -const pipe = require("it-pipe"); -const all = require("it-all"); -const ndjson = require("iterable-ndjson"); -const { map } = require("streaming-iterables"); -const streamResponse = require("../../utils/stream-response"); +'use strict' + +const multipart = require('ipfs-multipart') +const debug = require('debug') +const tar = require('it-tar') +const log = debug('ipfs:http-api:files') +log.error = debug('ipfs:http-api:files:error') +const toIterable = require('stream-to-it') +const Joi = require('@hapi/joi') +const Boom = require('@hapi/boom') +const { PassThrough } = require('stream') +const multibase = require('multibase') +const isIpfs = require('is-ipfs') +const { cidToString } = require('../../../utils/cid') +const { Format } = require('../../../core/components/refs') +const pipe = require('it-pipe') +const all = require('it-all') +const ndjson = require('iterable-ndjson') +const { map } = require('streaming-iterables') +const streamResponse = require('../../utils/stream-response') const toBuffer = async function*(source) { - for await (const chunk of source) { - yield chunk.slice(); - } -}; + for await (const chunk of source) { + yield chunk.slice() + } +} function numberFromQuery(query, key) { - if (query && query[key] !== undefined) { - const value = parseInt(query[key], 10); + if (query && query[key] !== undefined) { + const value = parseInt(query[key], 10) - if (isNaN(value)) { - return undefined; - } + if (isNaN(value)) { + return undefined + } - return value; - } + return value + } } // common pre request handler that parses the args and returns `key` which is assigned to `request.pre.args` exports.parseKey = (request, h) => { - const { arg } = request.query; - - if (!arg) { - throw Boom.badRequest("Argument 'key' is required"); - } - - const isArray = Array.isArray(arg); - const args = isArray ? arg : [arg]; - for (const arg of args) { - if ( - !isIpfs.ipfsPath(arg) && - !isIpfs.cid(arg) && - !isIpfs.ipfsPath("/ipfs/" + arg) - ) { - throw Boom.badRequest(`invalid ipfs ref path '${arg}'`); + const { arg } = request.query + + if (!arg) { + throw Boom.badRequest("Argument 'key' is required") + } + + const isArray = Array.isArray(arg) + const args = isArray ? arg : [arg] + for (const arg of args) { + if ( + !isIpfs.ipfsPath(arg) && + !isIpfs.cid(arg) && + !isIpfs.ipfsPath('/ipfs/' + arg) + ) { + throw Boom.badRequest(`invalid ipfs ref path '${arg}'`) + } } - } - return { - key: isArray ? args : arg, - options: { - offset: numberFromQuery(request.query, "offset"), - length: numberFromQuery(request.query, "length") + return { + key: isArray ? 
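// NOTE: numberFromQuery() above returns undefined both when the key is absent
// and when parseInt() yields NaN, so malformed values silently fall back to
// the defaults of whatever consumes the options. Illustration, with
// hypothetical query objects:
//   numberFromQuery({ offset: '10' }, 'offset')  // => 10
//   numberFromQuery({ offset: 'ten' }, 'offset') // => undefined
//   numberFromQuery({}, 'offset')                // => undefined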
args : arg, + options: { + offset: numberFromQuery(request.query, 'offset'), + length: numberFromQuery(request.query, 'length'), + }, } - }; -}; +} exports.cat = { - // uses common parseKey method that returns a `key` - parseArgs: exports.parseKey, - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - handler(request, h) { - const { ipfs } = request.server.app; - const { key, options } = request.pre.args; - - return streamResponse(request, h, () => ipfs.cat(key, options), { - onError(err) { - err.message = - err.message === "file does not exist" - ? err.message - : "Failed to cat file: " + err.message; - } - }); - } -}; + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey, + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + handler(request, h) { + const { ipfs } = request.server.app + const { key, options } = request.pre.args + + return streamResponse(request, h, () => ipfs.cat(key, options), { + onError(err) { + err.message = + err.message === 'file does not exist' + ? err.message + : 'Failed to cat file: ' + err.message + }, + }) + }, +} exports.get = { - // uses common parseKey method that returns a `key` - parseArgs: exports.parseKey, - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - handler(request, h) { - const { ipfs } = request.server.app; - const { key } = request.pre.args; - - return streamResponse(request, h, () => - pipe( - ipfs.get(key), - async function*(source) { - for await (const file of source) { - const header = { - name: file.path - }; - - if (file.content) { - yield { - header: { ...header, size: file.size }, - body: toBuffer(file.content) - }; - } else { - yield { header: { ...header, type: "directory" } }; - } - } - }, - tar.pack(), - toBuffer - ) - ); - } -}; + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey, + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + handler(request, h) { + const { ipfs } = request.server.app + const { key } = request.pre.args + + return streamResponse(request, h, () => + pipe( + ipfs.get(key), + async function*(source) { + for await (const file of source) { + const header = { + name: file.path, + } + + if (file.content) { + yield { + header: { ...header, size: file.size }, + body: toBuffer(file.content), + } + } else { + yield { header: { ...header, type: 'directory' } } + } + } + }, + tar.pack(), + toBuffer + ) + ) + }, +} exports.add = { - validate: { - query: Joi.object() - .keys({ - "cid-version": Joi.number() - .integer() - .min(0) - .max(1) - .default(0), - "cid-base": Joi.string().valid(...multibase.names), - "raw-leaves": Joi.boolean(), - "only-hash": Joi.boolean(), - pin: Joi.boolean().default(true), - "wrap-with-directory": Joi.boolean(), - "file-import-concurrency": Joi.number() - .integer() - .min(0) - .default(50), - "block-write-concurrency": Joi.number() - .integer() - .min(0) - .default(10), - chunker: Joi.string(), - trickle: Joi.boolean(), - preload: Joi.boolean().default(true) - }) - // TODO: Necessary until validate "recursive", "stream-channels" etc. 
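// NOTE: a standalone sketch of the tarball pipeline exports.get builds above,
// using the it-pipe / it-tar / toBuffer names this file already has in scope;
// the tarballOf name is illustrative, not part of the patch. toBuffer is
// applied to file contents and to the packed output to turn BufferList-style
// chunks into plain Buffers.
const tarballOf = (ipfs, key) => pipe(
  ipfs.get(key),
  async function * (source) {
    for await (const file of source) {
      if (file.content) {
        yield { header: { name: file.path, size: file.size }, body: toBuffer(file.content) }
      } else {
        yield { header: { name: file.path, type: 'directory' } }
      }
    }
  },
  tar.pack(),
  toBuffer
)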
- .options({ allowUnknown: true }) - }, - - handler(request, h) { - if (!request.payload) { - throw Boom.badRequest("Array, Buffer, or String is required."); - } - console.log("========"); - console.log("========"); - console.log("========"); - console.log("========"); - console.log("========"); - console.log("Incoming request!"); - console.log("========"); - // console.log(request.payload); - - const { ipfs } = request.server.app; - let filesParsed = false; - let currentFileName; - const output = new PassThrough(); - const progressHandler = bytes => { - output.write( - JSON.stringify({ - Name: currentFileName, - Bytes: bytes - }) + "\n" - ); - }; - - pipe( - multipart(request), - async function*(source) { - console.log("-------"); - console.log({ source }); - for await (const entry of source) { - console.log("-------"); - // console.log({ entry }); - currentFileName = entry.name || "unknown"; - - if (entry.type === "file") { - filesParsed = true; - - // Just for logging - for await (const chunk of entry.content) { - console.log({ chunk }); - } - // end of extra logging code - - yield { - path: entry.name, - content: entry.content, - mode: entry.mode, - mtime: entry.mtime - }; - } - - if (entry.type === "directory") { - filesParsed = true; - - yield { - path: entry.name, - mode: entry.mode, - mtime: entry.mtime - }; - } + validate: { + query: Joi.object() + .keys({ + 'cid-version': Joi.number() + .integer() + .min(0) + .max(1) + .default(0), + 'cid-base': Joi.string().valid(...multibase.names), + 'raw-leaves': Joi.boolean(), + 'only-hash': Joi.boolean(), + pin: Joi.boolean().default(true), + 'wrap-with-directory': Joi.boolean(), + 'file-import-concurrency': Joi.number() + .integer() + .min(0) + .default(50), + 'block-write-concurrency': Joi.number() + .integer() + .min(0) + .default(10), + chunker: Joi.string(), + trickle: Joi.boolean(), + preload: Joi.boolean().default(true), + }) + // TODO: Necessary until validate "recursive", "stream-channels" etc. + .options({ allowUnknown: true }), + }, + + handler(request, h) { + if (!request.payload) { + throw Boom.badRequest('Array, Buffer, or String is required.') } - }, - function(source) { - return ipfs.add(source, { - cidVersion: request.query["cid-version"], - rawLeaves: request.query["raw-leaves"], - progress: request.query.progress ? progressHandler : () => {}, - onlyHash: request.query["only-hash"], - hashAlg: request.query.hash, - wrapWithDirectory: request.query["wrap-with-directory"], - pin: request.query.pin, - chunker: request.query.chunker, - trickle: request.query.trickle, - preload: request.query.preload, - - // this has to be hardcoded to 1 because we can only read one file - // at a time from a http request and we have to consume it completely - // before we can read the next file - fileImportConcurrency: 1, - blockWriteConcurrency: request.query["block-write-concurrency"] - }); - }, - map(file => { - const entry = { - Name: file.path, - Hash: cidToString(file.cid, { base: request.query["cid-base"] }), - Size: file.size, - Mode: - file.mode === undefined - ? 
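// NOTE: the "Just for logging" loop above drains entry.content -- async
// iterables are single-pass -- so ipfs.add is likely to receive an already
// exhausted stream while that debug code is in place. If the chunks are
// needed downstream they would have to be buffered and re-emitted, e.g.
// (sketch):
//   const chunks = []
//   for await (const chunk of entry.content) {
//     console.log({ bytes: chunk.length })
//     chunks.push(chunk)
//   }
//   yield { path: entry.name, content: chunks, mode: entry.mode, mtime: entry.mtime }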
undefined - : file.mode.toString(8).padStart(4, "0") - }; - - if (file.mtime) { - entry.Mtime = file.mtime.secs; - entry.MtimeNsecs = file.mtime.nsecs; + console.log('========') + console.log('========') + console.log('========') + console.log('========') + console.log('========') + console.log('Incoming request!') + console.log('========') + // console.log(request.payload); + + const { ipfs } = request.server.app + let filesParsed = false + let currentFileName + const output = new PassThrough() + const progressHandler = bytes => { + output.write( + JSON.stringify({ + Name: currentFileName, + Bytes: bytes, + }) + '\n' + ) } - return entry; - }), - ndjson.stringify, - toIterable.sink(output) - ) - .then(() => { - if (!filesParsed) { - throw new Error("File argument 'data' is required."); - } - }) - .catch(err => { - if (!filesParsed) { - output.write(" "); + pipe( + multipart(request), + async function*(source) { + console.log('-------') + console.log({ source }) + for await (const entry of source) { + console.log('-------') + // console.log({ entry }); + currentFileName = entry.name || 'unknown' + + if (entry.type === 'file') { + filesParsed = true + + // Just for logging + for await (const chunk of entry.content) { + console.log({ chunk }) + } + // end of extra logging code + + yield { + path: entry.name, + content: entry.content, + mode: entry.mode, + mtime: entry.mtime, + } + } + + if (entry.type === 'directory') { + filesParsed = true + + yield { + path: entry.name, + mode: entry.mode, + mtime: entry.mtime, + } + } + } + }, + function(source) { + return ipfs.add(source, { + cidVersion: request.query['cid-version'], + rawLeaves: request.query['raw-leaves'], + progress: request.query.progress + ? progressHandler + : () => {}, + onlyHash: request.query['only-hash'], + hashAlg: request.query.hash, + wrapWithDirectory: request.query['wrap-with-directory'], + pin: request.query.pin, + chunker: request.query.chunker, + trickle: request.query.trickle, + preload: request.query.preload, + + // this has to be hardcoded to 1 because we can only read one file + // at a time from a http request and we have to consume it completely + // before we can read the next file + fileImportConcurrency: 1, + blockWriteConcurrency: + request.query['block-write-concurrency'], + }) + }, + map(file => { + const entry = { + Name: file.path, + Hash: cidToString(file.cid, { + base: request.query['cid-base'], + }), + Size: file.size, + Mode: + file.mode === undefined + ? 
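// NOTE: progressHandler above interleaves progress updates with the result
// stream as NDJSON, one object per write, and is only wired up when the
// ?progress flag is truthy. A typical line (hypothetical name and byte
// count) looks like:
//   {"Name":"photo.jpg","Bytes":262144}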
undefined + : file.mode.toString(8).padStart(4, '0'), + } + + if (file.mtime) { + entry.Mtime = file.mtime.secs + entry.MtimeNsecs = file.mtime.nsecs + } + + return entry + }), + ndjson.stringify, + toIterable.sink(output) + ) + .then(() => { + if (!filesParsed) { + throw new Error("File argument 'data' is required.") + } + }) + .catch(err => { + if (!filesParsed) { + output.write(' ') + } + + request.raw.res.addTrailers({ + 'X-Stream-Error': JSON.stringify({ + Message: err.message, + Code: 0, + }), + }) + }) + .then(() => { + output.end() + }) + + return h + .response(output) + .header('x-chunked-output', '1') + .header('content-type', 'application/json') + .header('Trailer', 'X-Stream-Error') + }, +} + +exports.ls = { + validate: { + query: Joi.object() + .keys({ + 'cid-base': Joi.string().valid(...multibase.names), + stream: Joi.boolean(), + }) + .unknown(), + }, + + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey, + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + async handler(request, h) { + const { ipfs } = request.server.app + const { key } = request.pre.args + const recursive = request.query && request.query.recursive === 'true' + const cidBase = request.query['cid-base'] + + const mapLink = link => { + const output = { + Name: link.name, + Hash: cidToString(link.cid, { base: cidBase }), + Size: link.size, + Type: toTypeCode(link.type), + Depth: link.depth, + Mode: link.mode.toString(8).padStart(4, '0'), + } + + if (link.mtime) { + output.Mtime = link.mtime.secs + + if ( + link.mtime.nsecs !== null && + link.mtime.nsecs !== undefined + ) { + output.MtimeNsecs = link.mtime.nsecs + } + } + + return output } - request.raw.res.addTrailers({ - "X-Stream-Error": JSON.stringify({ - Message: err.message, - Code: 0 - }) - }); - }) - .then(() => { - output.end(); - }); - - return h - .response(output) - .header("x-chunked-output", "1") - .header("content-type", "application/json") - .header("Trailer", "X-Stream-Error"); - } -}; + if (!request.query.stream) { + let links + try { + links = await all(ipfs.ls(key, { recursive })) + } catch (err) { + throw Boom.boomify(err, { message: 'Failed to list dir' }) + } -exports.ls = { - validate: { - query: Joi.object() - .keys({ - "cid-base": Joi.string().valid(...multibase.names), - stream: Joi.boolean() - }) - .unknown() - }, - - // uses common parseKey method that returns a `key` - parseArgs: exports.parseKey, - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler(request, h) { - const { ipfs } = request.server.app; - const { key } = request.pre.args; - const recursive = request.query && request.query.recursive === "true"; - const cidBase = request.query["cid-base"]; - - const mapLink = link => { - const output = { - Name: link.name, - Hash: cidToString(link.cid, { base: cidBase }), - Size: link.size, - Type: toTypeCode(link.type), - Depth: link.depth, - Mode: link.mode.toString(8).padStart(4, "0") - }; - - if (link.mtime) { - output.Mtime = link.mtime.secs; - - if (link.mtime.nsecs !== null && link.mtime.nsecs !== undefined) { - output.MtimeNsecs = link.mtime.nsecs; + return h.response({ + Objects: [{ Hash: key, Links: links.map(mapLink) }], + }) } - } - - return output; - }; - - if (!request.query.stream) { - let links; - try { - links = await all(ipfs.ls(key, { recursive })); - } catch (err) { - throw Boom.boomify(err, { message: "Failed to list dir" }); - } - - return h.response({ - Objects: [{ 
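// NOTE: failures are reported out-of-band via the X-Stream-Error trailer
// declared on the response above. With Node's http client, trailers only
// become readable once the response has ended, e.g. (sketch):
//   res.on('end', () => {
//     const trailer = res.trailers['x-stream-error'] // keys are lower-cased
//     if (trailer) console.error(JSON.parse(trailer).Message)
//   })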
Hash: key, Links: links.map(mapLink) }] - }); - } - return streamResponse(request, h, () => - pipe( - ipfs.ls(key, { recursive }), - map(link => ({ Objects: [{ Hash: key, Links: [mapLink(link)] }] })), - ndjson.stringify - ) - ); - } -}; + return streamResponse(request, h, () => + pipe( + ipfs.ls(key, { recursive }), + map(link => ({ + Objects: [{ Hash: key, Links: [mapLink(link)] }], + })), + ndjson.stringify + ) + ) + }, +} function toTypeCode(type) { - switch (type) { - case "dir": - return 1; - case "file": - return 2; - default: - return 0; - } + switch (type) { + case 'dir': + return 1 + case 'file': + return 2 + default: + return 0 + } } exports.refs = { - validate: { - query: Joi.object() - .keys({ - recursive: Joi.boolean().default(false), - format: Joi.string().default(Format.default), - edges: Joi.boolean().default(false), - unique: Joi.boolean().default(false), - "max-depth": Joi.number() - .integer() - .min(-1) - }) - .unknown() - }, - - // uses common parseKey method that returns a `key` - parseArgs: exports.parseKey, - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - handler(request, h) { - const { ipfs } = request.server.app; - const { key } = request.pre.args; - - const options = { - recursive: request.query.recursive, - format: request.query.format, - edges: request.query.edges, - unique: request.query.unique, - maxDepth: request.query["max-depth"] - }; - - return streamResponse(request, h, () => - pipe( - ipfs.refs(key, options), - map(({ ref, err }) => ({ Ref: ref, Err: err })), - ndjson.stringify - ) - ); - } -}; + validate: { + query: Joi.object() + .keys({ + recursive: Joi.boolean().default(false), + format: Joi.string().default(Format.default), + edges: Joi.boolean().default(false), + unique: Joi.boolean().default(false), + 'max-depth': Joi.number() + .integer() + .min(-1), + }) + .unknown(), + }, + + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey, + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + handler(request, h) { + const { ipfs } = request.server.app + const { key } = request.pre.args + + const options = { + recursive: request.query.recursive, + format: request.query.format, + edges: request.query.edges, + unique: request.query.unique, + maxDepth: request.query['max-depth'], + } + + return streamResponse(request, h, () => + pipe( + ipfs.refs(key, options), + map(({ ref, err }) => ({ Ref: ref, Err: err })), + ndjson.stringify + ) + ) + }, +} exports.refs.local = { - // main route handler - handler(request, h) { - const { ipfs } = request.server.app; - - return streamResponse(request, h, () => - pipe( - ipfs.refs.local(), - map(({ ref, err }) => ({ Ref: ref, Err: err })), - ndjson.stringify - ) - ); - } -}; + // main route handler + handler(request, h) { + const { ipfs } = request.server.app + + return streamResponse(request, h, () => + pipe( + ipfs.refs.local(), + map(({ ref, err }) => ({ Ref: ref, Err: err })), + ndjson.stringify + ) + ) + }, +} From 5f29e6dbe0c47fac83683ac2a962ff259d882671 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 13:32:05 -0500 Subject: [PATCH 03/20] Fixed indentation --- packages/ipfs/src/http/api/resources/dag.js | 527 +++++++------ .../src/http/api/resources/files-regular.js | 718 +++++++++--------- 2 files changed, 618 insertions(+), 627 deletions(-) diff --git a/packages/ipfs/src/http/api/resources/dag.js b/packages/ipfs/src/http/api/resources/dag.js index 
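// NOTE: with ?stream=true each link is emitted above as its own NDJSON line
// of the same shape as the buffered response, i.e. one single-link object per
// line (hypothetical CID):
//   {"Objects":[{"Hash":"QmHash...","Links":[{"Name":"a.txt", ...}]}]}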
13e554b996..a678e9587b 100644 --- a/packages/ipfs/src/http/api/resources/dag.js +++ b/packages/ipfs/src/http/api/resources/dag.js @@ -14,316 +14,313 @@ const log = debug('ipfs:http-api:dag') log.error = debug('ipfs:http-api:dag:error') const IpldFormats = { - get [multicodec.RAW]() { - return require('ipld-raw') - }, - get [multicodec.DAG_PB]() { - return require('ipld-dag-pb') - }, - get [multicodec.DAG_CBOR]() { - return require('ipld-dag-cbor') - }, - get [multicodec.BITCOIN_BLOCK]() { - return require('ipld-bitcoin') - }, - get [multicodec.ETH_ACCOUNT_SNAPSHOT]() { - return require('ipld-ethereum').ethAccountSnapshot - }, - get [multicodec.ETH_BLOCK]() { - return require('ipld-ethereum').ethBlock - }, - get [multicodec.ETH_BLOCK_LIST]() { - return require('ipld-ethereum').ethBlockList - }, - get [multicodec.ETH_STATE_TRIE]() { - return require('ipld-ethereum').ethStateTrie - }, - get [multicodec.ETH_STORAGE_TRIE]() { - return require('ipld-ethereum').ethStorageTrie - }, - get [multicodec.ETH_TX]() { - return require('ipld-ethereum').ethTx - }, - get [multicodec.ETH_TX_TRIE]() { - return require('ipld-ethereum').ethTxTrie - }, - get [multicodec.GIT_RAW]() { - return require('ipld-git') - }, - get [multicodec.ZCASH_BLOCK]() { - return require('ipld-zcash') - }, + get [multicodec.RAW]() { + return require('ipld-raw') + }, + get [multicodec.DAG_PB]() { + return require('ipld-dag-pb') + }, + get [multicodec.DAG_CBOR]() { + return require('ipld-dag-cbor') + }, + get [multicodec.BITCOIN_BLOCK]() { + return require('ipld-bitcoin') + }, + get [multicodec.ETH_ACCOUNT_SNAPSHOT]() { + return require('ipld-ethereum').ethAccountSnapshot + }, + get [multicodec.ETH_BLOCK]() { + return require('ipld-ethereum').ethBlock + }, + get [multicodec.ETH_BLOCK_LIST]() { + return require('ipld-ethereum').ethBlockList + }, + get [multicodec.ETH_STATE_TRIE]() { + return require('ipld-ethereum').ethStateTrie + }, + get [multicodec.ETH_STORAGE_TRIE]() { + return require('ipld-ethereum').ethStorageTrie + }, + get [multicodec.ETH_TX]() { + return require('ipld-ethereum').ethTx + }, + get [multicodec.ETH_TX_TRIE]() { + return require('ipld-ethereum').ethTxTrie + }, + get [multicodec.GIT_RAW]() { + return require('ipld-git') + }, + get [multicodec.ZCASH_BLOCK]() { + return require('ipld-zcash') + }, } // common pre request handler that parses the args and returns `key` which is assigned to `request.pre.args` exports.parseKey = (argument = 'Argument', name = 'key', quote = "'") => { - return request => { - if (!request.query.arg) { - // for compatibility with go error messages - throw Boom.badRequest( - `${argument} ${quote}${name}${quote} is required` - ) - } + return request => { + if (!request.query.arg) { + // for compatibility with go error messages + throw Boom.badRequest(`${argument} ${quote}${name}${quote} is required`) + } - let key = request.query.arg.trim() - let path + let key = request.query.arg.trim() + let path - if (key.startsWith('/ipfs')) { - key = key.substring(5) - } + if (key.startsWith('/ipfs')) { + key = key.substring(5) + } - const parts = key.split('/') + const parts = key.split('/') - if (parts.length > 1) { - key = parts.shift() - path = `${parts.join('/')}` - } + if (parts.length > 1) { + key = parts.shift() + path = `${parts.join('/')}` + } - if (path && path.endsWith('/')) { - path = path.substring(0, path.length - 1) - } + if (path && path.endsWith('/')) { + path = path.substring(0, path.length - 1) + } - try { - return { - [name]: new CID(key), - path, - } - } catch (err) { - 
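// NOTE: the getter properties in IpldFormats above make each ipld-* codec a
// lazy dependency: the require() only runs when a block of that codec is
// first touched, and Node's module cache makes repeat accesses cheap. The
// same pattern in isolation:
//   const formats = {
//     get raw () { return require('ipld-raw') } // loaded on first access
//   }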
log.error(err) - throw Boom.badRequest("invalid 'ipfs ref' path") - } + try { + return { + [name]: new CID(key), + path, + } + } catch (err) { + log.error(err) + throw Boom.badRequest("invalid 'ipfs ref' path") } + } } const encodeBufferKeys = (obj, encoding) => { - if (!obj) { - return obj - } + if (!obj) { + return obj + } - if (Buffer.isBuffer(obj)) { - return obj.toString(encoding) - } + if (Buffer.isBuffer(obj)) { + return obj.toString(encoding) + } - Object.keys(obj).forEach(key => { - if (Buffer.isBuffer(obj)) { - obj[key] = obj[key].toString(encoding) + Object.keys(obj).forEach(key => { + if (Buffer.isBuffer(obj)) { + obj[key] = obj[key].toString(encoding) - return - } + return + } - if (typeof obj[key] === 'object') { - obj[key] = encodeBufferKeys(obj[key], encoding) - } - }) + if (typeof obj[key] === 'object') { + obj[key] = encodeBufferKeys(obj[key], encoding) + } + }) - return obj + return obj } exports.get = { - validate: { - query: Joi.object() - .keys({ - 'data-encoding': Joi.string() - .valid('text', 'base64', 'hex') - .default('text'), - 'cid-base': Joi.string().valid(...multibase.names), - }) - .unknown(), - }, - - // uses common parseKey method that returns a `key` - parseArgs: exports.parseKey(), - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler(request, h) { - const { key, path } = request.pre.args - const { ipfs } = request.server.app - - let dataEncoding = request.query['data-encoding'] - - if (dataEncoding === 'text') { - dataEncoding = 'utf8' - } + validate: { + query: Joi.object() + .keys({ + 'data-encoding': Joi.string() + .valid('text', 'base64', 'hex') + .default('text'), + 'cid-base': Joi.string().valid(...multibase.names), + }) + .unknown(), + }, + + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey(), + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + async handler(request, h) { + const { key, path } = request.pre.args + const { ipfs } = request.server.app + + let dataEncoding = request.query['data-encoding'] + + if (dataEncoding === 'text') { + dataEncoding = 'utf8' + } - let result + let result - try { - result = await ipfs.dag.get(key, path) - } catch (err) { - throw Boom.badRequest(err) - } + try { + result = await ipfs.dag.get(key, path) + } catch (err) { + throw Boom.badRequest(err) + } - let value = result.value + let value = result.value - if (!Buffer.isBuffer(result.value) && result.value.toJSON) { - value = result.value.toJSON() - } + if (!Buffer.isBuffer(result.value) && result.value.toJSON) { + value = result.value.toJSON() + } - try { - result.value = encodeBufferKeys(value, dataEncoding) - } catch (err) { - throw Boom.boomify(err) - } + try { + result.value = encodeBufferKeys(value, dataEncoding) + } catch (err) { + throw Boom.boomify(err) + } - return h.response(result.value) - }, + return h.response(result.value) + }, } exports.put = { - validate: { - query: Joi.object() - .keys({ - format: Joi.string().default('cbor'), - 'input-enc': Joi.string().default('json'), - pin: Joi.boolean(), - hash: Joi.string() - .valid(...Object.keys(mh.names)) - .default('sha2-256'), - 'cid-base': Joi.string().valid(...multibase.names), - }) - .unknown(), - }, - - // pre request handler that parses the args and returns `node` - // which is assigned to `request.pre.args` - async parseArgs(request, h) { - if (!request.payload) { - throw Boom.badRequest("File argument 'object data' is required") - } + validate: { 
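// NOTE (review): inside the forEach of encodeBufferKeys above,
// Buffer.isBuffer(obj) can never be true -- the function already returned
// early when obj itself is a Buffer -- so that branch is dead code. The check
// was presumably meant to be per-key:
//   if (Buffer.isBuffer(obj[key])) {
//     obj[key] = obj[key].toString(encoding)
//     return
//   }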
+ query: Joi.object() + .keys({ + format: Joi.string().default('cbor'), + 'input-enc': Joi.string().default('json'), + pin: Joi.boolean(), + hash: Joi.string() + .valid(...Object.keys(mh.names)) + .default('sha2-256'), + 'cid-base': Joi.string().valid(...multibase.names), + }) + .unknown(), + }, + + // pre request handler that parses the args and returns `node` + // which is assigned to `request.pre.args` + async parseArgs(request, h) { + if (!request.payload) { + throw Boom.badRequest("File argument 'object data' is required") + } - const enc = request.query['input-enc'] + const enc = request.query['input-enc'] - if (!request.headers['content-type']) { - throw Boom.badRequest("File argument 'object data' is required") - } + if (!request.headers['content-type']) { + throw Boom.badRequest("File argument 'object data' is required") + } - let data + let data - for await (const part of multipart(request)) { - if (part.type !== 'file') { - continue - } + for await (const part of multipart(request)) { + if (part.type !== 'file') { + continue + } - data = Buffer.concat(await all(part.content)) - } + data = Buffer.concat(await all(part.content)) + } - if (!data) { - throw Boom.badRequest("File argument 'object data' is required") - } + if (!data) { + throw Boom.badRequest("File argument 'object data' is required") + } - let format = request.query.format + let format = request.query.format - if (format === 'cbor') { - format = 'dag-cbor' - } + if (format === 'cbor') { + format = 'dag-cbor' + } - let node - - if (format === 'raw') { - node = data - } else if (enc === 'json') { - try { - node = JSON.parse(data.toString()) - } catch (err) { - throw Boom.badRequest('Failed to parse the JSON: ' + err) - } - } else { - const codec = multicodec[format.toUpperCase().replace(/-/g, '_')] - if (!IpldFormats[codec]) - throw new Error(`Missing IPLD format "${codec}"`) - node = await IpldFormats[codec].util.deserialize(data) - } + let node + + if (format === 'raw') { + node = data + } else if (enc === 'json') { + try { + node = JSON.parse(data.toString()) + } catch (err) { + throw Boom.badRequest('Failed to parse the JSON: ' + err) + } + } else { + const codec = multicodec[format.toUpperCase().replace(/-/g, '_')] + if (!IpldFormats[codec]) throw new Error(`Missing IPLD format "${codec}"`) + node = await IpldFormats[codec].util.deserialize(data) + } - return { - node, - format, - hashAlg: request.query.hash, - } - }, - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler(request, h) { - console.log('========') - console.log('========') - console.log('========') - console.log('========') - console.log('========') - console.log('Incoming request!') - console.log('========') - const { ipfs } = request.server.app - const { node, format, hashAlg } = request.pre.args - - let cid - - try { - cid = await ipfs.dag.put(node, { - format: format, - hashAlg: hashAlg, - }) - } catch (err) { - throw Boom.boomify(err, { message: 'Failed to put node' }) - } + return { + node, + format, + hashAlg: request.query.hash, + } + }, + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + async handler(request, h) { + console.log('========') + console.log('========') + console.log('========') + console.log('========') + console.log('========') + console.log('Incoming request!') + console.log('========') + const { ipfs } = request.server.app + const { node, format, hashAlg } = request.pre.args + + let cid + + try { + cid = await 
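// NOTE: a minimal client-side sketch of what this route ends up serving,
// assuming the ipfs-http-client package pinned in package.json; address and
// values are hypothetical:
//   const ipfsClient = require('ipfs-http-client')
//   const ipfs = ipfsClient('http://127.0.0.1:5001')
//   const cid = await ipfs.dag.put({ hello: 'world' },
//     { format: 'dag-cbor', hashAlg: 'sha2-256' })
//   console.log(cid.toString())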
ipfs.dag.put(node, { + format: format, + hashAlg: hashAlg, + }) + } catch (err) { + throw Boom.boomify(err, { message: 'Failed to put node' }) + } - if (request.query.pin) { - await ipfs.pin.add(cid) - } + if (request.query.pin) { + await ipfs.pin.add(cid) + } - return h.response({ - Cid: { - '/': cidToString(cid, { - base: request.query['cid-base'], - }), - }, - }) - }, + return h.response({ + Cid: { + '/': cidToString(cid, { + base: request.query['cid-base'], + }), + }, + }) + }, } exports.resolve = { - validate: { - query: Joi.object() - .keys({ - 'cid-base': Joi.string().valid(...multibase.names), - }) - .unknown(), - }, - - // uses common parseKey method that returns a `key` - parseArgs: exports.parseKey('argument', 'ref', '"'), - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler(request, h) { - const { ref, path } = request.pre.args - const { ipfs } = request.server.app - - // to be consistent with go we need to return the CID to the last node we've traversed - // along with the path inside that node as the remainder path - try { - let lastCid = ref - let lastRemainderPath = path - - if (path) { - const result = ipfs.dag.resolve(lastCid, path) - while (true) { - const resolveResult = (await result.next()).value - if (!CID.isCID(resolveResult.value)) { - break - } - - lastRemainderPath = resolveResult.remainderPath - lastCid = resolveResult.value - } - } - - return h.response({ - Cid: { - '/': cidToString(lastCid, { - base: request.query['cid-base'], - }), - }, - RemPath: lastRemainderPath || '', - }) - } catch (err) { - throw Boom.boomify(err) + validate: { + query: Joi.object() + .keys({ + 'cid-base': Joi.string().valid(...multibase.names), + }) + .unknown(), + }, + + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey('argument', 'ref', '"'), + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + async handler(request, h) { + const { ref, path } = request.pre.args + const { ipfs } = request.server.app + + // to be consistent with go we need to return the CID to the last node we've traversed + // along with the path inside that node as the remainder path + try { + let lastCid = ref + let lastRemainderPath = path + + if (path) { + const result = ipfs.dag.resolve(lastCid, path) + while (true) { + const resolveResult = (await result.next()).value + if (!CID.isCID(resolveResult.value)) { + break + } + + lastRemainderPath = resolveResult.remainderPath + lastCid = resolveResult.value } - }, + } + + return h.response({ + Cid: { + '/': cidToString(lastCid, { + base: request.query['cid-base'], + }), + }, + RemPath: lastRemainderPath || '', + }) + } catch (err) { + throw Boom.boomify(err) + } + }, } diff --git a/packages/ipfs/src/http/api/resources/files-regular.js b/packages/ipfs/src/http/api/resources/files-regular.js index 0a3770db3e..4ff5fd5113 100644 --- a/packages/ipfs/src/http/api/resources/files-regular.js +++ b/packages/ipfs/src/http/api/resources/files-regular.js @@ -20,408 +20,402 @@ const { map } = require('streaming-iterables') const streamResponse = require('../../utils/stream-response') const toBuffer = async function*(source) { - for await (const chunk of source) { - yield chunk.slice() - } + for await (const chunk of source) { + yield chunk.slice() + } } function numberFromQuery(query, key) { - if (query && query[key] !== undefined) { - const value = parseInt(query[key], 10) - - if (isNaN(value)) { - return undefined - } + if 
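// NOTE: the pin flag above is honoured with a separate ipfs.pin.add() call
// after the put rather than being passed through to ipfs.dag.put, so a crash
// between the two calls could leave the node stored but unpinned.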
(query && query[key] !== undefined) { + const value = parseInt(query[key], 10) - return value + if (isNaN(value)) { + return undefined } + + return value + } } // common pre request handler that parses the args and returns `key` which is assigned to `request.pre.args` exports.parseKey = (request, h) => { - const { arg } = request.query - - if (!arg) { - throw Boom.badRequest("Argument 'key' is required") - } - - const isArray = Array.isArray(arg) - const args = isArray ? arg : [arg] - for (const arg of args) { - if ( - !isIpfs.ipfsPath(arg) && - !isIpfs.cid(arg) && - !isIpfs.ipfsPath('/ipfs/' + arg) - ) { - throw Boom.badRequest(`invalid ipfs ref path '${arg}'`) - } + const { arg } = request.query + + if (!arg) { + throw Boom.badRequest("Argument 'key' is required") + } + + const isArray = Array.isArray(arg) + const args = isArray ? arg : [arg] + for (const arg of args) { + if ( + !isIpfs.ipfsPath(arg) && + !isIpfs.cid(arg) && + !isIpfs.ipfsPath('/ipfs/' + arg) + ) { + throw Boom.badRequest(`invalid ipfs ref path '${arg}'`) } + } - return { - key: isArray ? args : arg, - options: { - offset: numberFromQuery(request.query, 'offset'), - length: numberFromQuery(request.query, 'length'), - }, - } + return { + key: isArray ? args : arg, + options: { + offset: numberFromQuery(request.query, 'offset'), + length: numberFromQuery(request.query, 'length'), + }, + } } exports.cat = { - // uses common parseKey method that returns a `key` - parseArgs: exports.parseKey, - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - handler(request, h) { - const { ipfs } = request.server.app - const { key, options } = request.pre.args - - return streamResponse(request, h, () => ipfs.cat(key, options), { - onError(err) { - err.message = - err.message === 'file does not exist' - ? err.message - : 'Failed to cat file: ' + err.message - }, - }) - }, + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey, + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + handler(request, h) { + const { ipfs } = request.server.app + const { key, options } = request.pre.args + + return streamResponse(request, h, () => ipfs.cat(key, options), { + onError(err) { + err.message = + err.message === 'file does not exist' + ? 
err.message + : 'Failed to cat file: ' + err.message + }, + }) + }, } exports.get = { - // uses common parseKey method that returns a `key` - parseArgs: exports.parseKey, - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - handler(request, h) { - const { ipfs } = request.server.app - const { key } = request.pre.args - - return streamResponse(request, h, () => - pipe( - ipfs.get(key), - async function*(source) { - for await (const file of source) { - const header = { - name: file.path, - } - - if (file.content) { - yield { - header: { ...header, size: file.size }, - body: toBuffer(file.content), - } - } else { - yield { header: { ...header, type: 'directory' } } - } - } - }, - tar.pack(), - toBuffer - ) - ) - }, + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey, + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + handler(request, h) { + const { ipfs } = request.server.app + const { key } = request.pre.args + + return streamResponse(request, h, () => + pipe( + ipfs.get(key), + async function*(source) { + for await (const file of source) { + const header = { + name: file.path, + } + + if (file.content) { + yield { + header: { ...header, size: file.size }, + body: toBuffer(file.content), + } + } else { + yield { header: { ...header, type: 'directory' } } + } + } + }, + tar.pack(), + toBuffer + ) + ) + }, } exports.add = { - validate: { - query: Joi.object() - .keys({ - 'cid-version': Joi.number() - .integer() - .min(0) - .max(1) - .default(0), - 'cid-base': Joi.string().valid(...multibase.names), - 'raw-leaves': Joi.boolean(), - 'only-hash': Joi.boolean(), - pin: Joi.boolean().default(true), - 'wrap-with-directory': Joi.boolean(), - 'file-import-concurrency': Joi.number() - .integer() - .min(0) - .default(50), - 'block-write-concurrency': Joi.number() - .integer() - .min(0) - .default(10), - chunker: Joi.string(), - trickle: Joi.boolean(), - preload: Joi.boolean().default(true), - }) - // TODO: Necessary until validate "recursive", "stream-channels" etc. 
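// NOTE: over HTTP this handler serves roughly (hypothetical CID):
//   POST /api/v0/cat?arg=QmHash...&offset=0&length=1024
// where offset and length come from the shared parseKey pre-handler above.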
- .options({ allowUnknown: true }), - }, - - handler(request, h) { - if (!request.payload) { - throw Boom.badRequest('Array, Buffer, or String is required.') - } - console.log('========') - console.log('========') - console.log('========') - console.log('========') - console.log('========') - console.log('Incoming request!') - console.log('========') - // console.log(request.payload); - - const { ipfs } = request.server.app - let filesParsed = false - let currentFileName - const output = new PassThrough() - const progressHandler = bytes => { - output.write( - JSON.stringify({ - Name: currentFileName, - Bytes: bytes, - }) + '\n' - ) - } - - pipe( - multipart(request), - async function*(source) { - console.log('-------') - console.log({ source }) - for await (const entry of source) { - console.log('-------') - // console.log({ entry }); - currentFileName = entry.name || 'unknown' - - if (entry.type === 'file') { - filesParsed = true - - // Just for logging - for await (const chunk of entry.content) { - console.log({ chunk }) - } - // end of extra logging code - - yield { - path: entry.name, - content: entry.content, - mode: entry.mode, - mtime: entry.mtime, - } - } - - if (entry.type === 'directory') { - filesParsed = true - - yield { - path: entry.name, - mode: entry.mode, - mtime: entry.mtime, - } - } - } - }, - function(source) { - return ipfs.add(source, { - cidVersion: request.query['cid-version'], - rawLeaves: request.query['raw-leaves'], - progress: request.query.progress - ? progressHandler - : () => {}, - onlyHash: request.query['only-hash'], - hashAlg: request.query.hash, - wrapWithDirectory: request.query['wrap-with-directory'], - pin: request.query.pin, - chunker: request.query.chunker, - trickle: request.query.trickle, - preload: request.query.preload, - - // this has to be hardcoded to 1 because we can only read one file - // at a time from a http request and we have to consume it completely - // before we can read the next file - fileImportConcurrency: 1, - blockWriteConcurrency: - request.query['block-write-concurrency'], - }) - }, - map(file => { - const entry = { - Name: file.path, - Hash: cidToString(file.cid, { - base: request.query['cid-base'], - }), - Size: file.size, - Mode: - file.mode === undefined - ? 
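// NOTE: entries coming out of ipfs-multipart carry { type, name, content,
// mode, mtime }; directory entries have no content stream, which is why the
// generator above yields two different shapes.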
undefined - : file.mode.toString(8).padStart(4, '0'), - } - - if (file.mtime) { - entry.Mtime = file.mtime.secs - entry.MtimeNsecs = file.mtime.nsecs - } - - return entry - }), - ndjson.stringify, - toIterable.sink(output) - ) - .then(() => { - if (!filesParsed) { - throw new Error("File argument 'data' is required.") - } - }) - .catch(err => { - if (!filesParsed) { - output.write(' ') - } - - request.raw.res.addTrailers({ - 'X-Stream-Error': JSON.stringify({ - Message: err.message, - Code: 0, - }), - }) - }) - .then(() => { - output.end() - }) - - return h - .response(output) - .header('x-chunked-output', '1') - .header('content-type', 'application/json') - .header('Trailer', 'X-Stream-Error') - }, -} + validate: { + query: Joi.object() + .keys({ + 'cid-version': Joi.number() + .integer() + .min(0) + .max(1) + .default(0), + 'cid-base': Joi.string().valid(...multibase.names), + 'raw-leaves': Joi.boolean(), + 'only-hash': Joi.boolean(), + pin: Joi.boolean().default(true), + 'wrap-with-directory': Joi.boolean(), + 'file-import-concurrency': Joi.number() + .integer() + .min(0) + .default(50), + 'block-write-concurrency': Joi.number() + .integer() + .min(0) + .default(10), + chunker: Joi.string(), + trickle: Joi.boolean(), + preload: Joi.boolean().default(true), + }) + // TODO: Necessary until validate "recursive", "stream-channels" etc. + .options({ allowUnknown: true }), + }, + + handler(request, h) { + if (!request.payload) { + throw Boom.badRequest('Array, Buffer, or String is required.') + } + console.log('========') + console.log('========') + console.log('========') + console.log('========') + console.log('========') + console.log('Incoming request!') + console.log('========') + // console.log(request.payload); + + const { ipfs } = request.server.app + let filesParsed = false + let currentFileName + const output = new PassThrough() + const progressHandler = bytes => { + output.write( + JSON.stringify({ + Name: currentFileName, + Bytes: bytes, + }) + '\n' + ) + } -exports.ls = { - validate: { - query: Joi.object() - .keys({ - 'cid-base': Joi.string().valid(...multibase.names), - stream: Joi.boolean(), - }) - .unknown(), - }, + pipe( + multipart(request), + async function*(source) { + console.log('-------') + console.log({ source }) + for await (const entry of source) { + console.log('-------') + // console.log({ entry }); + currentFileName = entry.name || 'unknown' + + if (entry.type === 'file') { + filesParsed = true + + // Just for logging + for await (const chunk of entry.content) { + console.log({ chunk }) + } + // end of extra logging code - // uses common parseKey method that returns a `key` - parseArgs: exports.parseKey, - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler(request, h) { - const { ipfs } = request.server.app - const { key } = request.pre.args - const recursive = request.query && request.query.recursive === 'true' - const cidBase = request.query['cid-base'] - - const mapLink = link => { - const output = { - Name: link.name, - Hash: cidToString(link.cid, { base: cidBase }), - Size: link.size, - Type: toTypeCode(link.type), - Depth: link.depth, - Mode: link.mode.toString(8).padStart(4, '0'), + yield { + path: entry.name, + content: entry.content, + mode: entry.mode, + mtime: entry.mtime, } + } - if (link.mtime) { - output.Mtime = link.mtime.secs + if (entry.type === 'directory') { + filesParsed = true - if ( - link.mtime.nsecs !== null && - link.mtime.nsecs !== undefined - ) { - output.MtimeNsecs = 
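// NOTE: file modes are serialised above as zero-padded octal strings, e.g.
//   (0o644).toString(8).padStart(4, '0') // => '0644'
// and mtimes are split into whole seconds (Mtime) plus a nanosecond
// remainder (MtimeNsecs).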
link.mtime.nsecs - } + yield { + path: entry.name, + mode: entry.mode, + mtime: entry.mtime, } - - return output + } + } + }, + function(source) { + return ipfs.add(source, { + cidVersion: request.query['cid-version'], + rawLeaves: request.query['raw-leaves'], + progress: request.query.progress ? progressHandler : () => {}, + onlyHash: request.query['only-hash'], + hashAlg: request.query.hash, + wrapWithDirectory: request.query['wrap-with-directory'], + pin: request.query.pin, + chunker: request.query.chunker, + trickle: request.query.trickle, + preload: request.query.preload, + + // this has to be hardcoded to 1 because we can only read one file + // at a time from a http request and we have to consume it completely + // before we can read the next file + fileImportConcurrency: 1, + blockWriteConcurrency: request.query['block-write-concurrency'], + }) + }, + map(file => { + const entry = { + Name: file.path, + Hash: cidToString(file.cid, { + base: request.query['cid-base'], + }), + Size: file.size, + Mode: + file.mode === undefined + ? undefined + : file.mode.toString(8).padStart(4, '0'), } - if (!request.query.stream) { - let links - try { - links = await all(ipfs.ls(key, { recursive })) - } catch (err) { - throw Boom.boomify(err, { message: 'Failed to list dir' }) - } + if (file.mtime) { + entry.Mtime = file.mtime.secs + entry.MtimeNsecs = file.mtime.nsecs + } - return h.response({ - Objects: [{ Hash: key, Links: links.map(mapLink) }], - }) + return entry + }), + ndjson.stringify, + toIterable.sink(output) + ) + .then(() => { + if (!filesParsed) { + throw new Error("File argument 'data' is required.") + } + }) + .catch(err => { + if (!filesParsed) { + output.write(' ') } - return streamResponse(request, h, () => - pipe( - ipfs.ls(key, { recursive }), - map(link => ({ - Objects: [{ Hash: key, Links: [mapLink(link)] }], - })), - ndjson.stringify - ) - ) - }, + request.raw.res.addTrailers({ + 'X-Stream-Error': JSON.stringify({ + Message: err.message, + Code: 0, + }), + }) + }) + .then(() => { + output.end() + }) + + return h + .response(output) + .header('x-chunked-output', '1') + .header('content-type', 'application/json') + .header('Trailer', 'X-Stream-Error') + }, } -function toTypeCode(type) { - switch (type) { - case 'dir': - return 1 - case 'file': - return 2 - default: - return 0 +exports.ls = { + validate: { + query: Joi.object() + .keys({ + 'cid-base': Joi.string().valid(...multibase.names), + stream: Joi.boolean(), + }) + .unknown(), + }, + + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey, + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + async handler(request, h) { + const { ipfs } = request.server.app + const { key } = request.pre.args + const recursive = request.query && request.query.recursive === 'true' + const cidBase = request.query['cid-base'] + + const mapLink = link => { + const output = { + Name: link.name, + Hash: cidToString(link.cid, { base: cidBase }), + Size: link.size, + Type: toTypeCode(link.type), + Depth: link.depth, + Mode: link.mode.toString(8).padStart(4, '0'), + } + + if (link.mtime) { + output.Mtime = link.mtime.secs + + if (link.mtime.nsecs !== null && link.mtime.nsecs !== undefined) { + output.MtimeNsecs = link.mtime.nsecs + } + } + + return output } -} -exports.refs = { - validate: { - query: Joi.object() - .keys({ - recursive: Joi.boolean().default(false), - format: Joi.string().default(Format.default), - edges: Joi.boolean().default(false), - unique: 
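// NOTE: toTypeCode() above maps unixfs link types onto the numeric codes
// go-ipfs reports in ls output -- 1 for directories, 2 for files, 0 for
// anything else -- and the explicit null/undefined check on mtime.nsecs is
// needed because a nanosecond value of 0 is legitimate.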
Joi.boolean().default(false), - 'max-depth': Joi.number() - .integer() - .min(-1), - }) - .unknown(), - }, + if (!request.query.stream) { + let links + try { + links = await all(ipfs.ls(key, { recursive })) + } catch (err) { + throw Boom.boomify(err, { message: 'Failed to list dir' }) + } + + return h.response({ + Objects: [{ Hash: key, Links: links.map(mapLink) }], + }) + } - // uses common parseKey method that returns a `key` - parseArgs: exports.parseKey, + return streamResponse(request, h, () => + pipe( + ipfs.ls(key, { recursive }), + map(link => ({ + Objects: [{ Hash: key, Links: [mapLink(link)] }], + })), + ndjson.stringify + ) + ) + }, +} - // main route handler which is called after the above `parseArgs`, but only if the args were valid - handler(request, h) { - const { ipfs } = request.server.app - const { key } = request.pre.args +function toTypeCode(type) { + switch (type) { + case 'dir': + return 1 + case 'file': + return 2 + default: + return 0 + } +} - const options = { - recursive: request.query.recursive, - format: request.query.format, - edges: request.query.edges, - unique: request.query.unique, - maxDepth: request.query['max-depth'], - } +exports.refs = { + validate: { + query: Joi.object() + .keys({ + recursive: Joi.boolean().default(false), + format: Joi.string().default(Format.default), + edges: Joi.boolean().default(false), + unique: Joi.boolean().default(false), + 'max-depth': Joi.number() + .integer() + .min(-1), + }) + .unknown(), + }, + + // uses common parseKey method that returns a `key` + parseArgs: exports.parseKey, + + // main route handler which is called after the above `parseArgs`, but only if the args were valid + handler(request, h) { + const { ipfs } = request.server.app + const { key } = request.pre.args + + const options = { + recursive: request.query.recursive, + format: request.query.format, + edges: request.query.edges, + unique: request.query.unique, + maxDepth: request.query['max-depth'], + } - return streamResponse(request, h, () => - pipe( - ipfs.refs(key, options), - map(({ ref, err }) => ({ Ref: ref, Err: err })), - ndjson.stringify - ) - ) - }, + return streamResponse(request, h, () => + pipe( + ipfs.refs(key, options), + map(({ ref, err }) => ({ Ref: ref, Err: err })), + ndjson.stringify + ) + ) + }, } exports.refs.local = { - // main route handler - handler(request, h) { - const { ipfs } = request.server.app - - return streamResponse(request, h, () => - pipe( - ipfs.refs.local(), - map(({ ref, err }) => ({ Ref: ref, Err: err })), - ndjson.stringify - ) - ) - }, + // main route handler + handler(request, h) { + const { ipfs } = request.server.app + + return streamResponse(request, h, () => + pipe( + ipfs.refs.local(), + map(({ ref, err }) => ({ Ref: ref, Err: err })), + ndjson.stringify + ) + ) + }, } From 06a20c2f7817a065f0b1bc4acb5ce66dcb30c264 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 13:43:42 -0500 Subject: [PATCH 04/20] More formatting reversions --- packages/ipfs/src/http/api/resources/dag.js | 42 ++++++++++----------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/packages/ipfs/src/http/api/resources/dag.js b/packages/ipfs/src/http/api/resources/dag.js index a678e9587b..c6129d6a67 100644 --- a/packages/ipfs/src/http/api/resources/dag.js +++ b/packages/ipfs/src/http/api/resources/dag.js @@ -52,12 +52,12 @@ const IpldFormats = { }, get [multicodec.ZCASH_BLOCK]() { return require('ipld-zcash') - }, + } } // common pre request handler that parses the args and returns `key` which is assigned to 
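// NOTE: 'max-depth' above allows -1, which conventionally means "no depth
// limit" for recursive ref listings; that reading is an assumption here, the
// patch itself does not spell it out.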
`request.pre.args` exports.parseKey = (argument = 'Argument', name = 'key', quote = "'") => { - return request => { + return (request) => { if (!request.query.arg) { // for compatibility with go error messages throw Boom.badRequest(`${argument} ${quote}${name}${quote} is required`) @@ -84,7 +84,7 @@ exports.parseKey = (argument = 'Argument', name = 'key', quote = "'") => { try { return { [name]: new CID(key), - path, + path } } catch (err) { log.error(err) @@ -102,7 +102,7 @@ const encodeBufferKeys = (obj, encoding) => { return obj.toString(encoding) } - Object.keys(obj).forEach(key => { + Object.keys(obj).forEach((key) => { if (Buffer.isBuffer(obj)) { obj[key] = obj[key].toString(encoding) @@ -124,9 +124,9 @@ exports.get = { 'data-encoding': Joi.string() .valid('text', 'base64', 'hex') .default('text'), - 'cid-base': Joi.string().valid(...multibase.names), + 'cid-base': Joi.string().valid(...multibase.names) }) - .unknown(), + .unknown() }, // uses common parseKey method that returns a `key` @@ -164,7 +164,7 @@ exports.get = { } return h.response(result.value) - }, + } } exports.put = { @@ -177,9 +177,9 @@ exports.put = { hash: Joi.string() .valid(...Object.keys(mh.names)) .default('sha2-256'), - 'cid-base': Joi.string().valid(...multibase.names), + 'cid-base': Joi.string().valid(...multibase.names) }) - .unknown(), + .unknown() }, // pre request handler that parses the args and returns `node` @@ -234,7 +234,7 @@ exports.put = { return { node, format, - hashAlg: request.query.hash, + hashAlg: request.query.hash } }, @@ -255,7 +255,7 @@ exports.put = { try { cid = await ipfs.dag.put(node, { format: format, - hashAlg: hashAlg, + hashAlg: hashAlg }) } catch (err) { throw Boom.boomify(err, { message: 'Failed to put node' }) @@ -268,20 +268,20 @@ exports.put = { return h.response({ Cid: { '/': cidToString(cid, { - base: request.query['cid-base'], - }), - }, + base: request.query['cid-base'] + }) + } }) - }, + } } exports.resolve = { validate: { query: Joi.object() .keys({ - 'cid-base': Joi.string().valid(...multibase.names), + 'cid-base': Joi.string().valid(...multibase.names) }) - .unknown(), + .unknown() }, // uses common parseKey method that returns a `key` @@ -314,13 +314,13 @@ exports.resolve = { return h.response({ Cid: { '/': cidToString(lastCid, { - base: request.query['cid-base'], - }), + base: request.query['cid-base'] + }) }, - RemPath: lastRemainderPath || '', + RemPath: lastRemainderPath || '' }) } catch (err) { throw Boom.boomify(err) } - }, + } } From 14f3b1b980fbe5879d6b6a60e0a9b69321be2e87 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 13:57:03 -0500 Subject: [PATCH 05/20] More formatting reversions --- packages/ipfs/src/http/api/resources/dag.js | 36 ++++---- .../src/http/api/resources/files-regular.js | 85 +++++++++---------- 2 files changed, 54 insertions(+), 67 deletions(-) diff --git a/packages/ipfs/src/http/api/resources/dag.js b/packages/ipfs/src/http/api/resources/dag.js index c6129d6a67..28681e99f4 100644 --- a/packages/ipfs/src/http/api/resources/dag.js +++ b/packages/ipfs/src/http/api/resources/dag.js @@ -133,7 +133,7 @@ exports.get = { parseArgs: exports.parseKey(), // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler(request, h) { + async handler (request, h) { const { key, path } = request.pre.args const { ipfs } = request.server.app @@ -169,22 +169,20 @@ exports.get = { exports.put = { validate: { - query: Joi.object() - .keys({ - format: Joi.string().default('cbor'), - 'input-enc': 
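// NOTE (review): in the exports.put hunk that follows, the reverted line
// `async (request, h) {` has lost its method name -- in an object literal
// that defines a method literally called `async`, so the route's parseArgs
// hook would disappear. The reversion presumably intends
// `async parseArgs (request, h) {`.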
Joi.string().default('json'), - pin: Joi.boolean(), - hash: Joi.string() - .valid(...Object.keys(mh.names)) - .default('sha2-256'), - 'cid-base': Joi.string().valid(...multibase.names) - }) - .unknown() + query: Joi.object().keys({ + format: Joi.string().default('cbor'), + 'input-enc': Joi.string().default('json'), + pin: Joi.boolean(), + hash: Joi.string() + .valid(...Object.keys(mh.names)) + .default('sha2-256'), + 'cid-base': Joi.string().valid(...multibase.names) + }).unknown() }, // pre request handler that parses the args and returns `node` // which is assigned to `request.pre.args` - async parseArgs(request, h) { + async (request, h) { if (!request.payload) { throw Boom.badRequest("File argument 'object data' is required") } @@ -239,7 +237,7 @@ exports.put = { }, // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler(request, h) { + async handler (request, h) { console.log('========') console.log('========') console.log('========') @@ -277,18 +275,16 @@ exports.put = { exports.resolve = { validate: { - query: Joi.object() - .keys({ - 'cid-base': Joi.string().valid(...multibase.names) - }) - .unknown() + query: Joi.object().keys({ + 'cid-base': Joi.string().valid(...multibase.names) + }).unknown() }, // uses common parseKey method that returns a `key` parseArgs: exports.parseKey('argument', 'ref', '"'), // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler(request, h) { + async handler (request, h) { const { ref, path } = request.pre.args const { ipfs } = request.server.app diff --git a/packages/ipfs/src/http/api/resources/files-regular.js b/packages/ipfs/src/http/api/resources/files-regular.js index 4ff5fd5113..e4a69ec48b 100644 --- a/packages/ipfs/src/http/api/resources/files-regular.js +++ b/packages/ipfs/src/http/api/resources/files-regular.js @@ -25,7 +25,7 @@ const toBuffer = async function*(source) { } } -function numberFromQuery(query, key) { +function numberFromQuery (query, key) { if (query && query[key] !== undefined) { const value = parseInt(query[key], 10) @@ -48,11 +48,7 @@ exports.parseKey = (request, h) => { const isArray = Array.isArray(arg) const args = isArray ? arg : [arg] for (const arg of args) { - if ( - !isIpfs.ipfsPath(arg) && - !isIpfs.cid(arg) && - !isIpfs.ipfsPath('/ipfs/' + arg) - ) { + if (!isIpfs.ipfsPath(arg) && !isIpfs.cid(arg) && !isIpfs.ipfsPath('/ipfs/' + arg)) { throw Boom.badRequest(`invalid ipfs ref path '${arg}'`) } } @@ -61,8 +57,8 @@ exports.parseKey = (request, h) => { key: isArray ? args : arg, options: { offset: numberFromQuery(request.query, 'offset'), - length: numberFromQuery(request.query, 'length'), - }, + length: numberFromQuery(request.query, 'length') + } } } @@ -78,12 +74,10 @@ exports.cat = { return streamResponse(request, h, () => ipfs.cat(key, options), { onError(err) { err.message = - err.message === 'file does not exist' - ? err.message - : 'Failed to cat file: ' + err.message - }, + err.message === 'file does not exist' ? 
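// NOTE: the ref validation in parseKey leans on is-ipfs; with hypothetical
// values, all of these are accepted:
//   isIpfs.cid('QmHash...')                      // bare CID
//   isIpfs.ipfsPath('/ipfs/QmHash...')           // full path
//   isIpfs.ipfsPath('/ipfs/' + 'QmHash/readme')  // bare CID plus subpath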
err.message : 'Failed to cat file: ' + err.message + } }) - }, + } } exports.get = { @@ -101,13 +95,13 @@ exports.get = { async function*(source) { for await (const file of source) { const header = { - name: file.path, + name: file.path } if (file.content) { yield { header: { ...header, size: file.size }, - body: toBuffer(file.content), + body: toBuffer(file.content) } } else { yield { header: { ...header, type: 'directory' } } @@ -118,7 +112,7 @@ exports.get = { toBuffer ) ) - }, + } } exports.add = { @@ -145,10 +139,10 @@ exports.add = { .default(10), chunker: Joi.string(), trickle: Joi.boolean(), - preload: Joi.boolean().default(true), + preload: Joi.boolean().default(true) }) // TODO: Necessary until validate "recursive", "stream-channels" etc. - .options({ allowUnknown: true }), + .options({ allowUnknown: true }) }, handler(request, h) { @@ -168,18 +162,18 @@ exports.add = { let filesParsed = false let currentFileName const output = new PassThrough() - const progressHandler = bytes => { + const progressHandler = (bytes) => { output.write( JSON.stringify({ Name: currentFileName, - Bytes: bytes, + Bytes: bytes }) + '\n' ) } pipe( multipart(request), - async function*(source) { + async function * (source) { console.log('-------') console.log({ source }) for await (const entry of source) { @@ -200,7 +194,7 @@ exports.add = { path: entry.name, content: entry.content, mode: entry.mode, - mtime: entry.mtime, + mtime: entry.mtime } } @@ -210,7 +204,7 @@ exports.add = { yield { path: entry.name, mode: entry.mode, - mtime: entry.mtime, + mtime: entry.mtime } } } @@ -232,20 +226,17 @@ exports.add = { // at a time from a http request and we have to consume it completely // before we can read the next file fileImportConcurrency: 1, - blockWriteConcurrency: request.query['block-write-concurrency'], + blockWriteConcurrency: request.query['block-write-concurrency'] }) }, - map(file => { + map((file) => { const entry = { Name: file.path, Hash: cidToString(file.cid, { - base: request.query['cid-base'], + base: request.query['cid-base'] }), Size: file.size, - Mode: - file.mode === undefined - ? undefined - : file.mode.toString(8).padStart(4, '0'), + Mode: file.mode === undefined ? 
undefined : file.mode.toString(8).padStart(4, '0') } if (file.mtime) { @@ -263,7 +254,7 @@ exports.add = { throw new Error("File argument 'data' is required.") } }) - .catch(err => { + .catch((err) => { if (!filesParsed) { output.write(' ') } @@ -271,8 +262,8 @@ exports.add = { request.raw.res.addTrailers({ 'X-Stream-Error': JSON.stringify({ Message: err.message, - Code: 0, - }), + Code: 0 + }) }) }) .then(() => { @@ -284,7 +275,7 @@ exports.add = { .header('x-chunked-output', '1') .header('content-type', 'application/json') .header('Trailer', 'X-Stream-Error') - }, + } } exports.ls = { @@ -292,9 +283,9 @@ exports.ls = { query: Joi.object() .keys({ 'cid-base': Joi.string().valid(...multibase.names), - stream: Joi.boolean(), + stream: Joi.boolean() }) - .unknown(), + .unknown() }, // uses common parseKey method that returns a `key` @@ -307,14 +298,14 @@ exports.ls = { const recursive = request.query && request.query.recursive === 'true' const cidBase = request.query['cid-base'] - const mapLink = link => { + const mapLink = (link) => { const output = { Name: link.name, Hash: cidToString(link.cid, { base: cidBase }), Size: link.size, Type: toTypeCode(link.type), Depth: link.depth, - Mode: link.mode.toString(8).padStart(4, '0'), + Mode: link.mode.toString(8).padStart(4, '0') } if (link.mtime) { @@ -337,20 +328,20 @@ exports.ls = { } return h.response({ - Objects: [{ Hash: key, Links: links.map(mapLink) }], + Objects: [{ Hash: key, Links: links.map(mapLink) }] }) } return streamResponse(request, h, () => pipe( ipfs.ls(key, { recursive }), - map(link => ({ - Objects: [{ Hash: key, Links: [mapLink(link)] }], + map((link) => ({ + Objects: [{ Hash: key, Links: [mapLink(link)] }] })), ndjson.stringify ) ) - }, + } } function toTypeCode(type) { @@ -374,9 +365,9 @@ exports.refs = { unique: Joi.boolean().default(false), 'max-depth': Joi.number() .integer() - .min(-1), + .min(-1) }) - .unknown(), + .unknown() }, // uses common parseKey method that returns a `key` @@ -392,7 +383,7 @@ exports.refs = { format: request.query.format, edges: request.query.edges, unique: request.query.unique, - maxDepth: request.query['max-depth'], + maxDepth: request.query['max-depth'] } return streamResponse(request, h, () => @@ -402,7 +393,7 @@ exports.refs = { ndjson.stringify ) ) - }, + } } exports.refs.local = { @@ -417,5 +408,5 @@ exports.refs.local = { ndjson.stringify ) ) - }, + } } From 14ace2229f8da7630f86178f45e0528598e2d369 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 14:00:53 -0500 Subject: [PATCH 06/20] More formatting reversions --- packages/ipfs/src/http/api/resources/dag.js | 53 +++++++++---------- .../src/http/api/resources/files-regular.js | 9 ++-- 2 files changed, 31 insertions(+), 31 deletions(-) diff --git a/packages/ipfs/src/http/api/resources/dag.js b/packages/ipfs/src/http/api/resources/dag.js index 28681e99f4..deb0dd40b6 100644 --- a/packages/ipfs/src/http/api/resources/dag.js +++ b/packages/ipfs/src/http/api/resources/dag.js @@ -14,43 +14,43 @@ const log = debug('ipfs:http-api:dag') log.error = debug('ipfs:http-api:dag:error') const IpldFormats = { - get [multicodec.RAW]() { + get [multicodec.RAW] () { return require('ipld-raw') }, - get [multicodec.DAG_PB]() { + get [multicodec.DAG_PB] () { return require('ipld-dag-pb') }, - get [multicodec.DAG_CBOR]() { + get [multicodec.DAG_CBOR] () { return require('ipld-dag-cbor') }, - get [multicodec.BITCOIN_BLOCK]() { + get [multicodec.BITCOIN_BLOCK] () { return require('ipld-bitcoin') }, - get [multicodec.ETH_ACCOUNT_SNAPSHOT]() { + get 
[multicodec.ETH_ACCOUNT_SNAPSHOT] () { return require('ipld-ethereum').ethAccountSnapshot }, - get [multicodec.ETH_BLOCK]() { + get [multicodec.ETH_BLOCK] () { return require('ipld-ethereum').ethBlock }, - get [multicodec.ETH_BLOCK_LIST]() { + get [multicodec.ETH_BLOCK_LIST] () { return require('ipld-ethereum').ethBlockList }, - get [multicodec.ETH_STATE_TRIE]() { + get [multicodec.ETH_STATE_TRIE] () { return require('ipld-ethereum').ethStateTrie }, - get [multicodec.ETH_STORAGE_TRIE]() { + get [multicodec.ETH_STORAGE_TRIE] () { return require('ipld-ethereum').ethStorageTrie }, - get [multicodec.ETH_TX]() { + get [multicodec.ETH_TX] () { return require('ipld-ethereum').ethTx }, - get [multicodec.ETH_TX_TRIE]() { + get [multicodec.ETH_TX_TRIE] () { return require('ipld-ethereum').ethTxTrie }, - get [multicodec.GIT_RAW]() { + get [multicodec.GIT_RAW] () { return require('ipld-git') }, - get [multicodec.ZCASH_BLOCK]() { + get [multicodec.ZCASH_BLOCK] () { return require('ipld-zcash') } } @@ -102,7 +102,7 @@ const encodeBufferKeys = (obj, encoding) => { return obj.toString(encoding) } - Object.keys(obj).forEach((key) => { + Object.keys(obj).forEach(key => { if (Buffer.isBuffer(obj)) { obj[key] = obj[key].toString(encoding) @@ -119,14 +119,12 @@ const encodeBufferKeys = (obj, encoding) => { exports.get = { validate: { - query: Joi.object() - .keys({ - 'data-encoding': Joi.string() - .valid('text', 'base64', 'hex') - .default('text'), - 'cid-base': Joi.string().valid(...multibase.names) - }) - .unknown() + query: Joi.object().keys({ + 'data-encoding': Joi.string() + .valid('text', 'base64', 'hex') + .default('text'), + 'cid-base': Joi.string().valid(...multibase.names) + }).unknown() }, // uses common parseKey method that returns a `key` @@ -134,7 +132,10 @@ exports.get = { // main route handler which is called after the above `parseArgs`, but only if the args were valid async handler (request, h) { - const { key, path } = request.pre.args + const { + key, + path + } = request.pre.args const { ipfs } = request.server.app let dataEncoding = request.query['data-encoding'] @@ -173,16 +174,14 @@ exports.put = { format: Joi.string().default('cbor'), 'input-enc': Joi.string().default('json'), pin: Joi.boolean(), - hash: Joi.string() - .valid(...Object.keys(mh.names)) - .default('sha2-256'), + hash: Joi.string().valid(...Object.keys(mh.names)).default('sha2-256'), 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, // pre request handler that parses the args and returns `node` // which is assigned to `request.pre.args` - async (request, h) { + async parsePargs (request, h) { if (!request.payload) { throw Boom.badRequest("File argument 'object data' is required") } diff --git a/packages/ipfs/src/http/api/resources/files-regular.js b/packages/ipfs/src/http/api/resources/files-regular.js index e4a69ec48b..c54c39ad59 100644 --- a/packages/ipfs/src/http/api/resources/files-regular.js +++ b/packages/ipfs/src/http/api/resources/files-regular.js @@ -73,8 +73,9 @@ exports.cat = { return streamResponse(request, h, () => ipfs.cat(key, options), { onError(err) { - err.message = - err.message === 'file does not exist' ? err.message : 'Failed to cat file: ' + err.message + err.message = err.message === 'file does not exist' + ? 
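The IpldFormats map restored above uses property getters so that each ipld-* codec is only require()d the first time it is accessed, instead of at startup. The same pattern in isolation, with two of the codecs from the map:

'use strict'

// Each getter defers its require() until the property is first read, so
// unused codec bundles never load; Node's module cache keeps later reads cheap.
const lazyFormats = {
  get raw () {
    return require('ipld-raw')
  },
  get dagCbor () {
    return require('ipld-dag-cbor')
  }
}

// Nothing has loaded yet; this line pulls in ipld-dag-cbor only
const codec = lazyFormats.dagCbor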
err.message + : 'Failed to cat file: ' + err.message } }) } @@ -92,7 +93,7 @@ exports.get = { return streamResponse(request, h, () => pipe( ipfs.get(key), - async function*(source) { + async function * (source) { for await (const file of source) { const header = { name: file.path @@ -292,7 +293,7 @@ exports.ls = { parseArgs: exports.parseKey, // main route handler which is called after the above `parseArgs`, but only if the args were valid - async handler(request, h) { + async handler (request, h) { const { ipfs } = request.server.app const { key } = request.pre.args const recursive = request.query && request.query.recursive === 'true' From 427fa4a3a8c5b2b606ea325f64013b275cf47ad4 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 14:05:38 -0500 Subject: [PATCH 07/20] More formatting reversions --- .../src/http/api/resources/files-regular.js | 76 +++++++------------ 1 file changed, 28 insertions(+), 48 deletions(-) diff --git a/packages/ipfs/src/http/api/resources/files-regular.js b/packages/ipfs/src/http/api/resources/files-regular.js index c54c39ad59..9de4dea8a0 100644 --- a/packages/ipfs/src/http/api/resources/files-regular.js +++ b/packages/ipfs/src/http/api/resources/files-regular.js @@ -120,24 +120,14 @@ exports.add = { validate: { query: Joi.object() .keys({ - 'cid-version': Joi.number() - .integer() - .min(0) - .max(1) - .default(0), + 'cid-version': Joi.number().integer().min(0).max(1).default(0), 'cid-base': Joi.string().valid(...multibase.names), 'raw-leaves': Joi.boolean(), 'only-hash': Joi.boolean(), pin: Joi.boolean().default(true), 'wrap-with-directory': Joi.boolean(), - 'file-import-concurrency': Joi.number() - .integer() - .min(0) - .default(50), - 'block-write-concurrency': Joi.number() - .integer() - .min(0) - .default(10), + 'file-import-concurrency': Joi.number().integer().min(0).default(50), + 'block-write-concurrency': Joi.number().integer().min(0).default(10), chunker: Joi.string(), trickle: Joi.boolean(), preload: Joi.boolean().default(true) @@ -163,12 +153,11 @@ exports.add = { let filesParsed = false let currentFileName const output = new PassThrough() - const progressHandler = (bytes) => { - output.write( - JSON.stringify({ - Name: currentFileName, - Bytes: bytes - }) + '\n' + const progressHandler = bytes => { + output.write(JSON.stringify({ + Name: currentFileName, + Bytes: bytes + }) + '\n' ) } @@ -210,7 +199,7 @@ exports.add = { } } }, - function(source) { + function (source) { return ipfs.add(source, { cidVersion: request.query['cid-version'], rawLeaves: request.query['raw-leaves'], @@ -233,9 +222,7 @@ exports.add = { map((file) => { const entry = { Name: file.path, - Hash: cidToString(file.cid, { - base: request.query['cid-base'] - }), + Hash: cidToString(file.cid, { base: request.query['cid-base'] }), Size: file.size, Mode: file.mode === undefined ? 
undefined : file.mode.toString(8).padStart(4, '0') } @@ -255,7 +242,7 @@ exports.add = { throw new Error("File argument 'data' is required.") } }) - .catch((err) => { + .catch(err => { if (!filesParsed) { output.write(' ') } @@ -271,8 +258,7 @@ exports.add = { output.end() }) - return h - .response(output) + return h.response(output) .header('x-chunked-output', '1') .header('content-type', 'application/json') .header('Trailer', 'X-Stream-Error') @@ -281,12 +267,10 @@ exports.add = { exports.ls = { validate: { - query: Joi.object() - .keys({ - 'cid-base': Joi.string().valid(...multibase.names), - stream: Joi.boolean() - }) - .unknown() + query: Joi.object().keys({ + 'cid-base': Joi.string().valid(...multibase.names), + stream: Joi.boolean() + }).unknown() }, // uses common parseKey method that returns a `key` @@ -299,7 +283,7 @@ exports.ls = { const recursive = request.query && request.query.recursive === 'true' const cidBase = request.query['cid-base'] - const mapLink = (link) => { + const mapLink = links => { const output = { Name: link.name, Hash: cidToString(link.cid, { base: cidBase }), @@ -328,9 +312,7 @@ exports.ls = { throw Boom.boomify(err, { message: 'Failed to list dir' }) } - return h.response({ - Objects: [{ Hash: key, Links: links.map(mapLink) }] - }) + return h.response({ Objects: [{ Hash: key, Links: links.map(mapLink) }] }) } return streamResponse(request, h, () => @@ -345,7 +327,7 @@ exports.ls = { } } -function toTypeCode(type) { +function toTypeCode (type) { switch (type) { case 'dir': return 1 @@ -358,17 +340,15 @@ function toTypeCode(type) { exports.refs = { validate: { - query: Joi.object() - .keys({ - recursive: Joi.boolean().default(false), - format: Joi.string().default(Format.default), - edges: Joi.boolean().default(false), - unique: Joi.boolean().default(false), - 'max-depth': Joi.number() - .integer() - .min(-1) - }) - .unknown() + query: Joi.object().keys({ + recursive: Joi.boolean().default(false), + format: Joi.string().default(Format.default), + edges: Joi.boolean().default(false), + unique: Joi.boolean().default(false), + 'max-depth': Joi.number() + .integer() + .min(-1) + }).unknown() }, // uses common parseKey method that returns a `key` From c32b769c19c47979a9ed316cd55dc5d919c324a4 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 14:24:42 -0500 Subject: [PATCH 08/20] Undoing accidental lerna changes --- examples/browser-parceljs/package.json | 50 +++++++++++++++++--------- examples/custom-libp2p/package.json | 40 +++++++++++++-------- 2 files changed, 59 insertions(+), 31 deletions(-) diff --git a/examples/browser-parceljs/package.json b/examples/browser-parceljs/package.json index d8c693d8f1..4014ebc0db 100644 --- a/examples/browser-parceljs/package.json +++ b/examples/browser-parceljs/package.json @@ -1,18 +1,34 @@ { - "name": "example-browser-parceljs", - "version": "1.0.0", - "description": "", - "main": "index.js", - "private": true, - "browserslist": [ - "last 2 Chrome versions" - ], - "keywords": [], - "author": "", - "license": "ISC", - "devDependencies": { - "@babel/core": "^7.1.6", - "@babel/preset-env": "^7.1.6", - "parcel-bundler": "^1.10.3" - } -} + "name": "example-browser-parceljs", + "version": "1.0.0", + "description": "", + "main": "index.js", + "private": true, + "browserslist": [ + "last 2 Chrome versions" + ], + "scripts": { + "clean": "rm -rf ./dist", + "lint": "standard public/**/*.js", + "start": "parcel public/index.html", + "build": "parcel build public/index.html --public-url ./", + "test": "test-ipfs-example" 
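The Mode field touched in several hunks above serialises a numeric UNIX file mode as a zero-padded octal string, presumably for parity with the go-ipfs HTTP API. In isolation:

// 0o644 becomes the string '0644'
const mode = 0o644
console.log(mode.toString(8).padStart(4, '0')) // '0644'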
+ }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "ipfs": "^0.41.0" + }, + "devDependencies": { + "@babel/cli": "^7.1.5", + "@babel/core": "^7.1.6", + "@babel/preset-env": "^7.1.6", + "babel-plugin-syntax-async-functions": "^6.13.0", + "babel-plugin-transform-regenerator": "^6.26.0", + "babel-polyfill": "^6.26.0", + "parcel-bundler": "^1.10.3", + "standard": "^13.1.0", + "test-ipfs-example": "^1.0.0" + } +} \ No newline at end of file diff --git a/examples/custom-libp2p/package.json b/examples/custom-libp2p/package.json index 534517d043..35b6d23faa 100644 --- a/examples/custom-libp2p/package.json +++ b/examples/custom-libp2p/package.json @@ -1,15 +1,27 @@ { - "name": "example-custom-libp2p", - "version": "0.1.0", - "description": "Customizing your libp2p node", - "main": "index.js", - "private": true, - "license": "MIT", - "dependencies": { - "libp2p": "^0.27.0-rc.0", - "libp2p-bootstrap": "^0.10.3", - "libp2p-mdns": "^0.13.1", - "libp2p-secio": "^0.12.2", - "libp2p-tcp": "^0.14.3" - } -} + "name": "example-custom-libp2p", + "version": "0.1.0", + "description": "Customizing your libp2p node", + "main": "index.js", + "private": true, + "scripts": { + "start": "node index.js", + "test": "test-ipfs-example" + }, + "license": "MIT", + "dependencies": { + "ipfs": "^0.41.0", + "libp2p": "^0.27.0-rc.0", + "libp2p-bootstrap": "^0.10.3", + "libp2p-kad-dht": "^0.18.3", + "libp2p-mdns": "^0.13.1", + "libp2p-mplex": "^0.9.3", + "libp2p-secio": "^0.12.2", + "libp2p-spdy": "^0.13.3", + "libp2p-tcp": "^0.14.3" + }, + "devDependencies": { + "execa": "^3.2.0", + "test-ipfs-example": "^1.0.0" + } +} \ No newline at end of file From dd88b648c42a1b631e3bfa9dbdf8d2a7bdd664a7 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 14:26:20 -0500 Subject: [PATCH 09/20] Fixing root package.json --- package.json | 580 ++++++++++++++++++--------------------------------- 1 file changed, 206 insertions(+), 374 deletions(-) diff --git a/package.json b/package.json index fd0861cc6c..c0128db7c9 100644 --- a/package.json +++ b/package.json @@ -1,375 +1,207 @@ { - "name": "js-ipfs", - "version": "1.0.0", - "description": "JavaScript implementation of the IPFS specification", - "devDependencies": { - "lerna": "^3.20.2" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/ipfs/js-ipfs.git" - }, - "private": true, - "contributors": [ - "David Dias ", - "Alan Shaw ", - "achingbrain ", - "Friedel Ziegelmayer ", - "Juan Batiz-Benet ", - "Hugo Dias ", - "Vasco Santos ", - "Henrique Dias ", - "Volker Mische ", - "ᴠɪᴄᴛᴏʀ ʙᴊᴇʟᴋʜᴏʟᴍ ", - "Stephen Whitmore ", - "Francisco Baio Dias ", - "Matt Bell ", - "Richard Schneider ", - "Jacob Heun ", - "Marcin Rataj ", - "Pedro Teixeira ", - "Travis Person ", - "Kristoffer Ström ", - "Dmitriy Ryajov ", - "nginnever ", - "Richard Littauer ", - "Oli Evans ", - "dirkmc ", - "Diogo Silva ", - "Connor Keenan ", - "Pedro Santos ", - "Harlan T Wood ", - "Pascal Precht ", - "Maciej Krüger ", - "Michael Garvin ", - "Steven Allen ", - "Michael Muré ", - "João Antunes ", - "Enrico Marino ", - "Christian Couder ", - "Rob Brackett ", - "Mithgol ", - "Prabhakar Poudel ", - "Sangwon Hong ", - "Jonathan ", - "Gavin McDermott ", - "Mikeal Rogers ", - "Dzmitry Das ", - "Andrew de Andrade ", - "Paulo Rodrigues ", - "haad ", - "Andrew Nesbitt ", - "Marius Darila ", - "Yahya ", - "Alex Mingoia ", - "RasmusErik Voel Jensen ", - "Ryan Bell ", - "Jeromy ", - "Gorka Ludlow ", - "Dan Ordille ", - "Matt Ober ", - "samuli ", - "Maxime Lathuilière ", - "Andrey 
", - "shunkin ", - "Jim Pick ", - "tcme ", - "Jonybang ", - "Raoul Millais ", - "Georgios Rassias ", - "Portia Burton ", - "Antonio Tenorio-Fornés ", - "Kevin Simper ", - "Kevin Wang ", - "Irakli Gozalishvili ", - "David Gilbertson ", - "Максим Ильин ", - "Nuno Nogueira ", - "Sid Harder ", - "seungwon-kang ", - "Arpit Agarwal <93arpit@gmail.com>", - "kevingzhang ", - "leekt216 ", - "noah the goodra ", - "priecint ", - "ron litzenberger ", - "sarthak khandelwal ", - "Paul Cowgill ", - "Adam Uhlíř ", - "Alex North ", - "André Cruz ", - "Ayush Mahajan ", - "Bernard Mordan ", - "Brian Vander Schaaf ", - "Bruno Barbieri ", - "Bruno Zell ", - "CHEVALAY JOSSELIN ", - "Caio Gondim ", - "Chance Hudson ", - "Dafeng ", - "Daniel Buchner ", - "Daniel Constantin ", - "Daniel J. O'Quinn ", - "Daniela Borges Matos de Carvalho ", - "Danny ", - "David ", - "David Braun ", - "David da Silva ", - "Davide Icardi ", - "Dietrich Ayala ", - "Dmitry Nikulin ", - "Dominic Della Valle ", - "Donatas Stundys ", - "Faheel Ahmad ", - "Felix Yan ", - "Fil ", - "Filip Š ", - "Gabriel Garrido Calvo ", - "Giles ", - "Grant Herman ", - "Henry Rodrick ", - "Heo Sangmin ", - "Holodisc ", - "Jacob Karlsson ", - "Jade Meskill ", - "James Halliday ", - "Jason Carver ", - "Jessica Schilling ", - "Joe Turgeon ", - "Joel Gustafson ", - "Johannes Wikner ", - "Jon Schlinkert ", - "Jorropo ", - "Lars Gierth ", - "Lukas Drgon ", - "Marcus Bernales ", - "Mark Robert Henderson ", - "Mat Kelly ", - "Matt Zumwalt ", - "Michael Bradley ", - "Michelle Lee ", - "Mitar ", - "Mohamed Abdulaziz ", - "Mounish Sai ", - "Nick Poulden ", - "Nicolás Santángelo ", - "Níckolas Goline ", - "Orie Steele ", - "Oskar Nyberg ", - "Pau Ramon Revilla ", - "0xflotus <0xflotus@gmail.com>", - "Pete Thomas ", - "Rod Keys ", - "Roman Khafizianov ", - "SidHarder ", - "Tapasweni Pathak ", - "Tara Vancil ", - "Terence Pae ", - "Thiago Delgado ", - "Uroš Jurglič ", - "Victor Bjelkholm ", - "Vincent Martin ", - "Vutsal Singhal ", - "Yole ", - "Zhiyuan Lin ", - "bitspill ", - "datafatmunger ", - "dmitriy ryajov ", - "elsehow ", - "ethers ", - "hapsody ", - "isan_rivkin " - ], - "dependencies": { - "@babel/cli": "^7.1.5", - "@babel/core": "^7.2.2", - "@babel/preset-env": "^7.2.3", - "@babel/preset-react": "^7.0.0", - "@hapi/ammo": "^3.1.2", - "@hapi/boom": "^7.4.3", - "@hapi/content": "^4.1.0", - "@hapi/hapi": "^18.4.0", - "@hapi/joi": "^15.1.0", - "@vue/cli-plugin-babel": "^3.9.0", - "@vue/cli-plugin-eslint": "^3.9.0", - "@vue/cli-service": "^3.9.0", - "abort-controller": "^3.0.0", - "aegir": "^21.3.0", - "any-signal": "^1.1.0", - "array-shuffle": "^1.0.1", - "async": "^3.1.0", - "babel-eslint": "^10.0.1", - "babel-loader": "^8.0.5", - "babel-plugin-syntax-async-functions": "^6.13.0", - "babel-plugin-transform-regenerator": "^6.26.0", - "babel-polyfill": "^6.26.0", - "base64url": "^3.0.1", - "bignumber.js": "^9.0.0", - "binary-querystring": "^0.1.2", - "bl": "^4.0.0", - "browser-process-platform": "^0.1.1", - "browserify": "^16.2.3", - "bs58": "^4.0.1", - "buffer": "^5.4.2", - "byteman": "^1.3.5", - "chai": "^4.2.0", - "chai-as-promised": "^7.1.1", - "chai-things": "^0.2.0", - "chromedriver": "^79.0.0", - "cid-tool": "^0.4.0", - "cids": "^0.7.3", - "class-is": "^1.1.0", - "clear-module": "^4.0.0", - "concat-stream": "^2.0.0", - "copy-webpack-plugin": "^5.0.4", - "core-js": "^2.6.5", - "cross-env": "^7.0.0", - "dag-cbor-links": "^1.3.2", - "datastore-core": "^0.7.0", - "datastore-fs": "^0.9.1", - "datastore-level": "^0.14.1", - "datastore-pubsub": "^0.3.0", - "debug": 
"^4.1.0", - "delay": "^4.3.0", - "detect-node": "^2.0.4", - "detect-webworker": "^1.0.0", - "dirty-chai": "^2.0.1", - "dlv": "^1.1.3", - "dot-prop": "^5.0.0", - "electron": "^6.0.0", - "electron-rebuild": "^1.8.4", - "err-code": "^2.0.0", - "eslint": "^5.16.0", - "eslint-plugin-vue": "^5.0.0", - "execa": "^3.2.0", - "file-type": "^12.0.1", - "fnv1a": "^1.0.1", - "form-data": "^3.0.0", - "fs-extra": "^8.1.0", - "get-folder-size": "^2.0.0", - "go-ipfs-dep": "0.4.23-3", - "hamt-sharding": "^1.0.0", - "hapi-pino": "^6.1.0", - "hashlru": "^2.3.0", - "hat": "0.0.3", - "html-webpack-plugin": "^3.2.0", - "http-server": "^0.11.1", - "interface-datastore": "^0.8.0", - "ipfs-bitswap": "^0.27.1", - "ipfs-block": "^0.8.1", - "ipfs-block-service": "^0.16.0", - "ipfs-http-response": "^0.5.0", - "ipfs-multipart": "file:/Users/paulcowgill/Code/contract-work/3box/js-ipfs/packages/ipfs/.connect-deps-cache/ipfs-multipart-0.3.0-1583254040397.tgz", - "ipfs-pubsub-room": "^2.0.1", - "ipfs-repo": "^0.30.1", - "ipfs-unixfs": "^1.0.0", - "ipfs-unixfs-exporter": "^1.0.1", - "ipfs-unixfs-importer": "^1.0.1", - "ipfsd-ctl": "^3.0.0", - "ipld": "^0.25.0", - "ipld-bitcoin": "^0.3.0", - "ipld-dag-cbor": "^0.15.1", - "ipld-dag-pb": "^0.18.2", - "ipld-ethereum": "^4.0.0", - "ipld-git": "^0.5.0", - "ipld-raw": "^4.0.1", - "ipld-zcash": "^0.4.0", - "ipns": "^0.7.0", - "is-domain-name": "^1.0.1", - "is-electron": "^2.2.0", - "is-ipfs": "^0.6.1", - "it-all": "^1.0.1", - "it-concat": "^1.0.0", - "it-drain": "^1.0.0", - "it-first": "^1.0.1", - "it-glob": "0.0.7", - "it-last": "^1.0.1", - "it-multipart": "^1.0.1", - "it-pipe": "^1.1.0", - "it-pushable": "^1.3.1", - "it-tar": "^1.2.1", - "it-to-stream": "^0.1.1", - "iterable-ndjson": "^1.1.0", - "joi-browser": "^13.4.0", - "jsondiffpatch": "^0.3.11", - "just-safe-set": "^2.1.0", - "ky": "^0.15.0", - "ky-universal": "^0.3.0", - "libp2p": "^0.27.2", - "libp2p-bootstrap": "^0.10.2", - "libp2p-crypto": "^0.17.1", - "libp2p-delegated-content-routing": "^0.4.3", - "libp2p-delegated-peer-routing": "^0.4.1", - "libp2p-floodsub": "^0.20.0", - "libp2p-gossipsub": "^0.2.3", - "libp2p-kad-dht": "^0.18.3", - "libp2p-keychain": "^0.6.0", - "libp2p-mdns": "^0.13.0", - "libp2p-mplex": "^0.9.3", - "libp2p-record": "^0.7.0", - "libp2p-secio": "^0.12.1", - "libp2p-spdy": "^0.13.3", - "libp2p-tcp": "^0.14.2", - "libp2p-webrtc-star": "^0.17.6", - "libp2p-websockets": "^0.13.3", - "mafmt": "^7.0.0", - "memdown": "^5.1.0", - "merge-options": "^2.0.0", - "mime-sniffer": "~0.0.3", - "mortice": "^2.0.0", - "multiaddr": "^7.2.1", - "multiaddr-to-uri": "^5.1.0", - "multibase": "^0.6.0", - "multicodec": "^1.0.0", - "multihashes": "^0.4.14", - "multihashing-async": "^0.8.0", - "ncp": "^2.0.0", - "nightwatch": "^1.2.4", - "nock": "^11.7.2", - "nyc": "^15.0.0", - "p-defer": "^3.0.0", - "p-event": "^4.1.0", - "p-map": "^3.0.0", - "p-queue": "^6.1.0", - "parcel-bundler": "^1.6.2", - "parse-duration": "^0.1.2", - "peer-id": "^0.13.5", - "peer-info": "^0.17.0", - "pretty-bytes": "^5.3.0", - "progress": "^2.0.1", - "prom-client": "^11.5.3", - "prometheus-gc-stats": "^0.6.0", - "protons": "^1.0.1", - "qs": "^6.5.2", - "react": "^16.7.0", - "react-dom": "^16.7.0", - "react-hot-loader": "^4.8.8", - "react-scripts": "^3.2.0", - "readable-stream": "^3.4.0", - "request": "^2.88.0", - "rimraf": "^3.0.0", - "semver": "^7.1.2", - "sinon": "^8.0.4", - "standard": "^13.1.0", - "stream-to-it": "^0.2.0", - "stream-to-promise": "^2.2.0", - "streaming-iterables": "^4.1.1", - "string-argv": "^0.3.1", - "temp": "^0.9.0", - 
"temp-write": "^4.0.0", - "terser-webpack-plugin": "^1.2.1", - "timeout-abort-controller": "^1.1.0", - "update-notifier": "^4.0.0", - "uri-to-multiaddr": "^3.0.2", - "varint": "^5.0.0", - "videostream": "^3.2.0", - "vue": "^2.6.10", - "vue-template-compiler": "^2.6.10", - "webpack": "^4.28.4", - "webpack-cli": "^3.0.8", - "webpack-dev-server": "^3.1.14", - "which": "^2.0.1", - "yargs": "^15.1.0", - "yargs-promise": "^1.1.0" - } -} + "name": "js-ipfs", + "version": "1.0.0", + "description": "JavaScript implementation of the IPFS specification", + "scripts": { + "postinstall": "lerna bootstrap", + "reset": "lerna run clean && rm -rf examples/*/node_modules && rm -rf packages/*/node_modules node_modules", + "test": "lerna run test", + "test:node": "lerna run test:node", + "test:browser": "lerna run test:browser", + "test:webworker": "lerna run test:webworker", + "test:electron": "lerna run test:electron", + "test:electron-main": "lerna run test:electron-main", + "test:electron-renderer": "lerna run test:electron-renderer", + "test:cli": "lerna run test:cli", + "test:interop:node": "lerna run test:interop:node", + "test:interop:browser": "lerna run test:interop:browser", + "test:interop:electron-main": "lerna run test:interop:electron-main", + "test:interop:electron-renderer": "lerna run test:interop:electron-renderer", + "coverage": "lerna run coverage", + "build": "lerna run build", + "clean": "lerna run clean", + "lint": "lerna run lint", + "dep-check": "lerna run dep-check", + "release": "npm run update-contributors && lerna publish", + "release:rc": "lerna publish --canary --preid rc --dist-tag next", + "update-contributors": "aegir release --lint=false --test=false --bump=false --build=false --changelog=false --commit=false --tag=false --push=false --ghrelease=false --docs=false --publish=false" + }, + "devDependencies": { + "lerna": "^3.20.2" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/ipfs/js-ipfs.git" + }, + "private": true, + "contributors": [ + "David Dias ", + "Alan Shaw ", + "achingbrain ", + "Friedel Ziegelmayer ", + "Juan Batiz-Benet ", + "Hugo Dias ", + "Vasco Santos ", + "Henrique Dias ", + "Volker Mische ", + "ᴠɪᴄᴛᴏʀ ʙᴊᴇʟᴋʜᴏʟᴍ ", + "Stephen Whitmore ", + "Francisco Baio Dias ", + "Matt Bell ", + "Richard Schneider ", + "Jacob Heun ", + "Marcin Rataj ", + "Pedro Teixeira ", + "Travis Person ", + "Kristoffer Ström ", + "Dmitriy Ryajov ", + "nginnever ", + "Richard Littauer ", + "Oli Evans ", + "dirkmc ", + "Diogo Silva ", + "Connor Keenan ", + "Pedro Santos ", + "Harlan T Wood ", + "Pascal Precht ", + "Maciej Krüger ", + "Michael Garvin ", + "Steven Allen ", + "Michael Muré ", + "João Antunes ", + "Enrico Marino ", + "Christian Couder ", + "Rob Brackett ", + "Mithgol ", + "Prabhakar Poudel ", + "Sangwon Hong ", + "Jonathan ", + "Gavin McDermott ", + "Mikeal Rogers ", + "Dzmitry Das ", + "Andrew de Andrade ", + "Paulo Rodrigues ", + "haad ", + "Andrew Nesbitt ", + "Marius Darila ", + "Yahya ", + "Alex Mingoia ", + "RasmusErik Voel Jensen ", + "Ryan Bell ", + "Jeromy ", + "Gorka Ludlow ", + "Dan Ordille ", + "Matt Ober ", + "samuli ", + "Maxime Lathuilière ", + "Andrey ", + "shunkin ", + "Jim Pick ", + "tcme ", + "Jonybang ", + "Raoul Millais ", + "Georgios Rassias ", + "Portia Burton ", + "Antonio Tenorio-Fornés ", + "Kevin Simper ", + "Kevin Wang ", + "Irakli Gozalishvili ", + "David Gilbertson ", + "Максим Ильин ", + "Nuno Nogueira ", + "Sid Harder ", + "seungwon-kang ", + "Arpit Agarwal <93arpit@gmail.com>", + "kevingzhang ", + "leekt216 ", + 
"noah the goodra ", + "priecint ", + "ron litzenberger ", + "sarthak khandelwal ", + "Paul Cowgill ", + "Adam Uhlíř ", + "Alex North ", + "André Cruz ", + "Ayush Mahajan ", + "Bernard Mordan ", + "Brian Vander Schaaf ", + "Bruno Barbieri ", + "Bruno Zell ", + "CHEVALAY JOSSELIN ", + "Caio Gondim ", + "Chance Hudson ", + "Dafeng ", + "Daniel Buchner ", + "Daniel Constantin ", + "Daniel J. O'Quinn ", + "Daniela Borges Matos de Carvalho ", + "Danny ", + "David ", + "David Braun ", + "David da Silva ", + "Davide Icardi ", + "Dietrich Ayala ", + "Dmitry Nikulin ", + "Dominic Della Valle ", + "Donatas Stundys ", + "Faheel Ahmad ", + "Felix Yan ", + "Fil ", + "Filip Š ", + "Gabriel Garrido Calvo ", + "Giles ", + "Grant Herman ", + "Henry Rodrick ", + "Heo Sangmin ", + "Holodisc ", + "Jacob Karlsson ", + "Jade Meskill ", + "James Halliday ", + "Jason Carver ", + "Jessica Schilling ", + "Joe Turgeon ", + "Joel Gustafson ", + "Johannes Wikner ", + "Jon Schlinkert ", + "Jorropo ", + "Lars Gierth ", + "Lukas Drgon ", + "Marcus Bernales ", + "Mark Robert Henderson ", + "Mat Kelly ", + "Matt Zumwalt ", + "Michael Bradley ", + "Michelle Lee ", + "Mitar ", + "Mohamed Abdulaziz ", + "Mounish Sai ", + "Nick Poulden ", + "Nicolás Santángelo ", + "Níckolas Goline ", + "Orie Steele ", + "Oskar Nyberg ", + "Pau Ramon Revilla ", + "0xflotus <0xflotus@gmail.com>", + "Pete Thomas ", + "Rod Keys ", + "Roman Khafizianov ", + "SidHarder ", + "Tapasweni Pathak ", + "Tara Vancil ", + "Terence Pae ", + "Thiago Delgado ", + "Uroš Jurglič ", + "Victor Bjelkholm ", + "Vincent Martin ", + "Vutsal Singhal ", + "Yole ", + "Zhiyuan Lin ", + "bitspill ", + "datafatmunger ", + "dmitriy ryajov ", + "elsehow ", + "ethers ", + "hapsody ", + "isan_rivkin " + ] +} \ No newline at end of file From 6f4e9109aeb3f5f6deb7f1655dc2797de85c0248 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 14:30:10 -0500 Subject: [PATCH 10/20] Undoing autoformatting --- packages/ipfs/src/http/api/resources/dag.js | 10 ++++----- .../src/http/api/resources/files-regular.js | 22 ++++++++----------- 2 files changed, 14 insertions(+), 18 deletions(-) diff --git a/packages/ipfs/src/http/api/resources/dag.js b/packages/ipfs/src/http/api/resources/dag.js index deb0dd40b6..07aa33a3bb 100644 --- a/packages/ipfs/src/http/api/resources/dag.js +++ b/packages/ipfs/src/http/api/resources/dag.js @@ -8,7 +8,9 @@ const multibase = require('multibase') const multicodec = require('multicodec') const Boom = require('@hapi/boom') const debug = require('debug') -const { cidToString } = require('../../../utils/cid') +const { + cidToString +} = require('../../../utils/cid') const all = require('it-all') const log = debug('ipfs:http-api:dag') log.error = debug('ipfs:http-api:dag:error') @@ -120,9 +122,7 @@ const encodeBufferKeys = (obj, encoding) => { exports.get = { validate: { query: Joi.object().keys({ - 'data-encoding': Joi.string() - .valid('text', 'base64', 'hex') - .default('text'), + 'data-encoding': Joi.string().valid('text', 'base64', 'hex').default('text'), 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -181,7 +181,7 @@ exports.put = { // pre request handler that parses the args and returns `node` // which is assigned to `request.pre.args` - async parsePargs (request, h) { + async parseArgs (request, h) { if (!request.payload) { throw Boom.badRequest("File argument 'object data' is required") } diff --git a/packages/ipfs/src/http/api/resources/files-regular.js b/packages/ipfs/src/http/api/resources/files-regular.js index 
9de4dea8a0..4324ad8d2f 100644 --- a/packages/ipfs/src/http/api/resources/files-regular.js +++ b/packages/ipfs/src/http/api/resources/files-regular.js @@ -19,7 +19,7 @@ const ndjson = require('iterable-ndjson') const { map } = require('streaming-iterables') const streamResponse = require('../../utils/stream-response') -const toBuffer = async function*(source) { +const toBuffer = async function * (source) { for await (const chunk of source) { yield chunk.slice() } @@ -67,12 +67,12 @@ exports.cat = { parseArgs: exports.parseKey, // main route handler which is called after the above `parseArgs`, but only if the args were valid - handler(request, h) { + handler (request, h) { const { ipfs } = request.server.app const { key, options } = request.pre.args return streamResponse(request, h, () => ipfs.cat(key, options), { - onError(err) { + onError (err) { err.message = err.message === 'file does not exist' ? err.message : 'Failed to cat file: ' + err.message @@ -86,7 +86,7 @@ exports.get = { parseArgs: exports.parseKey, // main route handler which is called after the above `parseArgs`, but only if the args were valid - handler(request, h) { + handler (request, h) { const { ipfs } = request.server.app const { key } = request.pre.args @@ -136,7 +136,7 @@ exports.add = { .options({ allowUnknown: true }) }, - handler(request, h) { + handler (request, h) { if (!request.payload) { throw Boom.badRequest('Array, Buffer, or String is required.') } @@ -219,7 +219,7 @@ exports.add = { blockWriteConcurrency: request.query['block-write-concurrency'] }) }, - map((file) => { + map(file => { const entry = { Name: file.path, Hash: cidToString(file.cid, { base: request.query['cid-base'] }), @@ -283,7 +283,7 @@ exports.ls = { const recursive = request.query && request.query.recursive === 'true' const cidBase = request.query['cid-base'] - const mapLink = links => { + const mapLink = link => { const output = { Name: link.name, Hash: cidToString(link.cid, { base: cidBase }), @@ -318,9 +318,7 @@ exports.ls = { return streamResponse(request, h, () => pipe( ipfs.ls(key, { recursive }), - map((link) => ({ - Objects: [{ Hash: key, Links: [mapLink(link)] }] - })), + map(link => ({ Objects: [{ Hash: key, Links: [mapLink(link)] }] })), ndjson.stringify ) ) @@ -345,9 +343,7 @@ exports.refs = { format: Joi.string().default(Format.default), edges: Joi.boolean().default(false), unique: Joi.boolean().default(false), - 'max-depth': Joi.number() - .integer() - .min(-1) + 'max-depth': Joi.number().integer().min(-1) }).unknown() }, From 24d6a9ccf8881cce89ad82119203026242d442a9 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 14:30:44 -0500 Subject: [PATCH 11/20] Undoing autoformatting --- packages/ipfs/src/http/api/resources/files-regular.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/ipfs/src/http/api/resources/files-regular.js b/packages/ipfs/src/http/api/resources/files-regular.js index 4324ad8d2f..12d7808968 100644 --- a/packages/ipfs/src/http/api/resources/files-regular.js +++ b/packages/ipfs/src/http/api/resources/files-regular.js @@ -351,7 +351,7 @@ exports.refs = { parseArgs: exports.parseKey, // main route handler which is called after the above `parseArgs`, but only if the args were valid - handler(request, h) { + handler (request, h) { const { ipfs } = request.server.app const { key } = request.pre.args @@ -375,7 +375,7 @@ exports.refs = { exports.refs.local = { // main route handler - handler(request, h) { + handler (request, h) { const { ipfs } = request.server.app return 
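The Hash fields in these hunks are produced by cidToString(cid, { base }), where base comes from the cid-base query parameter validated against multibase.names. A rough sketch of the call, with an illustrative require path and a sample CID; the exact output string depends on the util's re-encoding rules for v0 CIDs:

'use strict'

const CID = require('cids')
// illustrative path; inside packages/ipfs this is required relatively
const { cidToString } = require('./packages/ipfs/src/utils/cid')

const cid = new CID('QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG')

// with no base the CID keeps its own encoding; an explicit base re-encodes it
console.log(cidToString(cid, { base: 'base32' }))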
streamResponse(request, h, () => From 6d6d6f24bff03d99e4202fd67c355d5e4b853bb0 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 14:33:49 -0500 Subject: [PATCH 12/20] Undoing autoformatting --- .../src/http/api/resources/files-regular.js | 39 ++++++++----------- 1 file changed, 16 insertions(+), 23 deletions(-) diff --git a/packages/ipfs/src/http/api/resources/files-regular.js b/packages/ipfs/src/http/api/resources/files-regular.js index 12d7808968..d88ccc9ff9 100644 --- a/packages/ipfs/src/http/api/resources/files-regular.js +++ b/packages/ipfs/src/http/api/resources/files-regular.js @@ -157,8 +157,7 @@ exports.add = { output.write(JSON.stringify({ Name: currentFileName, Bytes: bytes - }) + '\n' - ) + }) + '\n') } pipe( @@ -315,13 +314,11 @@ exports.ls = { return h.response({ Objects: [{ Hash: key, Links: links.map(mapLink) }] }) } - return streamResponse(request, h, () => - pipe( - ipfs.ls(key, { recursive }), - map(link => ({ Objects: [{ Hash: key, Links: [mapLink(link)] }] })), - ndjson.stringify - ) - ) + return streamResponse(request, h, () => pipe( + ipfs.ls(key, { recursive }), + map(link => ({ Objects: [{ Hash: key, Links: [mapLink(link)] }] })), + ndjson.stringify + )) } } @@ -363,13 +360,11 @@ exports.refs = { maxDepth: request.query['max-depth'] } - return streamResponse(request, h, () => - pipe( - ipfs.refs(key, options), - map(({ ref, err }) => ({ Ref: ref, Err: err })), - ndjson.stringify - ) - ) + return streamResponse(request, h, () => pipe( + ipfs.refs(key, options), + map(({ ref, err }) => ({ Ref: ref, Err: err })), + ndjson.stringify + )) } } @@ -378,12 +373,10 @@ exports.refs.local = { handler (request, h) { const { ipfs } = request.server.app - return streamResponse(request, h, () => - pipe( - ipfs.refs.local(), - map(({ ref, err }) => ({ Ref: ref, Err: err })), - ndjson.stringify - ) - ) + return streamResponse(request, h, () => pipe( + ipfs.refs.local(), + map(({ ref, err }) => ({ Ref: ref, Err: err })), + ndjson.stringify + )) } } From 3633f92118d16f4c6eb54f85ce0f2cfdeb654a39 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 14:35:32 -0500 Subject: [PATCH 13/20] Undoing autoformatting --- .../src/http/api/resources/files-regular.js | 37 ++++++++----------- 1 file changed, 16 insertions(+), 21 deletions(-) diff --git a/packages/ipfs/src/http/api/resources/files-regular.js b/packages/ipfs/src/http/api/resources/files-regular.js index d88ccc9ff9..3f26572986 100644 --- a/packages/ipfs/src/http/api/resources/files-regular.js +++ b/packages/ipfs/src/http/api/resources/files-regular.js @@ -90,29 +90,24 @@ exports.get = { const { ipfs } = request.server.app const { key } = request.pre.args - return streamResponse(request, h, () => - pipe( - ipfs.get(key), - async function * (source) { - for await (const file of source) { - const header = { - name: file.path - } + return streamResponse(request, h, () => pipe( + ipfs.get(key), + async function * (source) { + for await (const file of source) { + const header = { + name: file.path + } - if (file.content) { - yield { - header: { ...header, size: file.size }, - body: toBuffer(file.content) - } - } else { - yield { header: { ...header, type: 'directory' } } - } + if (file.content) { + yield { header: { ...header, size: file.size }, body: toBuffer(file.content) } + } else { + yield { header: { ...header, type: 'directory' } } } - }, - tar.pack(), - toBuffer - ) - ) + } + }, + tar.pack(), + toBuffer + )) } } From 4147294effecebf676e852550bd6e3ddd390e178 Mon Sep 17 00:00:00 2001 From: Paul Cowgill 
Date: Mon, 9 Mar 2020 15:56:17 -0500 Subject: [PATCH 14/20] Handle case where responses have no body with a getter --- packages/ipfs-http-client/src/add/index.js | 10 +-- packages/ipfs-http-client/src/block/rm.js | 4 +- packages/ipfs-http-client/src/cat.js | 4 +- .../ipfs-http-client/src/dht/find-peer.js | 4 +- .../ipfs-http-client/src/dht/find-provs.js | 4 +- packages/ipfs-http-client/src/dht/get.js | 4 +- packages/ipfs-http-client/src/dht/provide.js | 4 +- packages/ipfs-http-client/src/dht/put.js | 4 +- packages/ipfs-http-client/src/dht/query.js | 4 +- packages/ipfs-http-client/src/files/ls.js | 4 +- packages/ipfs-http-client/src/files/read.js | 4 +- packages/ipfs-http-client/src/get.js | 4 +- .../src/lib/stream-to-async-iterable.js | 27 +++++++ packages/ipfs-http-client/src/log/tail.js | 4 +- packages/ipfs-http-client/src/ls.js | 4 +- packages/ipfs-http-client/src/name/resolve.js | 4 +- packages/ipfs-http-client/src/pin/ls.js | 4 +- packages/ipfs-http-client/src/ping.js | 4 +- .../ipfs-http-client/src/pubsub/subscribe.js | 4 +- packages/ipfs-http-client/src/refs/index.js | 4 +- packages/ipfs-http-client/src/refs/local.js | 4 +- packages/ipfs-http-client/src/repo/gc.js | 4 +- packages/ipfs-http-client/src/stats/bw.js | 4 +- .../test/lib.stream-to-async-iterable.spec.js | 71 +++++++++++++++++++ 24 files changed, 146 insertions(+), 46 deletions(-) create mode 100644 packages/ipfs-http-client/src/lib/stream-to-async-iterable.js create mode 100644 packages/ipfs-http-client/test/lib.stream-to-async-iterable.spec.js diff --git a/packages/ipfs-http-client/src/add/index.js b/packages/ipfs-http-client/src/add/index.js index cd162c2f94..d31895d8fb 100644 --- a/packages/ipfs-http-client/src/add/index.js +++ b/packages/ipfs-http-client/src/add/index.js @@ -3,7 +3,7 @@ const ndjson = require('iterable-ndjson') const CID = require('cids') const configure = require('../lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') const { toFormData } = require('./form-data') const toCamel = require('../lib/object-to-camel') @@ -32,16 +32,18 @@ module.exports = configure(({ ky }) => { if (options.preload != null) searchParams.set('preload', options.preload) if (options.fileImportConcurrency != null) searchParams.set('file-import-concurrency', options.fileImportConcurrency) if (options.blockWriteConcurrency != null) searchParams.set('block-write-concurrency', options.blockWriteConcurrency) - + + const formData = await toFormData(input) + const res = await ky.post('add', { timeout: options.timeout, signal: options.signal, headers: options.headers, searchParams, - body: await toFormData(input) + body: formData }) - for await (let file of ndjson(toIterable(res.body))) { + for await (let file of ndjson(toAsyncIterable(res))) { file = toCamel(file) if (options.progress && file.bytes) { diff --git a/packages/ipfs-http-client/src/block/rm.js b/packages/ipfs-http-client/src/block/rm.js index f8fc8c1039..c739887751 100644 --- a/packages/ipfs-http-client/src/block/rm.js +++ b/packages/ipfs-http-client/src/block/rm.js @@ -3,7 +3,7 @@ const CID = require('cids') const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') module.exports = configure(({ ky }) => { return async function * rm (cid, options) { @@ -29,7 +29,7 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (const 
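Every call site converted in this patch shares one shape: POST to the daemon, then read the response as newline-delimited JSON. Distilled, and assuming the configure()d ky instance and iterable-ndjson used throughout the package, the pattern is roughly:

'use strict'

const ndjson = require('iterable-ndjson')
// illustrative path to the helper added by this patch
const toAsyncIterable = require('./packages/ipfs-http-client/src/lib/stream-to-async-iterable')

async function * addResults (ky, formData) {
  const res = await ky.post('add', { body: formData })

  // one parsed object per NDJSON line, whether res.body streams or the
  // whole payload arrives via the arrayBuffer() fallback
  for await (const file of ndjson(toAsyncIterable(res))) {
    yield file
  }
}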
removed of ndjson(toIterable(res.body))) { + for await (const removed of ndjson(toAsyncIterable(res))) { yield toCoreInterface(removed) } } diff --git a/packages/ipfs-http-client/src/cat.js b/packages/ipfs-http-client/src/cat.js index 3d4971a1c7..67fe925613 100644 --- a/packages/ipfs-http-client/src/cat.js +++ b/packages/ipfs-http-client/src/cat.js @@ -3,7 +3,7 @@ const CID = require('cids') const { Buffer } = require('buffer') const configure = require('./lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('./lib/stream-to-async-iterable') module.exports = configure(({ ky }) => { return async function * cat (path, options) { @@ -27,7 +27,7 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (const chunk of toIterable(res.body)) { + for await (const chunk of toAsyncIterable(res)) { yield Buffer.from(chunk) } } diff --git a/packages/ipfs-http-client/src/dht/find-peer.js b/packages/ipfs-http-client/src/dht/find-peer.js index ef5c6c92a9..b788de28db 100644 --- a/packages/ipfs-http-client/src/dht/find-peer.js +++ b/packages/ipfs-http-client/src/dht/find-peer.js @@ -5,7 +5,7 @@ const CID = require('cids') const multiaddr = require('multiaddr') const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') module.exports = configure(({ ky }) => { return async function findPeer (peerId, options) { @@ -22,7 +22,7 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (const message of ndjson(toIterable(res.body))) { + for await (const message of ndjson(toAsyncIterable(res))) { // 3 = QueryError // https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18 // https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L388-L389 diff --git a/packages/ipfs-http-client/src/dht/find-provs.js b/packages/ipfs-http-client/src/dht/find-provs.js index e70935dec5..e0239bbd81 100644 --- a/packages/ipfs-http-client/src/dht/find-provs.js +++ b/packages/ipfs-http-client/src/dht/find-provs.js @@ -4,7 +4,7 @@ const CID = require('cids') const multiaddr = require('multiaddr') const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') module.exports = configure(({ ky }) => { return async function * findProvs (cid, options) { @@ -22,7 +22,7 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (const message of ndjson(toIterable(res.body))) { + for await (const message of ndjson(toAsyncIterable(res))) { // 3 = QueryError // https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18 // https://github.com/libp2p/go-libp2p-kad-dht/blob/master/routing.go#L525-L526 diff --git a/packages/ipfs-http-client/src/dht/get.js b/packages/ipfs-http-client/src/dht/get.js index 4be7b80c28..4f162a3652 100644 --- a/packages/ipfs-http-client/src/dht/get.js +++ b/packages/ipfs-http-client/src/dht/get.js @@ -2,7 +2,7 @@ const { Buffer } = require('buffer') const ndjson = require('iterable-ndjson') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') const encodeBufferURIComponent = require('../lib/encode-buffer-uri-component') const configure = 
require('../lib/configure') @@ -23,7 +23,7 @@ module.exports = configure(({ ky }) => { headers: options.headers }) - for await (const message of ndjson(toIterable(res.body))) { + for await (const message of ndjson(toAsyncIterable(res))) { // 3 = QueryError // https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18 // https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L472-L473 diff --git a/packages/ipfs-http-client/src/dht/provide.js b/packages/ipfs-http-client/src/dht/provide.js index cdcae2f84a..aa385cbe6d 100644 --- a/packages/ipfs-http-client/src/dht/provide.js +++ b/packages/ipfs-http-client/src/dht/provide.js @@ -4,7 +4,7 @@ const CID = require('cids') const multiaddr = require('multiaddr') const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') const toCamel = require('../lib/object-to-camel') module.exports = configure(({ ky }) => { @@ -24,7 +24,7 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (let message of ndjson(toIterable(res.body))) { + for await (let message of ndjson(toAsyncIterable(res))) { // 3 = QueryError // https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18 // https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L283-L284 diff --git a/packages/ipfs-http-client/src/dht/put.js b/packages/ipfs-http-client/src/dht/put.js index 8dc924f7a3..b3cf9efbc8 100644 --- a/packages/ipfs-http-client/src/dht/put.js +++ b/packages/ipfs-http-client/src/dht/put.js @@ -5,7 +5,7 @@ const CID = require('cids') const multiaddr = require('multiaddr') const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') const encodeBufferURIComponent = require('../lib/encode-buffer-uri-component') const toCamel = require('../lib/object-to-camel') @@ -26,7 +26,7 @@ module.exports = configure(({ ky }) => { headers: options.headers }) - for await (let message of ndjson(toIterable(res.body))) { + for await (let message of ndjson(toAsyncIterable(res))) { // 3 = QueryError // https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L18 // https://github.com/ipfs/go-ipfs/blob/eb11f569b064b960d1aba4b5b8ca155a3bd2cb21/core/commands/dht.go#L472-L473 diff --git a/packages/ipfs-http-client/src/dht/query.js b/packages/ipfs-http-client/src/dht/query.js index b2b8785723..412cf49611 100644 --- a/packages/ipfs-http-client/src/dht/query.js +++ b/packages/ipfs-http-client/src/dht/query.js @@ -3,7 +3,7 @@ const CID = require('cids') const ndjson = require('iterable-ndjson') const multiaddr = require('multiaddr') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') const configure = require('../lib/configure') const toCamel = require('../lib/object-to-camel') @@ -22,7 +22,7 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (let message of ndjson(toIterable(res.body))) { + for await (let message of ndjson(toAsyncIterable(res))) { message = toCamel(message) message.id = new CID(message.id) message.responses = (message.responses || []).map(({ ID, Addrs }) => ({ diff --git 
a/packages/ipfs-http-client/src/files/ls.js b/packages/ipfs-http-client/src/files/ls.js index f07c65c097..6a61e6c2ad 100644 --- a/packages/ipfs-http-client/src/files/ls.js +++ b/packages/ipfs-http-client/src/files/ls.js @@ -2,7 +2,7 @@ const CID = require('cids') const ndjson = require('iterable-ndjson') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') const configure = require('../lib/configure') const toCamelWithMetadata = require('../lib/object-to-camel-with-metadata') @@ -30,7 +30,7 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (const result of ndjson(toIterable(res.body))) { + for await (const result of ndjson(toAsyncIterable(res))) { // go-ipfs does not yet support the "stream" option if ('Entries' in result) { for (const entry of result.Entries || []) { diff --git a/packages/ipfs-http-client/src/files/read.js b/packages/ipfs-http-client/src/files/read.js index 1800609d0a..f2ba05af6c 100644 --- a/packages/ipfs-http-client/src/files/read.js +++ b/packages/ipfs-http-client/src/files/read.js @@ -2,7 +2,7 @@ const { Buffer } = require('buffer') const configure = require('../lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') module.exports = configure(({ ky }) => { return async function * read (path, options) { @@ -20,7 +20,7 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (const chunk of toIterable(res.body)) { + for await (const chunk of toAsyncIterable(res)) { yield Buffer.from(chunk) } } diff --git a/packages/ipfs-http-client/src/get.js b/packages/ipfs-http-client/src/get.js index 635f8b34fc..221a7616dd 100644 --- a/packages/ipfs-http-client/src/get.js +++ b/packages/ipfs-http-client/src/get.js @@ -4,7 +4,7 @@ const configure = require('./lib/configure') const Tar = require('it-tar') const { Buffer } = require('buffer') const CID = require('cids') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('./lib/stream-to-async-iterable') module.exports = configure(({ ky }) => { return async function * get (path, options) { @@ -38,7 +38,7 @@ module.exports = configure(({ ky }) => { const extractor = Tar.extract() - for await (const { header, body } of extractor(toIterable(res.body))) { + for await (const { header, body } of extractor(toAsyncIterable(res))) { if (header.type === 'directory') { yield { path: header.name diff --git a/packages/ipfs-http-client/src/lib/stream-to-async-iterable.js b/packages/ipfs-http-client/src/lib/stream-to-async-iterable.js new file mode 100644 index 0000000000..6121b4f351 --- /dev/null +++ b/packages/ipfs-http-client/src/lib/stream-to-async-iterable.js @@ -0,0 +1,27 @@ +'use strict' + +const toAsyncIterableOriginal = require('stream-to-it/source') + +// Note: Turned this into a helper that wraps `stream-to-it/source` +// to handle the body undefined case without requiring that other libs +// that consume that package such as `js-ipfs` and `js-ipfs-utils` modify +// how they use it + +module.exports = function toAsyncIterable (res) { + const { body } = res + + // An env where res.body getter for ReadableStream with getReader + // is not supported, for example in React Native + if (!body) { + if (res.arrayBuffer) { + return (async function * () { + const arrayBuffer = await res.arrayBuffer() + yield arrayBuffer + })() + } else { + throw new Error('Neither Response.body nor Response.arrayBuffer is defined') + } + } + + return 
toAsyncIterableOriginal(body) +} \ No newline at end of file diff --git a/packages/ipfs-http-client/src/log/tail.js b/packages/ipfs-http-client/src/log/tail.js index 74b72b2c29..652741dadc 100644 --- a/packages/ipfs-http-client/src/log/tail.js +++ b/packages/ipfs-http-client/src/log/tail.js @@ -2,7 +2,7 @@ const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') module.exports = configure(({ ky }) => { return async function * tail (options) { @@ -15,6 +15,6 @@ module.exports = configure(({ ky }) => { searchParams: options.searchParams }) - yield * ndjson(toIterable(res.body)) + yield * ndjson(toAsyncIterable(res)) } }) diff --git a/packages/ipfs-http-client/src/ls.js b/packages/ipfs-http-client/src/ls.js index ec7e37dfb1..e29038ebdf 100644 --- a/packages/ipfs-http-client/src/ls.js +++ b/packages/ipfs-http-client/src/ls.js @@ -3,7 +3,7 @@ const { Buffer } = require('buffer') const CID = require('cids') const ndjson = require('iterable-ndjson') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('./lib/stream-to-async-iterable') const configure = require('./lib/configure') module.exports = configure(({ ky }) => { @@ -25,7 +25,7 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (let result of ndjson(toIterable(res.body))) { + for await (let result of ndjson(toAsyncIterable(res))) { result = result.Objects if (!result) { diff --git a/packages/ipfs-http-client/src/name/resolve.js b/packages/ipfs-http-client/src/name/resolve.js index e7eb20b4f0..0eeebf0bb5 100644 --- a/packages/ipfs-http-client/src/name/resolve.js +++ b/packages/ipfs-http-client/src/name/resolve.js @@ -2,7 +2,7 @@ const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') module.exports = configure(({ ky }) => { return async function * (path, options) { @@ -23,7 +23,7 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (const result of ndjson(toIterable(res.body))) { + for await (const result of ndjson(toAsyncIterable(res))) { yield result.Path } } diff --git a/packages/ipfs-http-client/src/pin/ls.js b/packages/ipfs-http-client/src/pin/ls.js index f9e0968ace..b893894774 100644 --- a/packages/ipfs-http-client/src/pin/ls.js +++ b/packages/ipfs-http-client/src/pin/ls.js @@ -3,7 +3,7 @@ const ndjson = require('iterable-ndjson') const CID = require('cids') const configure = require('../lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') module.exports = configure(({ ky }) => { return async function * ls (path, options) { @@ -28,7 +28,7 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (const pin of ndjson(toIterable(res.body))) { + for await (const pin of ndjson(toAsyncIterable(res))) { if (pin.Keys) { // non-streaming response for (const cid of Object.keys(pin.Keys)) { yield { cid: new CID(cid), type: pin.Keys[cid].Type } diff --git a/packages/ipfs-http-client/src/ping.js b/packages/ipfs-http-client/src/ping.js index 332120934b..d158efee6f 100644 --- a/packages/ipfs-http-client/src/ping.js +++ b/packages/ipfs-http-client/src/ping.js @@ -2,7 +2,7 @@ const ndjson = require('iterable-ndjson') const configure = require('./lib/configure') -const toIterable = 
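To make the new helper's fallback concrete: with a spec-compliant fetch Response it simply defers to stream-to-it/source, while in an environment without the body getter (the React Native case called out in the comment) it awaits arrayBuffer() and yields the whole payload as one chunk, and it throws up front when neither is available. A small self-contained check, using plain objects in place of real Responses:

'use strict'

const toAsyncIterable = require('./packages/ipfs-http-client/src/lib/stream-to-async-iterable')

async function demo () {
  // no body getter, but arrayBuffer() exists: one chunk is yielded
  const fallbackRes = {
    arrayBuffer: async () => Buffer.from('entire payload')
  }

  for await (const chunk of toAsyncIterable(fallbackRes)) {
    console.log(chunk.toString()) // 'entire payload'
  }

  // neither body nor arrayBuffer: the helper throws synchronously
  try {
    toAsyncIterable({})
  } catch (err) {
    console.log(err.message) // 'Neither Response.body nor Response.arrayBuffer is defined'
  }
}

demo()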
require('stream-to-it/source') +const toAsyncIterable = require('./lib/stream-to-async-iterable') const toCamel = require('./lib/object-to-camel') module.exports = configure(({ ky }) => { @@ -20,7 +20,7 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (const chunk of ndjson(toIterable(res.body))) { + for await (const chunk of ndjson(toAsyncIterable(res))) { yield toCamel(chunk) } } diff --git a/packages/ipfs-http-client/src/pubsub/subscribe.js b/packages/ipfs-http-client/src/pubsub/subscribe.js index 188a916648..ed56b42c25 100644 --- a/packages/ipfs-http-client/src/pubsub/subscribe.js +++ b/packages/ipfs-http-client/src/pubsub/subscribe.js @@ -5,7 +5,7 @@ const bs58 = require('bs58') const { Buffer } = require('buffer') const log = require('debug')('ipfs-http-client:pubsub:subscribe') const configure = require('../lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') const SubscriptionTracker = require('./subscription-tracker') module.exports = configure((config) => { @@ -49,7 +49,7 @@ module.exports = configure((config) => { clearTimeout(ffWorkaround) - readMessages(ndjson(toIterable(res.body)), { + readMessages(ndjson(toAsyncIterable(res)), { onMessage: handler, onEnd: () => subsTracker.unsubscribe(topic, handler), onError: options.onError diff --git a/packages/ipfs-http-client/src/refs/index.js b/packages/ipfs-http-client/src/refs/index.js index 05a636febc..8f21fbaacb 100644 --- a/packages/ipfs-http-client/src/refs/index.js +++ b/packages/ipfs-http-client/src/refs/index.js @@ -4,7 +4,7 @@ const configure = require('../lib/configure') const { Buffer } = require('buffer') const CID = require('cids') const ndjson = require('iterable-ndjson') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') const toCamel = require('../lib/object-to-camel') module.exports = config => { @@ -49,7 +49,7 @@ module.exports = config => { searchParams }) - for await (const file of ndjson(toIterable(res.body))) { + for await (const file of ndjson(toAsyncIterable(res))) { yield toCamel(file) } } diff --git a/packages/ipfs-http-client/src/refs/local.js b/packages/ipfs-http-client/src/refs/local.js index 98e0fce405..da927edead 100644 --- a/packages/ipfs-http-client/src/refs/local.js +++ b/packages/ipfs-http-client/src/refs/local.js @@ -2,7 +2,7 @@ const configure = require('../lib/configure') const ndjson = require('iterable-ndjson') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') const toCamel = require('../lib/object-to-camel') module.exports = configure(({ ky }) => { @@ -15,7 +15,7 @@ module.exports = configure(({ ky }) => { headers: options.headers }) - for await (const file of ndjson(toIterable(res.body))) { + for await (const file of ndjson(toAsyncIterable(res))) { yield toCamel(file) } } diff --git a/packages/ipfs-http-client/src/repo/gc.js b/packages/ipfs-http-client/src/repo/gc.js index fc60a46bc7..abaf4821d0 100644 --- a/packages/ipfs-http-client/src/repo/gc.js +++ b/packages/ipfs-http-client/src/repo/gc.js @@ -3,7 +3,7 @@ const CID = require('cids') const ndjson = require('iterable-ndjson') const configure = require('../lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') module.exports = configure(({ ky }) => { return async function * gc (peerId, options) { @@ -19,7 +19,7 @@ 
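// A hedged aside on the hunk below (assumed shapes, based on the go-ipfs
// HTTP API rather than anything introduced by this patch): each NDJSON line
// streamed back by `repo/gc` is expected to decode to roughly one of
//
//   { Key: { '/': 'QmSomeHash...' } }   // a block that was collected
//   { Error: 'cannot remove ...' }      // a per-block failure
//
// which is why the loop below maps `gcResult.Error` to `err` and
// `gcResult.Key['/']` to a CID.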
module.exports = configure(({ ky }) => { searchParams }) - for await (const gcResult of ndjson(toIterable(res.body))) { + for await (const gcResult of ndjson(toAsyncIterable(res))) { yield { err: gcResult.Error ? new Error(gcResult.Error) : null, cid: (gcResult.Key || {})['/'] ? new CID(gcResult.Key['/']) : null diff --git a/packages/ipfs-http-client/src/stats/bw.js b/packages/ipfs-http-client/src/stats/bw.js index 12bc6d44a6..5e7271ce09 100644 --- a/packages/ipfs-http-client/src/stats/bw.js +++ b/packages/ipfs-http-client/src/stats/bw.js @@ -3,7 +3,7 @@ const ndjson = require('iterable-ndjson') const Big = require('bignumber.js') const configure = require('../lib/configure') -const toIterable = require('stream-to-it/source') +const toAsyncIterable = require('../lib/stream-to-async-iterable') module.exports = configure(({ ky }) => { return async function * bw (options) { @@ -22,7 +22,7 @@ module.exports = configure(({ ky }) => { searchParams }) - for await (const stats of ndjson(toIterable(res.body))) { + for await (const stats of ndjson(toAsyncIterable(res))) { yield { totalIn: new Big(stats.TotalIn), totalOut: new Big(stats.TotalOut), diff --git a/packages/ipfs-http-client/test/lib.stream-to-async-iterable.spec.js b/packages/ipfs-http-client/test/lib.stream-to-async-iterable.spec.js new file mode 100644 index 0000000000..4e0d0a2a09 --- /dev/null +++ b/packages/ipfs-http-client/test/lib.stream-to-async-iterable.spec.js @@ -0,0 +1,71 @@ +/* eslint-env mocha */ +'use strict' + +const { expect } = require('interface-ipfs-core/src/utils/mocha') +const toAsyncIterable = require('../src/lib/stream-to-async-iterable') + +describe('lib/stream-to-async-iterable', () => { + it('should return input if already async iterable', () => { + const input = { + [Symbol.asyncIterator] () { + return this + } + } + const res = { body: input } + expect(toAsyncIterable(res)).to.equal(input) + }) + + it('should convert reader to async iterable', async () => { + const inputData = [2, 31, 3, 4] + + const input = { + getReader () { + let i = 0 + return { + read () { + return Promise.resolve( + i === inputData.length + ? 
{ done: true } + : { value: inputData[i++] } + ) + }, + releaseLock () { } + } + } + } + const res = { body: input } + + const chunks = [] + for await (const chunk of toAsyncIterable(res)) { + chunks.push(chunk) + } + + expect(chunks).to.eql(inputData) + }) + + it('should return an async iterable even if res.body is undefined', async () => { + const inputData = [2] + const res = { + arrayBuffer () { + return Promise.resolve(inputData[0]) + } + } + + const chunks = [] + for await (const chunk of toAsyncIterable(res)) { + chunks.push(chunk) + } + + expect(chunks).to.eql(inputData) + }) + + it('should throw if res.body and res.arrayBuffer are undefined', () => { + const res = {} + expect(() => toAsyncIterable(res)).to.throw('Neither Response.body nor Response.arrayBuffer is defined') + }) + + it('should throw on unknown stream', () => { + const res = { body: {} } + expect(() => toAsyncIterable(res)).to.throw('unknown stream') + }) +}) \ No newline at end of file From 8c87db28ebe99a1d4008febbfd2a8f516e85c3ce Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 15:58:36 -0500 Subject: [PATCH 15/20] package.json newline at end --- examples/browser-parceljs/package.json | 2 +- examples/custom-libp2p/package.json | 2 +- package.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/browser-parceljs/package.json b/examples/browser-parceljs/package.json index 4014ebc0db..9a2aefbb4d 100644 --- a/examples/browser-parceljs/package.json +++ b/examples/browser-parceljs/package.json @@ -31,4 +31,4 @@ "standard": "^13.1.0", "test-ipfs-example": "^1.0.0" } -} \ No newline at end of file +} diff --git a/examples/custom-libp2p/package.json b/examples/custom-libp2p/package.json index 35b6d23faa..be3e3dce17 100644 --- a/examples/custom-libp2p/package.json +++ b/examples/custom-libp2p/package.json @@ -24,4 +24,4 @@ "execa": "^3.2.0", "test-ipfs-example": "^1.0.0" } -} \ No newline at end of file +} diff --git a/package.json b/package.json index c0128db7c9..3b6f085636 100644 --- a/package.json +++ b/package.json @@ -204,4 +204,4 @@ "hapsody ", "isan_rivkin " ] -} \ No newline at end of file +} From 7c3496c1daa1f02bfe42c8a80765aed68ba5050c Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 16:10:02 -0500 Subject: [PATCH 16/20] Missing newlines --- packages/ipfs-http-client/src/lib/stream-to-async-iterable.js | 2 +- .../ipfs-http-client/test/lib.stream-to-async-iterable.spec.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/ipfs-http-client/src/lib/stream-to-async-iterable.js b/packages/ipfs-http-client/src/lib/stream-to-async-iterable.js index 6121b4f351..cdd6b36c6f 100644 --- a/packages/ipfs-http-client/src/lib/stream-to-async-iterable.js +++ b/packages/ipfs-http-client/src/lib/stream-to-async-iterable.js @@ -24,4 +24,4 @@ module.exports = function toAsyncIterable (res) { } return toAsyncIterableOriginal(body) -} \ No newline at end of file +} diff --git a/packages/ipfs-http-client/test/lib.stream-to-async-iterable.spec.js b/packages/ipfs-http-client/test/lib.stream-to-async-iterable.spec.js index 4e0d0a2a09..2351198d4d 100644 --- a/packages/ipfs-http-client/test/lib.stream-to-async-iterable.spec.js +++ b/packages/ipfs-http-client/test/lib.stream-to-async-iterable.spec.js @@ -68,4 +68,4 @@ describe('lib/stream-to-async-iterable', () => { const res = { body: {} } expect(() => toAsyncIterable(res)).to.throw('unknown stream') }) -}) \ No newline at end of file +}) From b62022bc335bcb0ecc0af92451cbea5fc51dc136 Mon Sep 17 00:00:00 
2001
From: Paul Cowgill
Date: Mon, 9 Mar 2020 16:21:51 -0500
Subject: [PATCH 17/20] Including local changes to ipfs-multipart

---
 packages/ipfs-multipart/src/it-multipart.js | 195 ++++++++++++++++++++
 packages/ipfs-multipart/src/parser.js       |  15 +-
 packages/ipfs/package.json                  |   2 +-
 3 files changed, 210 insertions(+), 2 deletions(-)
 create mode 100644 packages/ipfs-multipart/src/it-multipart.js

diff --git a/packages/ipfs-multipart/src/it-multipart.js b/packages/ipfs-multipart/src/it-multipart.js
new file mode 100644
index 0000000000..08a4c8d379
--- /dev/null
+++ b/packages/ipfs-multipart/src/it-multipart.js
@@ -0,0 +1,195 @@
+'use strict'
+
+const bIndexOf = require('buffer-indexof')
+const parseHeaders = require('parse-headers')
+
+module.exports = multipart
+
+async function* multipart(stream, boundary) {
+  if (!boundary) {
+    if (
+      stream &&
+      stream.headers &&
+      stream.headers['content-type'] &&
+      stream.headers['content-type'].includes('boundary')
+    ) {
+      boundary = stream.headers['content-type'].split('boundary=')[1].trim()
+    } else {
+      throw new Error('Not a multipart request')
+    }
+  }
+
+  boundary = `--${boundary}`
+  const headerEnd = Buffer.from('\r\n\r\n')
+
+  // allow pushing data back into stream
+  stream = prefixStream(stream)
+
+  // consume initial boundary
+  await consumeUntilAfter(stream, Buffer.from(boundary))
+
+  for await (const chunk of stream) {
+    console.log('multipart chunk', chunk)
+    stream.push(chunk)
+
+    const headers = (await collect(yieldUntilAfter(stream, headerEnd))).toString()
+
+    console.log({ headers })
+
+    // the final boundary has `--\r\n` appended to it
+    if (headers === '--\r\n') {
+      console.log('hit final boundary')
+      return
+    }
+
+    const yieldUntilAfterResult = yieldUntilAfter(stream, Buffer.from(`\r\n${boundary}`))
+    console.log({ yieldUntilAfterResult })
+
+    // Just for logging
+    for await (const yieldUntilAfterResultChunk of yieldUntilAfterResult) {
+      console.log({ yieldUntilAfterResultChunk })
+    }
+    // end of extra logging code
+
+    // wait for this part's body to be consumed before we try reading the next one
+    const result = waitForStreamToBeConsumed(yieldUntilAfterResult)
+
+    console.log({ result })
+
+    const part = {
+      headers: parseHeaders(headers),
+      body: result.iterator
+      // body: yieldUntilAfter(stream, Buffer.from(`\r\n${boundary}`))
+    }
+
+    yield part
+
+    await result.complete
+  }
+}
+
+// yield chunks of buffer until the needle is reached;
consume the needle without yielding it +async function* yieldUntilAfter(haystack, needle) { + console.log('yieldUntilAfter') + let buffer = Buffer.alloc(0) + + for await (const chunk of haystack) { + console.log('yieldUntilAfter chunk', chunk) + console.log(chunk.toString('utf8')) + buffer = Buffer.concat([buffer, chunk], buffer.length + chunk.length) // slow + + const index = bIndexOf(buffer, needle) + + if (index !== -1) { + console.log('&&&&&&') + console.log('found needle!', needle) + + // found needle + if (index > 0) { + yield buffer.slice(0, index) + } + + // consume needle but preserve rest of chunk + haystack.push(buffer.slice(index + needle.length)) + + return + } else { + console.log('????????') + console.log('did NOT find needle!') + } + + if (buffer.length > needle.length) { + // can emit the beginning chunk as it does not contain the needle + yield buffer.slice(0, buffer.length - needle.length) + + // cache the rest for next time + buffer = buffer.slice(buffer.length - needle.length) + } + } + + // yield anything left over + if (buffer.length) { + yield buffer + } +} + +async function consumeUntilAfter(haystack, needle) { + for await (const chunk of yieldUntilAfter(haystack, needle)) { + console.log('consumeUntilAfter chunk', chunk) + // eslint-disable-line no-unused-vars + } +} + +// a stream that lets us push content back into it for consumption elsewhere +function prefixStream(stream) { + const buffer = [] + const streamIterator = stream[Symbol.asyncIterator]() + + const iterator = { + [Symbol.asyncIterator]: () => { + return iterator + }, + next: () => { + if (buffer.length) { + return { + done: false, + value: buffer.shift() + } + } + + return streamIterator.next() + }, + push: function(buf) { + buffer.push(buf) + } + } + + return iterator +} + +function waitForStreamToBeConsumed(stream) { + let pending + const complete = new Promise((resolve, reject) => { + pending = { + resolve, + reject + } + }) + const streamIterator = stream[Symbol.asyncIterator]() + + const iterator = { + [Symbol.asyncIterator]: () => { + return iterator + }, + next: async () => { + try { + const next = await streamIterator.next() + + if (next.done) { + pending.resolve() + } + + return next + } catch (err) { + pending.reject(err) + } + } + } + + return { + complete, + iterator + } +} + +const collect = async (stream) => { + const buffers = [] + let size = 0 + + for await (const buf of stream) { + size += buf.length + buffers.push(buf) + } + + return Buffer.concat(buffers, size) +} diff --git a/packages/ipfs-multipart/src/parser.js b/packages/ipfs-multipart/src/parser.js index 922ee33256..0d7ee5f8ef 100644 --- a/packages/ipfs-multipart/src/parser.js +++ b/packages/ipfs-multipart/src/parser.js @@ -1,7 +1,7 @@ 'use strict' const Content = require('@hapi/content') -const multipart = require('it-multipart') +const multipart = require('./it-multipart') const multipartFormdataType = 'multipart/form-data' const applicationDirectory = 'application/x-directory' @@ -46,6 +46,14 @@ const ignore = async (stream) => { async function * parseEntry (stream, options) { for await (const part of stream) { + console.log('########') + console.log({ part }) + + // Just for logging + for await (const parseEntryChunk of part.body) { + console.log({ parseEntryChunk }) + } + // end of extra logging code if (!part.headers['content-type']) { throw new Error('No content-type in multipart part') } @@ -92,6 +100,7 @@ async function * parseEntry (stream, options) { const disposition = 
parseDisposition(part.headers['content-disposition']) + console.log({ disposition }) entry.name = decodeURIComponent(disposition.filename) entry.body = part.body @@ -100,7 +109,11 @@ async function * parseEntry (stream, options) { } async function * parser (stream, options) { + // console.log({ stream }); + console.log('boundary', options.boundary) for await (const entry of parseEntry(multipart(stream, options.boundary), options)) { + console.log('*********') + console.log({ entry }) if (entry.type === 'directory') { yield { type: 'directory', diff --git a/packages/ipfs/package.json b/packages/ipfs/package.json index 3166f994ea..84e5f1e69a 100644 --- a/packages/ipfs/package.json +++ b/packages/ipfs/package.json @@ -98,7 +98,7 @@ "ipfs-http-client": "^42.0.0", "ipfs-http-response": "^0.5.0", "ipfs-mfs": "^1.0.0", - "ipfs-multipart": "file:/Users/paulcowgill/Code/contract-work/3box/js-ipfs/packages/ipfs/.connect-deps-cache/ipfs-multipart-0.3.0-1583254040397.tgz", + "ipfs-multipart": "^0.3.0", "ipfs-repo": "^0.30.1", "ipfs-unixfs": "^1.0.0", "ipfs-unixfs-exporter": "^1.0.1", From b5ff4b990ade279b5529d15df00d3264b9da2e39 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Mon, 9 Mar 2020 17:12:36 -0500 Subject: [PATCH 18/20] Fix lint errors --- packages/ipfs-http-client/src/add/index.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/ipfs-http-client/src/add/index.js b/packages/ipfs-http-client/src/add/index.js index d31895d8fb..1146bd91ba 100644 --- a/packages/ipfs-http-client/src/add/index.js +++ b/packages/ipfs-http-client/src/add/index.js @@ -32,9 +32,9 @@ module.exports = configure(({ ky }) => { if (options.preload != null) searchParams.set('preload', options.preload) if (options.fileImportConcurrency != null) searchParams.set('file-import-concurrency', options.fileImportConcurrency) if (options.blockWriteConcurrency != null) searchParams.set('block-write-concurrency', options.blockWriteConcurrency) - + const formData = await toFormData(input) - + const res = await ky.post('add', { timeout: options.timeout, signal: options.signal, From f92f5ed8f03e4a46c6cd364f22a16444dd51e524 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Tue, 10 Mar 2020 16:24:32 -0500 Subject: [PATCH 19/20] Moved ipfs-http-client and ipfs-utils logs to monorepo --- .../src/add/form-data.browser.js | 44 ++++++++++++++++++- .../ipfs-http-client/src/add/form-data.js | 1 + packages/ipfs-http-client/src/add/index.js | 23 +++++++++- packages/ipfs-http-client/src/dag/put.js | 1 + .../ipfs-utils/src/files/normalise-input.js | 10 +++++ 5 files changed, 77 insertions(+), 2 deletions(-) diff --git a/packages/ipfs-http-client/src/add/form-data.browser.js b/packages/ipfs-http-client/src/add/form-data.browser.js index 484f67c6c2..17d5b3a316 100644 --- a/packages/ipfs-http-client/src/add/form-data.browser.js +++ b/packages/ipfs-http-client/src/add/form-data.browser.js @@ -5,14 +5,20 @@ const normaliseInput = require('ipfs-utils/src/files/normalise-input') const mtimeToObject = require('../lib/mtime-to-object') exports.toFormData = async input => { + console.log("toFormData browser"); const files = normaliseInput(input) + console.log({ files }) const formData = new FormData() + + // console.log({ formData }) let i = 0 for await (const file of files) { + console.log({ file }) const headers = {} if (file.mtime !== undefined && file.mtime !== null) { + console.log("file.mtime !== undefined && file.mtime !== null") const mtime = mtimeToObject(file.mtime) if (mtime) { @@ -22,29 +28,65 @@ exports.toFormData = 
async input => { } if (file.mode !== undefined && file.mode !== null) { + console.log("file.mode !== undefined && file.mode !== null") headers.mode = file.mode.toString(8).padStart(4, '0') } if (file.content) { + console.log("file.content", file.content) // In the browser there's _currently_ no streaming upload, buffer up our // async iterator chunks and append a big Blob :( // One day, this will be browser streams const bufs = [] for await (const chunk of file.content) { + console.log("file.content chunk", chunk) bufs.push(chunk) } - formData.append(`file-${i}`, new Blob(bufs, { type: 'application/octet-stream' }), encodeURIComponent(file.path), { + const newBlob = new Blob(bufs, { type: 'application/octet-stream' }); + console.log({ newBlob }) + const encodedUriComponent = encodeURIComponent(file.path); + + formData.append(`file-${i}`, newBlob, encodedUriComponent, { header: headers }) + + if (newBlob.data) { + const newBlobData = newBlob.data; + console.log("newBlob data", newBlobData); + } } else { formData.append(`dir-${i}`, new Blob([], { type: 'application/x-directory' }), encodeURIComponent(file.path), { header: headers }) } + const keyToGet = 'file-0' + + if (formData.entries) { + const formDataEntries = formData.entries(); + for(var pair of formDataEntries) { + console.log(pair[0]+ ', '+ pair[1]); + } + const field = formData.get(keyToGet) + console.log({ field }) + } + + if (formData.getParts) { + const formDataParts = formData.getParts(); + console.log({ formDataParts }) + const field = formDataParts.find(item => item.fieldName === keyToGet); + if (field) { + console.log({ field }) + } + } + i++ } + // for (var p of formData.entries()) { + // console.log({ p }) + // } + return formData } diff --git a/packages/ipfs-http-client/src/add/form-data.js b/packages/ipfs-http-client/src/add/form-data.js index 1ce5050da2..cb6fcfeb89 100644 --- a/packages/ipfs-http-client/src/add/form-data.js +++ b/packages/ipfs-http-client/src/add/form-data.js @@ -8,6 +8,7 @@ const { isElectronRenderer } = require('ipfs-utils/src/env') const mtimeToObject = require('../lib/mtime-to-object') exports.toFormData = async input => { + console.log("toFormData regular NOT USED IN REACT NATIVE"); const files = normaliseInput(input) const formData = new FormData() let i = 0 diff --git a/packages/ipfs-http-client/src/add/index.js b/packages/ipfs-http-client/src/add/index.js index 1146bd91ba..fc211da30b 100644 --- a/packages/ipfs-http-client/src/add/index.js +++ b/packages/ipfs-http-client/src/add/index.js @@ -9,6 +9,7 @@ const toCamel = require('../lib/object-to-camel') module.exports = configure(({ ky }) => { return async function * add (input, options) { + console.log("Add called"); options = options || {} const searchParams = new URLSearchParams(options.searchParams) @@ -33,8 +34,13 @@ module.exports = configure(({ ky }) => { if (options.fileImportConcurrency != null) searchParams.set('file-import-concurrency', options.fileImportConcurrency) if (options.blockWriteConcurrency != null) searchParams.set('block-write-concurrency', options.blockWriteConcurrency) + console.log({ input }) const formData = await toFormData(input) + // console.log({ options }) + // console.log({ searchParams }) + console.log({ formData }) + const res = await ky.post('add', { timeout: options.timeout, signal: options.signal, @@ -43,12 +49,27 @@ module.exports = configure(({ ky }) => { body: formData }) - for await (let file of ndjson(toAsyncIterable(res))) { + console.log({ res }); + + const resAsyncIterable = toAsyncIterable(res); + + 
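// Another hedged aside (assumed wire format, not introduced by this patch):
// `resAsyncIterable` yields the raw bytes of an NDJSON stream, and after
// `ndjson(...)` each parsed chunk from go-ipfs `add` looks roughly like
//
//   { Name: 'file.txt', Bytes: 1024 }                  // progress update
//   { Name: 'file.txt', Hash: 'Qm...', Size: '1053' }  // final entry
//
// After `toCamel` these become `file.bytes` / `file.hash`, which is what the
// branching below on `options.progress && file.bytes` relies on.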
console.log({ resAsyncIterable }); + + const ndjsonResAsyncIterable = ndjson(resAsyncIterable) + + console.log({ ndjsonResAsyncIterable }); + + for await (let file of ndjsonResAsyncIterable) { + console.log({ file }); file = toCamel(file) + console.log("toCamelifiedFile", file); + if (options.progress && file.bytes) { + console.log("options.progress && file.bytes"); options.progress(file.bytes) } else { + console.log("else"); yield toCoreInterface(file) } } diff --git a/packages/ipfs-http-client/src/dag/put.js b/packages/ipfs-http-client/src/dag/put.js index d26f8ba9e4..3b770005a8 100644 --- a/packages/ipfs-http-client/src/dag/put.js +++ b/packages/ipfs-http-client/src/dag/put.js @@ -8,6 +8,7 @@ const toFormData = require('../lib/buffer-to-form-data') module.exports = configure(({ ky }) => { return async (dagNode, options) => { + console.log("Using monorepo version") options = options || {} if (options.hash) { diff --git a/packages/ipfs-utils/src/files/normalise-input.js b/packages/ipfs-utils/src/files/normalise-input.js index fb892692fa..2e0bab445f 100644 --- a/packages/ipfs-utils/src/files/normalise-input.js +++ b/packages/ipfs-utils/src/files/normalise-input.js @@ -62,6 +62,7 @@ module.exports = function normaliseInput (input) { // Buffer|ArrayBuffer|TypedArray // Blob|File if (isBytes(input) || isBloby(input)) { + console.log('Bytes or blob') return (async function * () { // eslint-disable-line require-await yield toFileObject(input) })() @@ -152,6 +153,7 @@ module.exports = function normaliseInput (input) { } function toFileObject (input) { + console.log('toFileObject input', input) const obj = { path: input.path || '', mode: input.mode, @@ -159,17 +161,22 @@ function toFileObject (input) { } if (input.content) { + console.log('input.content', input.content) obj.content = toAsyncIterable(input.content) } else if (!input.path) { // Not already a file object with path or content prop + console.log('Not file object yet') obj.content = toAsyncIterable(input) } + console.log('obj.content', obj.content) + return obj } function toAsyncIterable (input) { // Bytes | String if (isBytes(input) || typeof input === 'string') { + console.log('bytes') return (async function * () { // eslint-disable-line require-await yield toBuffer(input) })() @@ -177,9 +184,12 @@ function toAsyncIterable (input) { // Bloby if (isBloby(input)) { + console.log('bloby') return blobToAsyncGenerator(input) } + console.log('other') + // Browser stream if (typeof input.getReader === 'function') { return browserStreamToIt(input) From 094161b0d15e9920d22b750aee6567e30321dc46 Mon Sep 17 00:00:00 2001 From: Paul Cowgill Date: Wed, 11 Mar 2020 15:08:27 -0500 Subject: [PATCH 20/20] Logging monorepo version usage and function name --- packages/ipfs-http-client/src/add/index.js | 1 + packages/ipfs-http-client/src/dag/put.js | 1 + 2 files changed, 2 insertions(+) diff --git a/packages/ipfs-http-client/src/add/index.js b/packages/ipfs-http-client/src/add/index.js index fc211da30b..dee8f6a365 100644 --- a/packages/ipfs-http-client/src/add/index.js +++ b/packages/ipfs-http-client/src/add/index.js @@ -10,6 +10,7 @@ const toCamel = require('../lib/object-to-camel') module.exports = configure(({ ky }) => { return async function * add (input, options) { console.log("Add called"); + console.log("Using monorepo version") options = options || {} const searchParams = new URLSearchParams(options.searchParams) diff --git a/packages/ipfs-http-client/src/dag/put.js b/packages/ipfs-http-client/src/dag/put.js index 3b770005a8..a86a9a9a10 100644 
--- a/packages/ipfs-http-client/src/dag/put.js +++ b/packages/ipfs-http-client/src/dag/put.js @@ -8,6 +8,7 @@ const toFormData = require('../lib/buffer-to-form-data') module.exports = configure(({ ky }) => { return async (dagNode, options) => { + console.log("dag.put called") console.log("Using monorepo version") options = options || {}
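For anyone reproducing this debugging session, a minimal sketch of how the patched code paths can be exercised follows. It assumes the client is built from this branch and a daemon is listening on 127.0.0.1:5001; the address and the sample payloads are illustrative, not part of the patches.

const ipfsClient = require('ipfs-http-client')

// Assumed local daemon address — adjust as needed
const ipfs = ipfsClient('http://127.0.0.1:5001')

async function main () {
  // add() is an async iterable in this version of the client, so iterating
  // it should fire the console.log calls patched into add/index.js,
  // add/form-data.browser.js and files/normalise-input.js
  for await (const file of ipfs.add('hello world')) {
    console.log('added', file.path, file.cid.toString())
  }

  // dag.put() goes through the patched dag/put.js, which now logs
  // "dag.put called" and "Using monorepo version" before the request fires
  const cid = await ipfs.dag.put({ hello: 'world' }, { format: 'dag-cbor', hashAlg: 'sha2-256' })
  console.log('dag.put stored', cid.toString())
}

main().catch(console.error)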