From fbf352a43059131ebc537c1cd445fd51aef2505b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Jan 2024 10:23:17 +0000 Subject: [PATCH 1/2] deps(dev): bump aegir from 41.3.5 to 42.2.2 Bumps [aegir](https://github.com/ipfs/aegir) from 41.3.5 to 42.2.2. - [Release notes](https://github.com/ipfs/aegir/releases) - [Changelog](https://github.com/ipfs/aegir/blob/master/CHANGELOG.md) - [Commits](https://github.com/ipfs/aegir/compare/v41.3.5...v42.2.2) --- updated-dependencies: - dependency-name: aegir dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- package.json | 2 +- packages/ipfs-unixfs-exporter/package.json | 2 +- packages/ipfs-unixfs-importer/package.json | 2 +- packages/ipfs-unixfs/package.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package.json b/package.json index c56f488f..c90cf116 100644 --- a/package.json +++ b/package.json @@ -36,7 +36,7 @@ "release": "npm run docs:no-publish && aegir run release && npm run docs" }, "devDependencies": { - "aegir": "^41.3.2" + "aegir": "^42.2.2" }, "workspaces": [ "packages/*" diff --git a/packages/ipfs-unixfs-exporter/package.json b/packages/ipfs-unixfs-exporter/package.json index 08581f4d..520dbd91 100644 --- a/packages/ipfs-unixfs-exporter/package.json +++ b/packages/ipfs-unixfs-exporter/package.json @@ -157,7 +157,7 @@ "devDependencies": { "@types/readable-stream": "^4.0.1", "@types/sinon": "^17.0.2", - "aegir": "^41.3.2", + "aegir": "^42.2.2", "blockstore-core": "^4.0.1", "delay": "^6.0.0", "ipfs-unixfs-importer": "^15.0.0", diff --git a/packages/ipfs-unixfs-importer/package.json b/packages/ipfs-unixfs-importer/package.json index 4ddf2116..213c03f2 100644 --- a/packages/ipfs-unixfs-importer/package.json +++ b/packages/ipfs-unixfs-importer/package.json @@ -177,7 +177,7 @@ "uint8arrays": "^5.0.0" }, "devDependencies": { - "aegir": "^41.3.2", + "aegir": "^42.2.2", 
"blockstore-core": "^4.0.1", "it-last": "^3.0.2", "wherearewe": "^2.0.1" diff --git a/packages/ipfs-unixfs/package.json b/packages/ipfs-unixfs/package.json index 3a92aafa..33ede90f 100644 --- a/packages/ipfs-unixfs/package.json +++ b/packages/ipfs-unixfs/package.json @@ -144,7 +144,7 @@ "uint8arraylist": "^2.4.3" }, "devDependencies": { - "aegir": "^41.3.2", + "aegir": "^42.2.2", "protons": "^7.0.2", "uint8arrays": "^5.0.0" }, From 2a1f8adc6f2c6ad50b717cf7ce99b599b79ffecb Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 19 Jan 2024 08:18:21 +0100 Subject: [PATCH 2/2] chore: update docs, deps and types --- .github/dependabot.yml | 2 +- .github/workflows/js-test-and-release.yml | 2 + .github/workflows/semantic-pull-request.yml | 12 + .npmrc | 2 - README.md | 53 +--- package.json | 91 ++++++- packages/ipfs-unixfs-exporter/README.md | 194 +-------------- packages/ipfs-unixfs-exporter/package.json | 101 +------- packages/ipfs-unixfs-exporter/src/index.ts | 230 ++++++++++++++++++ packages/ipfs-unixfs-exporter/typedoc.json | 6 + packages/ipfs-unixfs-importer/README.md | 78 +----- packages/ipfs-unixfs-importer/package.json | 97 +------- .../ipfs-unixfs-importer/src/chunker/index.ts | 9 + packages/ipfs-unixfs-importer/src/index.ts | 64 +++++ .../ipfs-unixfs-importer/src/layout/index.ts | 8 + packages/ipfs-unixfs-importer/typedoc.json | 7 + packages/ipfs-unixfs/README.md | 126 +++------- packages/ipfs-unixfs/package.json | 97 +------- packages/ipfs-unixfs/src/index.ts | 92 +++++++ packages/ipfs-unixfs/typedoc.json | 6 + typedoc.json | 7 + 21 files changed, 604 insertions(+), 680 deletions(-) create mode 100644 .github/workflows/semantic-pull-request.yml delete mode 100644 .npmrc create mode 100644 packages/ipfs-unixfs-exporter/typedoc.json create mode 100644 packages/ipfs-unixfs-importer/typedoc.json create mode 100644 packages/ipfs-unixfs/typedoc.json create mode 100644 typedoc.json diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 0bc3b42d..d401a774 
100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,7 +5,7 @@ updates: schedule: interval: daily time: "10:00" - open-pull-requests-limit: 10 + open-pull-requests-limit: 20 commit-message: prefix: "deps" prefix-development: "deps(dev)" diff --git a/.github/workflows/js-test-and-release.yml b/.github/workflows/js-test-and-release.yml index 2c7a14bb..359eb975 100644 --- a/.github/workflows/js-test-and-release.yml +++ b/.github/workflows/js-test-and-release.yml @@ -9,7 +9,9 @@ on: permissions: contents: write + id-token: write packages: write + pull-requests: write concurrency: group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'push' && github.sha || github.ref }} diff --git a/.github/workflows/semantic-pull-request.yml b/.github/workflows/semantic-pull-request.yml new file mode 100644 index 00000000..bd00f090 --- /dev/null +++ b/.github/workflows/semantic-pull-request.yml @@ -0,0 +1,12 @@ +name: Semantic PR + +on: + pull_request_target: + types: + - opened + - edited + - synchronize + +jobs: + main: + uses: pl-strflt/.github/.github/workflows/reusable-semantic-pull-request.yml@v0.3 diff --git a/.npmrc b/.npmrc deleted file mode 100644 index c5ebf5e2..00000000 --- a/.npmrc +++ /dev/null @@ -1,2 +0,0 @@ -; package-lock with tarball deps breaks lerna/nx - remove when https://github.com/semantic-release/github/pull/487 is merged -package-lock=false diff --git a/README.md b/README.md index be9ec3e4..a617dbb7 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,3 @@ -# js-ipfs-unixfs - [![ipfs.tech](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](https://ipfs.tech) [![Discuss](https://img.shields.io/discourse/https/discuss.ipfs.tech/posts.svg?style=flat-square)](https://discuss.ipfs.tech) [![codecov](https://img.shields.io/codecov/c/github/ipfs/js-ipfs-unixfs.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-ipfs-unixfs) @@ -7,67 +5,26 @@ > JS implementation of the IPFS UnixFS -## Table of contents - 
-- [Structure](#structure) -- [Lead Maintainer ](#lead-maintainer----omit-in-toc---) -- [Development](#development) - - [Publishing new versions](#publishing-new-versions) - - [Using prerelease versions](#using-prerelease-versions) -- [API Docs](#api-docs) -- [License](#license) -- [Contribute](#contribute) +The UnixFS spec can be found at [ipfs/specs/UNIXFS.md](https://github.com/ipfs/specs/blob/master/UNIXFS.md) -## Structure +# Packages - [`/packages/ipfs-unixfs`](./packages/ipfs-unixfs) JavaScript implementation of IPFS' unixfs (a Unix FileSystem representation on top of a MerkleDAG) - [`/packages/ipfs-unixfs-exporter`](./packages/ipfs-unixfs-exporter) JavaScript implementation of the UnixFs exporter used by IPFS - [`/packages/ipfs-unixfs-importer`](./packages/ipfs-unixfs-importer) JavaScript implementation of the UnixFs importer used by IPFS -The UnixFS spec can be found at [ipfs/specs/UNIXFS.md](https://github.com/ipfs/specs/blob/master/UNIXFS.md) - -## Lead Maintainer - -[Alex Potsides](https://github.com/achingbrain) - -- [`/packages/ipfs-unixfs`](./packages/ipfs-unixfs) Serialization/deserialization of UnixFS objects to protocol buffers -- [`/packages/ipfs-unixfs-importer`](./packages/ipfs-unixfs-importer) Builds DAGs from files and directories -- [`/packages/ipfs-unixfs-exporter`](./packages/ipfs-unixfs-exporter) Exports DAGs - -## Development - -1. Clone this repo -2. Run `npm install` - -This will install [lerna](https://www.npmjs.com/package/lerna) and bootstrap the various packages, dedpuing and hoisting dependencies into the root folder. - -If later you wish to remove all the `node_modules`/`dist` folders and start again, run `npm run reset && npm install` from the root. - -See the scripts section of the root [`package.json`](./package.json) for more commands. - -### Publishing new versions - -1. Ensure you have a `GH_TOKEN` env var containing a GitHub [Personal Access Token](https://github.com/settings/tokens) with `public_repo` permissions -2. 
From the root of this repo run `npm run release` and follow the on screen prompts. It will use [conventional commits](https://www.conventionalcommits.org) to work out the new package version - -### Using prerelease versions - -Any changed packages from each successful build of master are published to npm as canary builds under the npm tag `next`. - -Canary builds only consider changes to packages in the last built commit so changes to the root config files should not result in new prereleases being published to npm. - -## API Docs +# API Docs - -## License +# License Licensed under either of - Apache 2.0, ([LICENSE-APACHE](LICENSE-APACHE) / ) - MIT ([LICENSE-MIT](LICENSE-MIT) / ) -## Contribute +# Contribute Contributions welcome! Please check out [the issues](https://github.com/ipfs/js-ipfs-unixfs/issues). diff --git a/package.json b/package.json index c90cf116..5299dfe0 100644 --- a/package.json +++ b/package.json @@ -11,10 +11,6 @@ "bugs": { "url": "https://github.com/ipfs/js-ipfs-unixfs/issues" }, - "engines": { - "node": ">=16.0.0", - "npm": ">=7.0.0" - }, "private": true, "scripts": { "reset": "aegir run clean && aegir clean ./node_modules ./package-lock.json packages/*/node_modules packages/*/package-lock.json", @@ -40,5 +36,90 @@ }, "workspaces": [ "packages/*" - ] + ], + "release": { + "branches": [ + "main" + ], + "plugins": [ + [ + "@semantic-release/commit-analyzer", + { + "preset": "conventionalcommits", + "releaseRules": [ + { + "breaking": true, + "release": "major" + }, + { + "revert": true, + "release": "patch" + }, + { + "type": "feat", + "release": "minor" + }, + { + "type": "fix", + "release": "patch" + }, + { + "type": "docs", + "release": "patch" + }, + { + "type": "test", + "release": "patch" + }, + { + "type": "deps", + "release": "patch" + }, + { + "scope": "no-release", + "release": false + } + ] + } + ], + [ + "@semantic-release/release-notes-generator", + { + "preset": "conventionalcommits", + "presetConfig": { + "types": [ + { + "type": 
"feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "chore", + "section": "Trivial Changes" + }, + { + "type": "docs", + "section": "Documentation" + }, + { + "type": "deps", + "section": "Dependencies" + }, + { + "type": "test", + "section": "Tests" + } + ] + } + } + ], + "@semantic-release/changelog", + "@semantic-release/npm", + "@semantic-release/github", + "@semantic-release/git" + ] + } } diff --git a/packages/ipfs-unixfs-exporter/README.md b/packages/ipfs-unixfs-exporter/README.md index 66655aa0..09e866df 100644 --- a/packages/ipfs-unixfs-exporter/README.md +++ b/packages/ipfs-unixfs-exporter/README.md @@ -7,36 +7,9 @@ > JavaScript implementation of the UnixFs exporter used by IPFS -## Table of contents - -- [Install](#install) - - [Browser ` -``` +The UnixFS Exporter provides a means to read DAGs from a blockstore given a CID. ## Example @@ -79,175 +52,32 @@ for await (const buf of entry.content()) { console.info(bytes) // 0, 1, 2, 3 ``` -## API - -```js -import { exporter } from 'ipfs-unixfs-exporter' -``` - -### `exporter(cid, blockstore, options)` - -Uses the given [blockstore][] instance to fetch an IPFS node by it's CID. - -Returns a Promise which resolves to a `UnixFSEntry`. 
- -`options` is an optional object argument that might include the following keys: - -- `signal` ([AbortSignal](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal)): Used to cancel any network requests that are initiated as a result of this export - -### UnixFSEntry +# Install -```javascript -{ - type: 'file' // or 'directory' - name: 'foo.txt', - path: 'Qmbar/foo.txt', - cid: CID, // see https://github.com/multiformats/js-cid - content: function, // returns an async iterator - unixfs: UnixFS // see https://github.com/ipfs/js-ipfs-unixfs -} -``` - -If the entry is a file, `entry.content()` returns an async iterator that yields one or more Uint8Arrays containing the file content: - -```javascript -if (entry.type === 'file') { - for await (const chunk of entry.content()) { - // chunk is a Buffer - } -} -``` - -If the entry is a directory, `entry.content()` returns further `entry` objects: - -```javascript -if (entry.type === 'directory') { - for await (const entry of dir.content()) { - console.info(entry.name) - } -} -``` - -### Raw entries - -Entries with a `raw` codec `CID` return raw entries: - -```javascript -{ - name: 'foo.txt', - path: 'Qmbar/foo.txt', - cid: CID, // see https://github.com/multiformats/js-cid - node: Buffer, // see https://nodejs.org/api/buffer.html - content: function, // returns an async iterator -} -``` - -`entry.content()` returns an async iterator that yields a buffer containing the node content: - -```javascript -for await (const chunk of entry.content()) { - // chunk is a Buffer -} -``` - -Unless you an options object containing `offset` and `length` keys as an argument to `entry.content()`, `chunk` will be equal to `entry.node`. 
- -### CBOR entries - -Entries with a `dag-cbor` codec `CID` return JavaScript object entries: - -```javascript -{ - name: 'foo.txt', - path: 'Qmbar/foo.txt', - cid: CID, // see https://github.com/multiformats/js-cid - node: Uint8Array, - content: function // returns an async iterator that yields a single object - see https://github.com/ipld/js-ipld-dag-cbor -} -``` - -There is no `content` function for a `CBOR` node. - -### `entry.content({ offset, length })` - -When `entry` is a file or a `raw` node, `offset` and/or `length` arguments can be passed to `entry.content()` to return slices of data: - -```javascript -const length = 5 -const data = new Uint8Array(length) -let offset = 0 - -for await (const chunk of entry.content({ - offset: 0, - length -})) { - data.set(chunk, offset) - offset += chunk.length -} - -// `data` contains the first 5 bytes of the file -return data -``` - -If `entry` is a directory, passing `offset` and/or `length` to `entry.content()` will limit the number of files returned from the directory. - -```javascript -const entries = [] - -for await (const entry of dir.content({ - offset: 0, - length: 5 -})) { - entries.push(entry) -} - -// `entries` contains the first 5 files/directories in the directory -``` - -### `walkPath(cid, blockstore)` - -`walkPath` will return an async iterator that yields entries for all segments in a path: - -```javascript -import { walkPath } from 'ipfs-unixfs-exporter' - -const entries = [] - -for await (const entry of walkPath('Qmfoo/foo/bar/baz.txt', blockstore)) { - entries.push(entry) -} - -// entries contains 4x `entry` objects +```console +$ npm i ipfs-unixfs-exporter ``` -### `recursive(cid, blockstore)` - -`recursive` will return an async iterator that yields all entries beneath a given CID or IPFS path, as well as the containing directory. 
- -```javascript -import { recursive } from 'ipfs-unixfs-exporter' - -const entries = [] +## Browser ` ``` -## API Docs +# API Docs - -## License +# License Licensed under either of - Apache 2.0, ([LICENSE-APACHE](LICENSE-APACHE) / ) - MIT ([LICENSE-MIT](LICENSE-MIT) / ) -## Contribute +# Contribute Contributions welcome! Please check out [the issues](https://github.com/ipfs/js-ipfs-unixfs/issues). diff --git a/packages/ipfs-unixfs-exporter/package.json b/packages/ipfs-unixfs-exporter/package.json index 520dbd91..84d0c8e1 100644 --- a/packages/ipfs-unixfs-exporter/package.json +++ b/packages/ipfs-unixfs-exporter/package.json @@ -11,13 +11,13 @@ "bugs": { "url": "https://github.com/ipfs/js-ipfs-unixfs/issues" }, + "publishConfig": { + "access": "public", + "provenance": true + }, "keywords": [ "IPFS" ], - "engines": { - "node": ">=16.0.0", - "npm": ">=7.0.0" - }, "type": "module", "types": "./dist/src/index.d.ts", "files": [ @@ -35,94 +35,10 @@ "eslintConfig": { "extends": "ipfs", "parserOptions": { + "project": true, "sourceType": "module" } }, - "release": { - "branches": [ - "master" - ], - "plugins": [ - [ - "@semantic-release/commit-analyzer", - { - "preset": "conventionalcommits", - "releaseRules": [ - { - "breaking": true, - "release": "major" - }, - { - "revert": true, - "release": "patch" - }, - { - "type": "feat", - "release": "minor" - }, - { - "type": "fix", - "release": "patch" - }, - { - "type": "docs", - "release": "patch" - }, - { - "type": "test", - "release": "patch" - }, - { - "type": "deps", - "release": "patch" - }, - { - "scope": "no-release", - "release": false - } - ] - } - ], - [ - "@semantic-release/release-notes-generator", - { - "preset": "conventionalcommits", - "presetConfig": { - "types": [ - { - "type": "feat", - "section": "Features" - }, - { - "type": "fix", - "section": "Bug Fixes" - }, - { - "type": "chore", - "section": "Trivial Changes" - }, - { - "type": "docs", - "section": "Documentation" - }, - { - "type": "deps", - 
"section": "Dependencies" - }, - { - "type": "test", - "section": "Tests" - } - ] - } - } - ], - "@semantic-release/changelog", - "@semantic-release/npm", - "@semantic-release/github", - "@semantic-release/git" - ] - }, "scripts": { "test": "aegir test", "test:node": "aegir test -t node --cov", @@ -151,8 +67,7 @@ "it-pushable": "^3.1.0", "multiformats": "^13.0.0", "p-queue": "^8.0.1", - "progress-events": "^1.0.0", - "uint8arrays": "^5.0.0" + "progress-events": "^1.0.0" }, "devDependencies": { "@types/readable-stream": "^4.0.1", @@ -169,13 +84,11 @@ "merge-options": "^3.0.4", "readable-stream": "^4.4.0", "sinon": "^17.0.1", + "uint8arrays": "^5.0.0", "wherearewe": "^2.0.1" }, "browser": { "fs": false, "readable-stream": false - }, - "typedoc": { - "entryPoint": "./src/index.ts" } } diff --git a/packages/ipfs-unixfs-exporter/src/index.ts b/packages/ipfs-unixfs-exporter/src/index.ts index 6566c783..5d31070d 100644 --- a/packages/ipfs-unixfs-exporter/src/index.ts +++ b/packages/ipfs-unixfs-exporter/src/index.ts @@ -1,3 +1,50 @@ +/** + * @packageDocumentation + * + * The UnixFS Exporter provides a means to read DAGs from a blockstore given a CID. 
+ * + * @example + * + * ```js + * // import a file and export it again + * import { importer } from 'ipfs-unixfs-importer' + * import { exporter } from 'ipfs-unixfs-exporter' + * import { MemoryBlockstore } from 'blockstore-core/memory' + * + * // Should contain the blocks we are trying to export + * const blockstore = new MemoryBlockstore() + * const files = [] + * + * for await (const file of importer([{ + * path: '/foo/bar.txt', + * content: new Uint8Array([0, 1, 2, 3]) + * }], blockstore)) { + * files.push(file) + * } + * + * console.info(files[0].cid) // Qmbaz + * + * const entry = await exporter(files[0].cid, blockstore) + * + * console.info(entry.cid) // Qmqux + * console.info(entry.path) // Qmbaz/foo/bar.txt + * console.info(entry.name) // bar.txt + * console.info(entry.unixfs.fileSize()) // 4 + * + * // stream content from unixfs node + * const size = entry.unixfs.fileSize() + * const bytes = new Uint8Array(size) + * let offset = 0 + * + * for await (const buf of entry.content()) { + * bytes.set(buf, offset) + * offset += buf.length + * } + * + * console.info(bytes) // 0, 1, 2, 3 + * ``` + */ + import errCode from 'err-code' import last from 'it-last' import { CID } from 'multiformats/cid' @@ -53,42 +100,162 @@ export interface ExporterOptions extends ProgressOptions } export interface Exportable { + /** + * A disambiguator to allow TypeScript to work out the type of the entry.
+ * + * @example + * + * ```TypeScript + * if (entry.type === 'file') { + * // access UnixFSFile properties safely + * } + * + * if (entry.type === 'directory') { + * // access UnixFSDirectory properties safely + * } + * ``` + */ type: 'file' | 'directory' | 'object' | 'raw' | 'identity' + + /** + * The name of the entry + */ name: string + + /** + * The path of the entry within the DAG in which it was encountered + */ path: string + + /** + * The CID of the entry + */ cid: CID + + /** + * How far down the DAG the entry is + */ depth: number + + /** + * The size of the entry + */ size: bigint + + /** + * When `entry` is a file or a `raw` node, `offset` and/or `length` arguments can be passed to `entry.content()` to return slices of data: + * + * ```javascript + * const length = 5 + * const data = new Uint8Array(length) + * let offset = 0 + * + * for await (const chunk of entry.content({ + * offset: 0, + * length + * })) { + * data.set(chunk, offset) + * offset += chunk.length + * } + * + * // `data` contains the first 5 bytes of the file + * return data + * ``` + * + * If `entry` is a directory, passing `offset` and/or `length` to `entry.content()` will limit the number of files returned from the directory. 
+ * + * ```javascript + * const entries = [] + * + * for await (const entry of dir.content({ + * offset: 0, + * length: 5 + * })) { + * entries.push(entry) + * } + * + * // `entries` contains the first 5 files/directories in the directory + * ``` + * + */ content(options?: ExporterOptions): AsyncGenerator } +/** + * If the entry is a file, `entry.content()` returns an async iterator that yields one or more Uint8Arrays containing the file content: + * + * ```javascript + * if (entry.type === 'file') { + * for await (const chunk of entry.content()) { + * // chunk is a Buffer + * } + * } + * ``` + */ export interface UnixFSFile extends Exportable { type: 'file' unixfs: UnixFS node: PBNode } +/** + * If the entry is a directory, `entry.content()` returns further `entry` objects: + * + * ```javascript + * if (entry.type === 'directory') { + * for await (const entry of dir.content()) { + * console.info(entry.name) + * } + * } + * ``` + */ export interface UnixFSDirectory extends Exportable { type: 'directory' unixfs: UnixFS node: PBNode } +/** + * Entries with a `dag-cbor` or `dag-json` codec {@link CID} return JavaScript object entries + */ export interface ObjectNode extends Exportable { type: 'object' node: Uint8Array } +/** + * Entries with a `raw` codec {@link CID} return raw entries. + * + * `entry.content()` returns an async iterator that yields a buffer containing the node content: + * + * ```javascript + * for await (const chunk of entry.content()) { + * // chunk is a Buffer + * } + * ``` + * + * Unless you pass an options object containing `offset` and `length` keys as an argument to `entry.content()`, `chunk` will be equal to `entry.node`. + */ export interface RawNode extends Exportable { type: 'raw' node: Uint8Array } +/** + * Entries with an `identity` codec {@link CID} return identity entries. + * + * These are entries where the data payload is stored in the CID itself, + * otherwise they are identical to {@link RawNode}s.
+ */ export interface IdentityNode extends Exportable { type: 'identity' node: Uint8Array } +/** + * A UnixFSEntry is a representation of the types of node that can be + * encountered in a DAG. + */ export type UnixFSEntry = UnixFSFile | UnixFSDirectory | ObjectNode | RawNode | IdentityNode export interface NextResult { @@ -117,6 +284,10 @@ export interface ShardTraversalContext { lastBucket: Bucket } +/** + * A subset of the {@link Blockstore} interface that just contains the get + * method. + */ export type ReadableStorage = Pick const toPathComponents = (path: string = ''): string[] => { @@ -159,6 +330,23 @@ const cidAndRest = (path: string | Uint8Array | CID): { cid: CID, toResolve: str throw errCode(new Error(`Unknown path type ${path}`), 'ERR_BAD_PATH') } +/** + * Returns an async iterator that yields entries for all segments in a path + * + * @example + * + * ```javascript + * import { walkPath } from 'ipfs-unixfs-exporter' + * + * const entries = [] + * + * for await (const entry of walkPath('Qmfoo/foo/bar/baz.txt', blockstore)) { + * entries.push(entry) + * } + * + * // entries contains 4x `entry` objects + * ``` + */ export async function * walkPath (path: string | CID, blockstore: ReadableStorage, options: ExporterOptions = {}): AsyncGenerator { let { cid, @@ -191,6 +379,30 @@ export async function * walkPath (path: string | CID, blockstore: ReadableStorag } } +/** + * Uses the given blockstore instance to fetch an IPFS node by a CID or path. + * + * Returns a {@link Promise} which resolves to a {@link UnixFSEntry}. 
+ * + * @example + * + * ```typescript + * import { exporter } from 'ipfs-unixfs-exporter' + * import { CID } from 'multiformats/cid' + * + * const cid = CID.parse('QmFoo') + * + * const entry = await exporter(cid, blockstore, { + * signal: AbortSignal.timeout(50000) + * }) + * + * if (entry.type === 'file') { + * for await (const chunk of entry.content()) { + * // chunk is a Uint8Array + * } + * } + * ``` + */ export async function exporter (path: string | CID, blockstore: ReadableStorage, options: ExporterOptions = {}): Promise { const result = await last(walkPath(path, blockstore, options)) @@ -201,6 +413,24 @@ export async function exporter (path: string | CID, blockstore: ReadableStorage, return result } +/** + * Returns an async iterator that yields all entries beneath a given CID or IPFS + * path, as well as the containing directory. + * + * @example + * + * ```typescript + * import { recursive } from 'ipfs-unixfs-exporter' + * + * const entries = [] + * + * for await (const child of recursive('Qmfoo/foo/bar', blockstore)) { + * entries.push(child) + * } + * + * // entries contains all children of the `Qmfoo/foo/bar` directory and its children + * ``` + */ export async function * recursive (path: string | CID, blockstore: ReadableStorage, options: ExporterOptions = {}): AsyncGenerator { const node = await exporter(path, blockstore, options) diff --git a/packages/ipfs-unixfs-exporter/typedoc.json b/packages/ipfs-unixfs-exporter/typedoc.json new file mode 100644 index 00000000..3be48369 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/typedoc.json @@ -0,0 +1,6 @@ +{ + "entryPoints": [ + "./src/index.ts" + ], + "readme": "none" +} diff --git a/packages/ipfs-unixfs-importer/README.md b/packages/ipfs-unixfs-importer/README.md index 9eb326d9..c8544ca3 100644 --- a/packages/ipfs-unixfs-importer/README.md +++ b/packages/ipfs-unixfs-importer/README.md @@ -7,40 +7,13 @@ > JavaScript implementation of the UnixFs importer used by IPFS -## Table of contents - -- 
[Install](#install) - - [Browser ` -``` +# About ## Example Let's create a little directory to import: -```sh +```console > cd /tmp > mkdir foo > echo 'hello' > foo/bar @@ -96,59 +69,32 @@ When run, metadata about DAGNodes in the created tree is printed until the root: } ``` -## API +# Install -```js -import { importer, importFile, importDir, importBytes, importByteStream } from 'ipfs-unixfs-importer' +```console +$ npm i ipfs-unixfs-importer ``` -### const stream = importer(source, blockstore \[, options]) +## Browser ` ``` -`stream` will output file info objects as files get stored in IPFS. When stats on a node are emitted they are guaranteed to have been written. - -`blockstore` is an instance of a [blockstore][] - -The input's file paths and directory structure will be preserved in the [`dag-pb`](https://github.com/ipld/js-dag-pb) created nodes. - -### const result = await importFile(content, blockstore \[, options]) - -A convenience function for importing a single file or directory. - -### const result = await importDirectory(content, blockstore \[, options]) - -A convenience function for importing a directory - note this is non-recursive, to import recursively use the [importer](#const-stream--importersource-blockstore--options) function. - -### const result = await importBytes(buf, blockstore \[, options]) - -A convenience function for importing a single Uint8Array. - -### const result = await importByteStream(source, blockstore \[, options]) - -A convenience function for importing a single stream of Uint8Arrays. - -## API Docs +# API Docs - -## License +# License Licensed under either of - Apache 2.0, ([LICENSE-APACHE](LICENSE-APACHE) / ) - MIT ([LICENSE-MIT](LICENSE-MIT) / ) -## Contribute +# Contribute Contributions welcome! Please check out [the issues](https://github.com/ipfs/js-ipfs-unixfs/issues). 
diff --git a/packages/ipfs-unixfs-importer/package.json b/packages/ipfs-unixfs-importer/package.json index 213c03f2..eab11abb 100644 --- a/packages/ipfs-unixfs-importer/package.json +++ b/packages/ipfs-unixfs-importer/package.json @@ -11,13 +11,13 @@ "bugs": { "url": "https://github.com/ipfs/js-ipfs-unixfs/issues" }, + "publishConfig": { + "access": "public", + "provenance": true + }, "keywords": [ "IPFS" ], - "engines": { - "node": ">=16.0.0", - "npm": ">=7.0.0" - }, "type": "module", "types": "./dist/src/index.d.ts", "typesVersions": { @@ -59,94 +59,10 @@ "eslintConfig": { "extends": "ipfs", "parserOptions": { + "project": true, "sourceType": "module" } }, - "release": { - "branches": [ - "master" - ], - "plugins": [ - [ - "@semantic-release/commit-analyzer", - { - "preset": "conventionalcommits", - "releaseRules": [ - { - "breaking": true, - "release": "major" - }, - { - "revert": true, - "release": "patch" - }, - { - "type": "feat", - "release": "minor" - }, - { - "type": "fix", - "release": "patch" - }, - { - "type": "docs", - "release": "patch" - }, - { - "type": "test", - "release": "patch" - }, - { - "type": "deps", - "release": "patch" - }, - { - "scope": "no-release", - "release": false - } - ] - } - ], - [ - "@semantic-release/release-notes-generator", - { - "preset": "conventionalcommits", - "presetConfig": { - "types": [ - { - "type": "feat", - "section": "Features" - }, - { - "type": "fix", - "section": "Bug Fixes" - }, - { - "type": "chore", - "section": "Trivial Changes" - }, - { - "type": "docs", - "section": "Documentation" - }, - { - "type": "deps", - "section": "Dependencies" - }, - { - "type": "test", - "section": "Tests" - } - ] - } - } - ], - "@semantic-release/changelog", - "@semantic-release/npm", - "@semantic-release/github", - "@semantic-release/git" - ] - }, "scripts": { "test": "aegir test", "test:node": "aegir test -t node --cov", @@ -184,8 +100,5 @@ }, "browser": { "fs": false - }, - "typedoc": { - "entryPoint": "./src/index.ts" } } 
diff --git a/packages/ipfs-unixfs-importer/src/chunker/index.ts b/packages/ipfs-unixfs-importer/src/chunker/index.ts index 98ef07f8..4a1dc803 100644 --- a/packages/ipfs-unixfs-importer/src/chunker/index.ts +++ b/packages/ipfs-unixfs-importer/src/chunker/index.ts @@ -1,3 +1,12 @@ +/** + * @packageDocumentation + * + * Chunker functions split an incoming stream of bytes into chunks. + * + * The default is a fixed-size chunker which splits them into equally sized + * chunks though other strategies are available such as Rabin chunking. + */ + export interface Chunker { (source: AsyncIterable): AsyncIterable } export { rabin } from './rabin.js' diff --git a/packages/ipfs-unixfs-importer/src/index.ts b/packages/ipfs-unixfs-importer/src/index.ts index d0dc4a7a..aa4c3243 100644 --- a/packages/ipfs-unixfs-importer/src/index.ts +++ b/packages/ipfs-unixfs-importer/src/index.ts @@ -1,3 +1,67 @@ +/** + * @packageDocumentation + * + * @example + * + * Let's create a little directory to import: + * + * ```console + * > cd /tmp + * > mkdir foo + * > echo 'hello' > foo/bar + * > echo 'world' > foo/quux + * ``` + * + * And write the importing logic: + * + * ```js + * import { importer } from 'ipfs-unixfs-importer' + * import { MemoryBlockstore } from 'blockstore-core/memory' + * import * as fs from 'node:fs' + * + * // Where the blocks will be stored + * const blockstore = new MemoryBlockstore() + * + * // Import path /tmp/foo/ + * const source = [{ + * path: '/tmp/foo/bar', + * content: fs.createReadStream('/tmp/foo/bar') + * }, { + * path: '/tmp/foo/quxx', + * content: fs.createReadStream('/tmp/foo/quux') + * }] + * + * for await (const entry of importer(source, blockstore)) { + * console.info(entry) + * } + * ``` + * + * When run, metadata about DAGNodes in the created tree is printed until the root: + * + * ```js + * { + * cid: CID, // see https://github.com/multiformats/js-cid + * path: 'tmp/foo/bar', + * unixfs: UnixFS // see https://github.com/ipfs/js-ipfs-unixfs + * } + * { 
+ * cid: CID, // see https://github.com/multiformats/js-cid + * path: 'tmp/foo/quxx', + * unixfs: UnixFS // see https://github.com/ipfs/js-ipfs-unixfs + * } + * { + * cid: CID, // see https://github.com/multiformats/js-cid + * path: 'tmp/foo', + * unixfs: UnixFS // see https://github.com/ipfs/js-ipfs-unixfs + * } + * { + * cid: CID, // see https://github.com/multiformats/js-cid + * path: 'tmp', + * unixfs: UnixFS // see https://github.com/ipfs/js-ipfs-unixfs + * } + * ``` + */ + import errcode from 'err-code' import first from 'it-first' import parallelBatch from 'it-parallel-batch' diff --git a/packages/ipfs-unixfs-importer/src/layout/index.ts b/packages/ipfs-unixfs-importer/src/layout/index.ts index 99b5be91..3a7e02b9 100644 --- a/packages/ipfs-unixfs-importer/src/layout/index.ts +++ b/packages/ipfs-unixfs-importer/src/layout/index.ts @@ -1,3 +1,11 @@ +/** + * @packageDocumentation + * + * Layout functions allow customising the shape of final DAGs + * + * {@link https://dag.ipfs.tech} can be used to explore different configurations.
+ */ + import type { InProgressImportResult } from '../index.js' export interface Reducer { (leaves: InProgressImportResult[]): Promise } diff --git a/packages/ipfs-unixfs-importer/typedoc.json b/packages/ipfs-unixfs-importer/typedoc.json new file mode 100644 index 00000000..34037848 --- /dev/null +++ b/packages/ipfs-unixfs-importer/typedoc.json @@ -0,0 +1,7 @@ +{ + "entryPoints": [ + "./src/index.ts", + "./src/chunker/index.ts", + "./src/layout/index.ts" + ] +} diff --git a/packages/ipfs-unixfs/README.md b/packages/ipfs-unixfs/README.md index aa5b963f..53b8db35 100644 --- a/packages/ipfs-unixfs/README.md +++ b/packages/ipfs-unixfs/README.md @@ -7,62 +7,13 @@ > JavaScript implementation of IPFS' unixfs (a Unix FileSystem representation on top of a MerkleDAG) -## Table of contents - -- [Install](#install) - - [Browser ` -``` - -## Spec +This module contains the protobuf definition of the UnixFS data structure found at the root of all UnixFS DAGs. -The UnixFS spec can be found inside the [ipfs/specs repository](http://github.com/ipfs/specs) +The UnixFS spec can be found in the [ipfs/specs repository](http://github.com/ipfs/specs) -### Use in Node.js - -```JavaScript -import { UnixFS } from 'ipfs-unixfs' -``` - -### Use in a browser with browserify, webpack or any other bundler - -The code published to npm that gets loaded on require is in fact a ES5 transpiled version with the right shims added. This means that you can require it and use with your favourite bundler without having to adjust asset management process. - -```JavaScript -import { UnixFS } from 'ipfs-unixfs' -``` - -## Examples - -### Create a file composed by several blocks +## Example - Create a file composed of several blocks ```JavaScript const data = new UnixFS({ type: 'file' }) @@ -71,7 +22,7 @@ data.addBlockSize(256) // ... 
``` -### Create a directory that contains several files +## Example - Create a directory that contains several files Creating a directory that contains several files is achieve by creating a unixfs element that identifies a MerkleDAG node as a directory. The links of that MerkleDAG node are the files that are contained in this directory. @@ -79,42 +30,7 @@ Creating a directory that contains several files is achieve by creating a unixfs const data = new UnixFS({ type: 'directory' }) ``` -### UnixFS Data Structure - -```protobuf -syntax = "proto2"; - -message Data { - enum DataType { - Raw = 0; - Directory = 1; - File = 2; - Metadata = 3; - Symlink = 4; - HAMTShard = 5; - } - - required DataType Type = 1; - optional bytes Data = 2; - optional uint64 filesize = 3; - repeated uint64 blocksizes = 4; - optional uint64 hashType = 5; - optional uint64 fanout = 6; - optional uint32 mode = 7; - optional UnixTime mtime = 8; -} - -message UnixTime { - required int64 Seconds = 1; - optional fixed32 FractionalNanoseconds = 2; -} - -message Metadata { - optional string MimeType = 1; -} -``` - -### create an unixfs Data element +## Example - Create an unixfs Data element ```JavaScript const data = new UnixFS([options]) @@ -134,7 +50,7 @@ const data = new UnixFS([options]) - mode (Number, default `0644` for files, `0755` for directories/hamt-sharded-directories) file mode - mtime (`Date`, `{ secs, nsecs }`, `{ Seconds, FractionalNanoseconds }`, `[ secs, nsecs ]`): The modification time of this node -### add and remove a block size to the block size list +## Example - Add and remove a block size to the block size list ```JavaScript data.addBlockSize() @@ -144,20 +60,20 @@ data.addBlockSize() data.removeBlockSize() ``` -### get total fileSize +## Example - Get total fileSize ```JavaScript data.fileSize() // => size in bytes ``` -### marshal and unmarshal +## Example - Marshal and unmarshal ```javascript const marshaled = data.marshal() const unmarshaled = Unixfs.unmarshal(marshaled) 
``` -### is this UnixFS entry a directory? +## Example - Is this UnixFS entry a directory? ```JavaScript const dir = new Data({ type: 'directory' }) @@ -167,7 +83,7 @@ const file = new Data({ type: 'file' }) file.isDirectory() // false ``` -### has an mtime been set? +## Example - Has an mtime been set? If no modification time has been set, no `mtime` property will be present on the `Data` instance: @@ -181,18 +97,32 @@ const dir = new Data({ type: 'dir', mtime: new Date() }) dir.mtime // { secs: Number, nsecs: Number } ``` -## API Docs +# Install + +```console +$ npm i ipfs-unixfs +``` + +## Browser ` +``` + +# API Docs - -## License +# License Licensed under either of - Apache 2.0, ([LICENSE-APACHE](LICENSE-APACHE) / ) - MIT ([LICENSE-MIT](LICENSE-MIT) / ) -## Contribute +# Contribute Contributions welcome! Please check out [the issues](https://github.com/ipfs/js-ipfs-unixfs/issues). diff --git a/packages/ipfs-unixfs/package.json b/packages/ipfs-unixfs/package.json index 33ede90f..6f123d0c 100644 --- a/packages/ipfs-unixfs/package.json +++ b/packages/ipfs-unixfs/package.json @@ -11,13 +11,13 @@ "bugs": { "url": "https://github.com/ipfs/js-ipfs-unixfs/issues" }, + "publishConfig": { + "access": "public", + "provenance": true + }, "keywords": [ "IPFS" ], - "engines": { - "node": ">=16.0.0", - "npm": ">=7.0.0" - }, "type": "module", "types": "./dist/src/index.d.ts", "files": [ @@ -35,97 +35,13 @@ "eslintConfig": { "extends": "ipfs", "parserOptions": { + "project": true, "sourceType": "module" }, "ignorePatterns": [ "src/unixfs.d.ts" ] }, - "release": { - "branches": [ - "master" - ], - "plugins": [ - [ - "@semantic-release/commit-analyzer", - { - "preset": "conventionalcommits", - "releaseRules": [ - { - "breaking": true, - "release": "major" - }, - { - "revert": true, - "release": "patch" - }, - { - "type": "feat", - "release": "minor" - }, - { - "type": "fix", - "release": "patch" - }, - { - "type": "docs", - "release": "patch" - }, - { - "type": "test", - 
 "release": "patch" - }, - { - "type": "deps", - "release": "patch" - }, - { - "scope": "no-release", - "release": false - } - ] - } - ], - [ - "@semantic-release/release-notes-generator", - { - "preset": "conventionalcommits", - "presetConfig": { - "types": [ - { - "type": "feat", - "section": "Features" - }, - { - "type": "fix", - "section": "Bug Fixes" - }, - { - "type": "chore", - "section": "Trivial Changes" - }, - { - "type": "docs", - "section": "Documentation" - }, - { - "type": "deps", - "section": "Dependencies" - }, - { - "type": "test", - "section": "Tests" - } - ] - } - } - ], - "@semantic-release/changelog", - "@semantic-release/npm", - "@semantic-release/github", - "@semantic-release/git" - ] - }, "scripts": { "generate": "protons src/unixfs.proto", "test": "aegir test", @@ -150,8 +66,5 @@ }, "browser": { "fs": false - }, - "typedoc": { - "entryPoint": "./src/index.ts" } } diff --git a/packages/ipfs-unixfs/src/index.ts b/packages/ipfs-unixfs/src/index.ts index 2026d806..b6263d64 100644 --- a/packages/ipfs-unixfs/src/index.ts +++ b/packages/ipfs-unixfs/src/index.ts @@ -1,3 +1,95 @@ +/** + * @packageDocumentation + * + * This module contains the protobuf definition of the UnixFS data structure found at the root of all UnixFS DAGs. + * + * The UnixFS spec can be found in the [ipfs/specs repository](http://github.com/ipfs/specs) + * + * @example Create a file composed of several blocks + * + * ```JavaScript + * const data = new UnixFS({ type: 'file' }) + * data.addBlockSize(256) // add the size of each block + * data.addBlockSize(256) + * // ... + * ``` + * + * @example Create a directory that contains several files + * + * Creating a directory that contains several files is achieved by creating a unixfs element that identifies a MerkleDAG node as a directory. The links of that MerkleDAG node are the files that are contained in this directory. 
+ * + * ```JavaScript + * const data = new UnixFS({ type: 'directory' }) + * ``` + * + * @example Create an unixfs Data element + * + * ```JavaScript + * const data = new UnixFS([options]) + * ``` + * + * `options` is an optional object argument that might include the following keys: + * + * - type (string, default `file`): The type of UnixFS entry. Can be: + * - `raw` + * - `directory` + * - `file` + * - `metadata` + * - `symlink` + * - `hamt-sharded-directory` + * - data (Uint8Array): The optional data field for this node + * - blockSizes (Array, default: `[]`): If this is a `file` node that is made up of multiple blocks, `blockSizes` is a list of numbers that represent the size of the file chunks stored in each child node. It is used to calculate the total file size. + * - mode (Number, default `0644` for files, `0755` for directories/hamt-sharded-directories) file mode + * - mtime (`Date`, `{ secs, nsecs }`, `{ Seconds, FractionalNanoseconds }`, `[ secs, nsecs ]`): The modification time of this node + * + * @example Add and remove a block size to the block size list + * + * ```JavaScript + * data.addBlockSize() + * ``` + * + * ```JavaScript + * data.removeBlockSize() + * ``` + * + * @example Get total fileSize + * + * ```JavaScript + * data.fileSize() // => size in bytes + * ``` + * + * @example Marshal and unmarshal + * + * ```javascript + * const marshaled = data.marshal() + * const unmarshaled = Unixfs.unmarshal(marshaled) + * ``` + * + * @example Is this UnixFS entry a directory? + * + * ```JavaScript + * const dir = new Data({ type: 'directory' }) + * dir.isDirectory() // true + * + * const file = new Data({ type: 'file' }) + * file.isDirectory() // false + * ``` + * + * @example Has an mtime been set? 
+ * + * If no modification time has been set, no `mtime` property will be present on the `Data` instance: + * + * ```JavaScript + * const file = new Data({ type: 'file' }) + * file.mtime // undefined + * + * Object.prototype.hasOwnProperty.call(file, 'mtime') // false + * + * const dir = new Data({ type: 'dir', mtime: new Date() }) + * dir.mtime // { secs: Number, nsecs: Number } + * ``` + */ + import errcode from 'err-code' import { Data as PBData } from './unixfs.js' diff --git a/packages/ipfs-unixfs/typedoc.json b/packages/ipfs-unixfs/typedoc.json new file mode 100644 index 00000000..3be48369 --- /dev/null +++ b/packages/ipfs-unixfs/typedoc.json @@ -0,0 +1,6 @@ +{ + "entryPoints": [ + "./src/index.ts" + ], + "readme": "none" +} diff --git a/typedoc.json b/typedoc.json new file mode 100644 index 00000000..481c04ce --- /dev/null +++ b/typedoc.json @@ -0,0 +1,7 @@ +{ + "$schema": "https://typedoc.org/schema.json", + "name": "UnixFS", + "exclude": [ + "packages/interop" + ] +}