This repository was archived by the owner on Apr 29, 2020. It is now read-only.

feat: support storing metadata in unixfs nodes #39

Merged · 6 commits · Nov 22, 2019
Changes from 4 commits
2 changes: 1 addition & 1 deletion .travis.yml
@@ -6,7 +6,7 @@ stages:
- cov

node_js:
- '10'
- '12'

os:
- linux
27 changes: 13 additions & 14 deletions README.md
@@ -1,4 +1,4 @@
# ipfs-unixfs-importer
# ipfs-unixfs-importer <!-- omit in toc -->

[![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io)
[![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/)
@@ -13,22 +13,19 @@

> JavaScript implementation of the layout and chunking mechanisms used by IPFS to handle Files

## Lead Maintainer
## Lead Maintainer <!-- omit in toc -->

[Alex Potsides](https://github.com/achingbrain)

## Table of Contents
## Table of Contents <!-- omit in toc -->

- [ipfs-unixfs-importer](#ipfs-unixfs-importer)
- [Lead Maintainer](#lead-maintainer)
- [Table of Contents](#table-of-contents)
- [Install](#install)
- [Usage](#usage)
- [Example](#example)
- [API](#api)
- [const import = importer(source, ipld [, options])](#const-import--importersource-ipld--options)
- [Contribute](#contribute)
- [License](#license)
- [Install](#install)
- [Usage](#usage)
- [Example](#example)
- [API](#api)
- [const import = importer(source, ipld [, options])](#const-import--importersource-ipld--options)
- [Contribute](#contribute)
- [License](#license)

## Install

@@ -108,7 +105,9 @@ The `import` function returns an async iterator that takes a source async iterator that
```js
{
path: 'a name',
content: (Buffer or iterator emitting Buffers)
content: (Buffer or iterator emitting Buffers),
mtime: (Number representing seconds since (positive) or before (negative) the Unix Epoch),
mode: (Number representing ugo-rwx, setuid, setgid and sticky bit)
}
```

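For context, a minimal usage sketch of the metadata fields documented above. The entry shape and the `importer(source, ipld, options)` call come from the README; the file name, its contents, the `rawLeaves` option and the pre-built `ipld` instance (the tests use `ipld-in-memory`) are illustrative assumptions, not part of this diff.

```js
const importer = require('ipfs-unixfs-importer')

// `ipld` is an IPLD instance created elsewhere (the tests build one with ipld-in-memory)
async function importWithMetadata (ipld) {
  const source = [{
    path: 'hello.txt',
    content: Buffer.from('hello world'),
    // seconds since the Unix Epoch
    mtime: Math.round(Date.now() / 1000),
    // ugo-rwx permission bits, here rwxr-xr-x
    mode: parseInt('0755', 8)
  }]

  for await (const file of importer(source, ipld, { rawLeaves: true })) {
    console.info(file.path, file.cid.toString()) // eslint-disable-line no-console
  }
}
```

The new tests added below round-trip the same fields through the exporter and read them back from `node.unixfs`.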
10 changes: 5 additions & 5 deletions package.json
@@ -44,7 +44,7 @@
"cids": "~0.7.1",
"detect-node": "^2.0.4",
"dirty-chai": "^2.0.1",
"ipfs-unixfs-exporter": "~0.37.0",
"ipfs-unixfs-exporter": "^0.39.0",
"ipld": "^0.25.0",
"ipld-in-memory": "^3.0.0",
"multihashes": "~0.4.14",
@@ -55,16 +55,16 @@
"async-iterator-all": "^1.0.0",
"async-iterator-batch": "~0.0.1",
"async-iterator-first": "^1.0.0",
"bl": "^3.0.0",
"bl": "^4.0.0",
"deep-extend": "~0.6.0",
"err-code": "^2.0.0",
"hamt-sharding": "~0.0.2",
"ipfs-unixfs": "~0.1.16",
"ipfs-unixfs": "^0.2.0",
"ipld-dag-pb": "^0.18.0",
"multicodec": "~0.5.1",
"multihashing-async": "~0.7.0",
"multihashing-async": "^0.8.0",
"rabin-wasm": "~0.0.8",
"superstruct": "~0.6.1"
"superstruct": "^0.8.2"
},
"contributors": [
"Alan Shaw <alan.shaw@protocol.ai>",
8 changes: 8 additions & 0 deletions src/dag-builder/file/index.js
@@ -81,6 +81,14 @@ const reduce = (file, ipld, options) => {
// create a parent node and add all the leaves
const f = new UnixFS('file')

if (options.mtime) {
f.mtime = options.mtime
}

if (options.mode) {
f.mode = options.mode
}

const links = leaves
.filter(leaf => {
if (leaf.cid.codec === 'raw' && leaf.node.length) {
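A rough sketch of where that metadata lives: the `new UnixFS('file')` constructor and the property assignments mirror the `reduce()` change above, while the `marshal()` call and the note about the dag-pb `Data` field are assumptions about how `ipfs-unixfs` is used here, not something shown in this diff.

```js
const UnixFS = require('ipfs-unixfs')

// build a 'file' node and attach the optional metadata, as reduce() now does
const file = new UnixFS('file')
file.mtime = Math.round(Date.now() / 1000)
file.mode = parseInt('0644', 8)

// marshal() serialises the UnixFS protobuf; the importer stores these bytes
// as the Data field of the resulting dag-pb node
const serialized = file.marshal()
console.info(serialized.length) // eslint-disable-line no-console
```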
12 changes: 10 additions & 2 deletions src/dag-builder/index.js
@@ -33,10 +33,18 @@ async function * dagBuilder (source, ipld, options) {
const chunker = createChunker(options.chunker, validateChunks(source), options.chunkerOptions)

// item is a file
yield fileBuilder(entry, chunker, ipld, options)
yield fileBuilder(entry, chunker, ipld, {
...options,
mode: entry.mode,
mtime: entry.mtime
})
} else {
// item is a directory
yield dirBuilder(entry, ipld, options)
yield dirBuilder(entry, ipld, {
...options,
mode: entry.mode,
mtime: entry.mtime
})
}
}
}
2 changes: 1 addition & 1 deletion test/benchmark.spec.js
@@ -24,7 +24,7 @@ describe.skip('benchmark', function () {
const times = []

after(() => {
console.info(`Percent\tms`) // eslint-disable-line no-console
console.info('Percent\tms') // eslint-disable-line no-console
times.forEach((time, index) => {
console.info(`${index}\t${parseInt(time / REPEATS)}`) // eslint-disable-line no-console
})
38 changes: 38 additions & 0 deletions test/importer.spec.js
@@ -676,5 +676,43 @@ strategies.forEach((strategy) => {
}
}
})

it('supports passing mtime', async () => {
this.timeout(60 * 1000)

const options = {
rawLeaves: true
}
const now = parseInt(Date.now() / 1000)

for await (const file of importer([{
path: '1.2MiB.txt',
content: bigFile,
mtime: now
}], ipld, options)) {
const node = await exporter(file.cid, ipld)

expect(node.unixfs.mtime).to.equal(now)
}
})

it('supports passing mode', async () => {
this.timeout(60 * 1000)

const options = {
rawLeaves: true
}
const mode = parseInt('0111', 8)

for await (const file of importer([{
path: '1.2MiB.txt',
content: bigFile,
mode
}], ipld, options)) {
const node = await exporter(file.cid, ipld)

expect(node.unixfs.mode).to.equal(mode)
}
})
})
})