diff --git a/README.md b/README.md
index 8e882df5..5e7a445a 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# ipfs-unixfs JavaScript Implementation
+# ipfs-unixfs JavaScript Implementation
 
 [![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io)
 [![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/)
@@ -10,36 +10,34 @@
 ![](https://img.shields.io/badge/npm-%3E%3D3.0.0-orange.svg?style=flat-square)
 ![](https://img.shields.io/badge/Node.js-%3E%3D8.0.0-orange.svg?style=flat-square)
 
-> JavaScript implementation of IPFS' unixfs (a Unix FileSystem files representation on top of a MerkleDAG)
+> JavaScript implementation of IPFS' UnixFS (a Unix FileSystem files representation on top of a MerkleDAG)
 
-[The unixfs spec can be found inside the ipfs/specs repository](http://github.com/ipfs/specs)
+The UnixFS spec can be found inside the [ipfs/specs repository](http://github.com/ipfs/specs)
 
-## Lead Maintainer
+## Lead Maintainer
 
 [Alex Potsides](https://github.com/achingbrain)
 
-## Table of Contents
-
-- [ipfs-unixfs JavaScript Implementation](#ipfs-unixfs-javascript-implementation)
-  - [Lead Maintainer](#lead-maintainer)
-  - [Table of Contents](#table-of-contents)
-  - [Install](#install)
-    - [npm](#npm)
-    - [Use in Node.js](#use-in-nodejs)
-    - [Use in a browser with browserify, webpack or any other bundler](#use-in-a-browser-with-browserify--webpack-or-any-other-bundler)
-    - [Use in a browser Using a script tag](#use-in-a-browser-using-a-script-tag)
-  - [Usage](#usage)
-    - [Examples](#examples)
-      - [Create a file composed by several blocks](#create-a-file-composed-by-several-blocks)
-      - [Create a directory that contains several files](#create-a-directory-that-contains-several-files)
-  - [API](#api)
-    - [unixfs Data Structure](#unixfs-data-structure)
-    - [create an unixfs Data element](#create-an-unixfs-data-element)
-    - [add and remove a block size to the block size list](#add-and-remove-a-block-size-to-the-block-size-list)
-    - [get total fileSize](#get-total-filesize)
-    - [marshal and unmarshal](#marshal-and-unmarshal)
-  - [Contribute](#contribute)
-  - [License](#license)
+## Table of Contents
+
+- [Install](#install)
+  - [npm](#npm)
+  - [Use in Node.js](#use-in-nodejs)
+  - [Use in a browser with browserify, webpack or any other bundler](#use-in-a-browser-with-browserify-webpack-or-any-other-bundler)
+  - [Use in a browser Using a script tag](#use-in-a-browser-using-a-script-tag)
+- [Usage](#usage)
+  - [Examples](#examples)
+    - [Create a file composed by several blocks](#create-a-file-composed-by-several-blocks)
+    - [Create a directory that contains several files](#create-a-directory-that-contains-several-files)
+- [API](#api)
+  - [UnixFS Data Structure](#unixfs-data-structure)
+  - [create an unixfs Data element](#create-an-unixfs-data-element)
+  - [add and remove a block size to the block size list](#add-and-remove-a-block-size-to-the-block-size-list)
+  - [get total fileSize](#get-total-filesize)
+  - [marshal and unmarshal](#marshal-and-unmarshal)
+  - [is this UnixFS entry a directory?](#is-this-unixfs-entry-a-directory)
+- [Contribute](#contribute)
+- [License](#license)
 
 ## Install
 
@@ -52,7 +50,7 @@
 ### Use in Node.js
 
 ```JavaScript
-var Unixfs = require('ipfs-unixfs')
+var UnixFS = require('ipfs-unixfs')
 ```
 
 ### Use in a browser with browserify, webpack or any other bundler
@@ -60,12 +58,12 @@ var Unixfs = require('ipfs-unixfs')
 The code published to npm that gets loaded on require is in fact a ES5 transpiled version with the right shims added. This means that you can require it and use with your favourite bundler without having to adjust asset management process.
 
 ```JavaScript
-var Unixfs = require('ipfs-unixfs')
+var UnixFS = require('ipfs-unixfs')
 ```
 
 ### Use in a browser Using a script tag
 
-Loading this module through a script tag will make the `Unixfs` obj available in the global namespace.
+Loading this module through a script tag will make the `UnixFS` obj available in the global namespace.
 
 ```html
@@ -80,7 +78,7 @@ Loading this module through a script tag will make the `Unixfs` obj available in
 #### Create a file composed by several blocks
 
 ```JavaScript
-var data = new Unixfs('file')
+const data = new UnixFS({ type: 'file' })
 data.addBlockSize(256) // add the size of each block
 data.addBlockSize(256)
 // ...
 data.addBlockSize(256)
@@ -91,14 +89,16 @@
 Creating a directory that contains several files is achieve by creating a unixfs element that identifies a MerkleDAG node as a directory. The links of that MerkleDAG node are the files that are contained in this directory.
 
 ```JavaScript
-var data = new Unixfs('directory')
+const data = new UnixFS({ type: 'directory' })
 ```
 
 ## API
 
-#### unixfs Data Structure
+#### UnixFS Data Structure
 
 ```protobuf
+syntax = "proto2";
+
 message Data {
   enum DataType {
     Raw = 0;
@@ -113,9 +113,10 @@ message Data {
   optional bytes Data = 2;
   optional uint64 filesize = 3;
   repeated uint64 blocksizes = 4;
-
   optional uint64 hashType = 5;
   optional uint64 fanout = 6;
+  optional uint32 mode = 7;
+  optional int64 mtime = 8;
 }
 
 message Metadata {
@@ -126,10 +127,22 @@
 #### create an unixfs Data element
 
 ```JavaScript
-var data = new UnixFS(<type>, [<content>])
+const data = new UnixFS([options])
 ```
 
-Type can be: `['raw', 'directory', 'file', 'metadata', 'symlink', 'hamt-sharded-directory']`
+`options` is an optional object argument that might include the following keys:
+
+- type (string, default `file`): The type of UnixFS entry. Can be:
+  - `raw`
+  - `directory`
+  - `file`
+  - `metadata`
+  - `symlink`
+  - `hamt-sharded-directory`
+- data (Buffer): The optional data field for this node
+- blockSizes (Array, default: `[]`): If this is a `file` node that is made up of multiple blocks, `blockSizes` is a list of numbers that represent the size of the file chunks stored in each child node. It is used to calculate the total file size.
+- mode (Number, default `0644` for files, `0755` for directories/hamt-sharded-directories): The file mode
+- mtime (Date, default `0`): The modification time of this node
 
 #### add and remove a block size to the block size list
 
@@ -149,9 +162,19 @@ data.fileSize() // => size in bytes
 
 #### marshal and unmarshal
 
+```javascript
+const marshaled = data.marshal()
+const unmarshaled = UnixFS.unmarshal(marshaled)
 ```
-
-var marshaled = data.marshal()
-var unmarshaled = Unixfs.unmarshal(marshaled)
+
+#### is this UnixFS entry a directory?
+
+```JavaScript
+const dir = new UnixFS({ type: 'directory' })
+dir.isDirectory() // true
+
+const file = new UnixFS({ type: 'file' })
+file.isDirectory() // false
 ```
 
 ## Contribute
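The README changes above document the new options-object constructor and the `mode`/`mtime` fields it introduces. A minimal sketch of how those options round-trip through `marshal`/`unmarshal`, assuming the package is required as `UnixFS` and with purely illustrative values:

```JavaScript
const UnixFS = require('ipfs-unixfs')

// A non-default mode for a file (the default 0644 would be omitted on marshal)
const entry = new UnixFS({
  type: 'file',
  data: Buffer.from('hello'),
  mode: parseInt('0755', 8),
  mtime: new Date()
})

console.log(entry.fileSize()) // 5 (length of the data field, plus any block sizes)

const roundTripped = UnixFS.unmarshal(entry.marshal())
console.log(roundTripped.mode.toString(8)) // '755'
console.log(roundTripped.mtime)            // mtime preserved with one-second resolution
```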
diff --git a/package.json b/package.json
index 03bea065..f00d3084 100644
--- a/package.json
+++ b/package.json
@@ -38,11 +38,10 @@
   "devDependencies": {
     "aegir": "^20.4.1",
     "chai": "^4.2.0",
-    "dirty-chai": "^2.0.1",
-    "safe-buffer": "^5.1.2"
+    "dirty-chai": "^2.0.1"
   },
   "dependencies": {
-    "protons": "^1.0.1"
+    "protons": "^1.1.0"
   },
   "contributors": [
     "David Dias ",
diff --git a/src/index.js b/src/index.js
index 00dbf124..5e0c9bb8 100644
--- a/src/index.js
+++ b/src/index.js
@@ -2,9 +2,7 @@
 const protons = require('protons')
 
 const pb = protons(require('./unixfs.proto'))
-// encode/decode
 const unixfsData = pb.Data
-// const unixfsMetadata = pb.MetaData // encode/decode
 
 const types = [
   'raw',
@@ -20,37 +18,94 @@ const dirTypes = [
   'hamt-sharded-directory'
 ]
 
-function Data (type, data) {
-  if (!(this instanceof Data)) {
-    return new Data(type, data)
+const DEFAULT_FILE_MODE = parseInt('0644', 8)
+const DEFAULT_DIRECTORY_MODE = parseInt('0755', 8)
+
+function parseArgs (args) {
+  if (args.length === 0) {
+    return {
+      type: 'file'
+    }
+  }
+
+  if (args.length === 2) {
+    // support old-style constructor
+    return {
+      type: args[0],
+      data: args[1]
+    }
+  }
+
+  if (typeof args[0] === 'string' || args[0] instanceof String) {
+    return {
+      type: args[0]
+    }
   }
-  if (types.indexOf(type) === -1) {
-    throw new Error('Type: ' + type + ' is not valid')
+
+  return args[0]
+}
+
+class Data {
+  // decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md
+  static unmarshal (marshaled) {
+    const decoded = unixfsData.decode(marshaled)
+
+    return new Data({
+      type: types[decoded.Type],
+      data: decoded.hasData() ? decoded.Data : undefined,
+      blockSizes: decoded.blocksizes,
+      mode: decoded.hasMode() ? decoded.mode : undefined,
+      mtime: decoded.hasMtime() ? new Date(decoded.mtime * 1000) : undefined
+    })
   }
-  this.type = type
-  this.data = data
-  this.blockSizes = []
+
+  constructor (...args) {
+    const {
+      type,
+      data,
+      blockSizes,
+      hashType,
+      fanout,
+      mtime,
+      mode
+    } = parseArgs(args)
+
+    if (!types.includes(type)) {
+      throw new Error('Type: ' + type + ' is not valid')
+    }
+
+    this.type = type
+    this.data = data
+    this.hashType = hashType
+    this.fanout = fanout
+    this.blockSizes = blockSizes || []
+    this.mtime = mtime || new Date(0)
+    this.mode = mode
+
+    if (this.mode === undefined && type === 'file') {
+      this.mode = DEFAULT_FILE_MODE
+    }
 
-  if (this.type === 'file') {
-    this.mode = parseInt('0644', 8)
+    if (this.mode === undefined && this.isDirectory()) {
+      this.mode = DEFAULT_DIRECTORY_MODE
+    }
   }
 
-  if (this.type === 'directory' || this.type === 'hamt-sharded-directory') {
-    this.mode = parseInt('0755', 8)
+  isDirectory () {
+    return dirTypes.includes(this.type)
   }
 
-  this.addBlockSize = (size) => {
+  addBlockSize (size) {
     this.blockSizes.push(size)
   }
 
-  this.removeBlockSize = (index) => {
+  removeBlockSize (index) {
     this.blockSizes.splice(index, 1)
   }
 
   // data.length + blockSizes
-  this.fileSize = () => {
-    if (dirTypes.indexOf(this.type) >= 0) {
+  fileSize () {
+    if (this.isDirectory()) {
       // dirs don't have file size
      return undefined
    }
@@ -59,14 +114,16 @@
     this.blockSizes.forEach((size) => {
       sum += size
     })
-    if (data) {
-      sum += data.length
+
+    if (this.data) {
+      sum += this.data.length
     }
+
     return sum
   }
 
   // encode to protobuf
-  this.marshal = () => {
+  marshal () {
     let type
 
     switch (this.type) {
@@ -92,12 +149,28 @@
       blockSizes = undefined
     }
 
-    if ((this.type === 'directory' || this.type === 'hamt-sharded-directory') && this.mode === parseInt('0755', 8)) {
-      delete this.mode
+    let mode
+
+    if (!isNaN(parseInt(this.mode))) {
+      mode = this.mode
+
+      if (mode === DEFAULT_FILE_MODE && this.type === 'file') {
+        mode = undefined
+      }
+
+      if (mode === DEFAULT_DIRECTORY_MODE && this.isDirectory()) {
+        mode = undefined
+      }
     }
 
-    if (this.type === 'file' && this.mode === parseInt('0644', 8)) {
-      delete this.mode
+    let mtime
+
+    if (this.mtime) {
+      mtime = Math.round(this.mtime.getTime() / 1000)
+
+      if (mtime === 0) {
+        mtime = undefined
+      }
     }
 
     return unixfsData.encode({
@@ -107,30 +180,10 @@
       blocksizes: blockSizes,
       hashType: this.hashType,
       fanout: this.fanout,
-      mode: this.mode,
-      mtime: this.mtime
+      mode,
+      mtime
     })
   }
 }
 
-// decode from protobuf https://github.com/ipfs/go-ipfs/blob/master/unixfs/format.go#L24
-Data.unmarshal = (marsheled) => {
-  const decoded = unixfsData.decode(marsheled)
-  if (!decoded.Data) {
-    decoded.Data = undefined
-  }
-  const obj = new Data(types[decoded.Type], decoded.Data)
-  obj.blockSizes = decoded.blocksizes
-
-  if (decoded.mode) {
-    obj.mode = decoded.mode
-  }
-
-  if (decoded.mtime) {
-    obj.mtime = decoded.mtime
-  }
-
-  return obj
-}
-
-exports = module.exports = Data
+module.exports = Data
diff --git a/src/unixfs.proto.js b/src/unixfs.proto.js
index b8d4fd3c..fcc8931d 100644
--- a/src/unixfs.proto.js
+++ b/src/unixfs.proto.js
@@ -1,6 +1,9 @@
 'use strict'
 
-module.exports = `message Data {
+module.exports = `
+syntax = "proto2";
+
+message Data {
   enum DataType {
     Raw = 0;
     Directory = 1;
@@ -22,4 +25,5 @@ module.exports = `message Data {
 
 message Metadata {
   optional string MimeType = 1;
-}`
+}
+`
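The `parseArgs` helper in the new `src/index.js` above keeps the old positional `(type, data)` constructor form working alongside the new options object. A short sketch, with illustrative values, assuming the package export is the `Data` class shown above:

```JavaScript
const UnixFS = require('ipfs-unixfs')

// Old-style positional arguments are mapped by parseArgs to { type, data }
const legacy = new UnixFS('file', Buffer.from('batata'))

// New options-object form added by this change
const modern = new UnixFS({ type: 'file', data: Buffer.from('batata') })

console.log(legacy.type, modern.type)             // 'file' 'file'
console.log(legacy.fileSize(), modern.fileSize()) // 6 6
```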
diff --git a/test/unixfs-format.spec.js b/test/unixfs-format.spec.js
index 0f352e76..766d99be 100644
--- a/test/unixfs-format.spec.js
+++ b/test/unixfs-format.spec.js
@@ -12,131 +12,159 @@ const raw = loadFixture('test/fixtures/raw.unixfs')
 const directory = loadFixture('test/fixtures/directory.unixfs')
 const file = loadFixture('test/fixtures/file.txt.unixfs')
 const symlink = loadFixture('test/fixtures/symlink.txt.unixfs')
-const Buffer = require('safe-buffer').Buffer
+const { Buffer } = require('buffer')
 
 describe('unixfs-format', () => {
+  it('defaults to file', () => {
+    const data = new UnixFS()
+    expect(data.type).to.equal('file')
+    const marshaled = data.marshal()
+    const unmarshaled = UnixFS.unmarshal(marshaled)
+    expect(data.type).to.equal(unmarshaled.type)
+    expect(data.data).to.deep.equal(unmarshaled.data)
+    expect(data.blockSizes).to.deep.equal(unmarshaled.blockSizes)
+    expect(data.fileSize()).to.deep.equal(unmarshaled.fileSize())
+  })
+
   it('raw', () => {
-    const data = new UnixFS('raw', Buffer.from('bananas'))
-    const marshalled = data.marshal()
-    const unmarshalled = UnixFS.unmarshal(marshalled)
-    expect(data.type).to.equal(unmarshalled.type)
-    expect(data.data).to.deep.equal(unmarshalled.data)
-    expect(data.blockSizes).to.deep.equal(unmarshalled.blockSizes)
-    expect(data.fileSize()).to.deep.equal(unmarshalled.fileSize())
+    const data = new UnixFS({
+      type: 'raw',
+      data: Buffer.from('bananas')
+    })
+    const marshaled = data.marshal()
+    const unmarshaled = UnixFS.unmarshal(marshaled)
+    expect(data.type).to.equal(unmarshaled.type)
+    expect(data.data).to.deep.equal(unmarshaled.data)
+    expect(data.blockSizes).to.deep.equal(unmarshaled.blockSizes)
+    expect(data.fileSize()).to.deep.equal(unmarshaled.fileSize())
   })
 
   it('directory', () => {
-    const data = new UnixFS('directory')
-    const marshalled = data.marshal()
-    const unmarshalled = UnixFS.unmarshal(marshalled)
-    expect(data.type).to.equal(unmarshalled.type)
-    expect(data.data).to.deep.equal(unmarshalled.data)
-    expect(data.blockSizes).to.deep.equal(unmarshalled.blockSizes)
-    expect(data.fileSize()).to.deep.equal(unmarshalled.fileSize())
+    const data = new UnixFS({
+      type: 'directory'
+    })
+    const marshaled = data.marshal()
+    const unmarshaled = UnixFS.unmarshal(marshaled)
+    expect(data.type).to.equal(unmarshaled.type)
+    expect(data.data).to.deep.equal(unmarshaled.data)
+    expect(data.blockSizes).to.deep.equal(unmarshaled.blockSizes)
+    expect(data.fileSize()).to.deep.equal(unmarshaled.fileSize())
   })
 
   it('hamt-sharded-directory', () => {
-    const data = new UnixFS('hamt-sharded-directory')
-    const marshalled = data.marshal()
-    const unmarshalled = UnixFS.unmarshal(marshalled)
-    expect(data.type).to.equal(unmarshalled.type)
-    expect(data.data).to.deep.equal(unmarshalled.data)
-    expect(data.blockSizes).to.deep.equal(unmarshalled.blockSizes)
-    expect(data.fileSize()).to.deep.equal(unmarshalled.fileSize())
+    const data = new UnixFS({
+      type: 'hamt-sharded-directory'
+    })
+    const marshaled = data.marshal()
+    const unmarshaled = UnixFS.unmarshal(marshaled)
+    expect(data.type).to.equal(unmarshaled.type)
+    expect(data.data).to.deep.equal(unmarshaled.data)
+    expect(data.blockSizes).to.deep.equal(unmarshaled.blockSizes)
+    expect(data.fileSize()).to.deep.equal(unmarshaled.fileSize())
   })
 
   it('file', () => {
-    const data = new UnixFS('file', Buffer.from('batata'))
-    const marshalled = data.marshal()
-    const unmarshalled = UnixFS.unmarshal(marshalled)
-    expect(data.type).to.equal(unmarshalled.type)
-    expect(data.data).to.deep.equal(unmarshalled.data)
-    expect(data.blockSizes).to.deep.equal(unmarshalled.blockSizes)
-    expect(data.fileSize()).to.deep.equal(unmarshalled.fileSize())
+    const data = new UnixFS({
+      type: 'file',
+      data: Buffer.from('batata')
+    })
+    const marshaled = data.marshal()
+    const unmarshaled = UnixFS.unmarshal(marshaled)
+    expect(data.type).to.equal(unmarshaled.type)
+    expect(data.data).to.deep.equal(unmarshaled.data)
+    expect(data.blockSizes).to.deep.equal(unmarshaled.blockSizes)
+    expect(data.fileSize()).to.deep.equal(unmarshaled.fileSize())
   })
 
   it('file add blocksize', () => {
-    const data = new UnixFS('file')
+    const data = new UnixFS({
+      type: 'file'
+    })
     data.addBlockSize(256)
-    const marshalled = data.marshal()
-    const unmarshalled = UnixFS.unmarshal(marshalled)
-    expect(data.type).to.equal(unmarshalled.type)
-    expect(data.data).to.deep.equal(unmarshalled.data)
-    expect(data.blockSizes).to.deep.equal(unmarshalled.blockSizes)
-    expect(data.fileSize()).to.deep.equal(unmarshalled.fileSize())
+    const marshaled = data.marshal()
+    const unmarshaled = UnixFS.unmarshal(marshaled)
+    expect(data.type).to.equal(unmarshaled.type)
+    expect(data.data).to.deep.equal(unmarshaled.data)
+    expect(data.blockSizes).to.deep.equal(unmarshaled.blockSizes)
+    expect(data.fileSize()).to.deep.equal(unmarshaled.fileSize())
   })
 
   it('file add and remove blocksize', () => {
-    const data = new UnixFS('file')
+    const data = new UnixFS({
+      type: 'file'
+    })
     data.addBlockSize(256)
-    const marshalled = data.marshal()
-    const unmarshalled = UnixFS.unmarshal(marshalled)
-    expect(data.type).to.equal(unmarshalled.type)
-    expect(data.data).to.deep.equal(unmarshalled.data)
-    expect(data.blockSizes).to.deep.equal(unmarshalled.blockSizes)
-    expect(data.fileSize()).to.deep.equal(unmarshalled.fileSize())
-    unmarshalled.removeBlockSize(0)
-    expect(data.blockSizes).to.not.deep.equal(unmarshalled.blockSizes)
-  })
-
-  it('default mode for files', () => {
-    const data = new UnixFS('file')
-    expect(data.mode).to.equal(parseInt('0644', 8))
-    const marshalled = data.marshal()
-    const unmarshalled = UnixFS.unmarshal(marshalled)
-    expect(unmarshalled.mode).to.equal(parseInt('0644', 8))
+    const marshaled = data.marshal()
+    const unmarshaled = UnixFS.unmarshal(marshaled)
+    expect(data.type).to.equal(unmarshaled.type)
+    expect(data.data).to.deep.equal(unmarshaled.data)
+    expect(data.blockSizes).to.deep.equal(unmarshaled.blockSizes)
+    expect(data.fileSize()).to.deep.equal(unmarshaled.fileSize())
+    unmarshaled.removeBlockSize(0)
+    expect(data.blockSizes).to.not.deep.equal(unmarshaled.blockSizes)
   })
 
-  it('default mode for directories', () => {
-    const data = new UnixFS('directory')
-    expect(data.mode).to.equal(parseInt('0755', 8))
-    const marshalled = data.marshal()
-    const unmarshalled = UnixFS.unmarshal(marshalled)
-    expect(unmarshalled.mode).to.equal(parseInt('0755', 8))
-  })
+  it('mode', () => {
+    const mode = parseInt('0555', 8)
+    const data = new UnixFS({
+      type: 'file'
+    })
+    data.mode = mode
 
-  it('default mode for hamt-sharded-directories', () => {
-    const data = new UnixFS('hamt-sharded-directory')
-    expect(data.mode).to.equal(parseInt('0755', 8))
-    const marshalled = data.marshal()
-    const unmarshalled = UnixFS.unmarshal(marshalled)
-    expect(unmarshalled.mode).to.equal(parseInt('0755', 8))
+    expect(UnixFS.unmarshal(data.marshal())).to.have.property('mode', mode)
   })
 
-  it('mode', () => {
-    const mode = parseInt('0555', 8)
-    const data = new UnixFS('file')
+  it('sets mode to 0', () => {
+    const mode = 0
+    const data = new UnixFS({
+      type: 'file'
+    })
     data.mode = mode
-    const marshalled = data.marshal()
-    const unmarshalled = UnixFS.unmarshal(marshalled)
-    expect(unmarshalled.mode).to.equal(mode)
+
+    expect(UnixFS.unmarshal(data.marshal())).to.have.property('mode', mode)
   })
 
   it('mtime', () => {
-    const mtime = parseInt(Date.now() / 1000)
-    const data = new UnixFS('file')
-    data.mtime = mtime
-    const marshalled = data.marshal()
-    const unmarshalled = UnixFS.unmarshal(marshalled)
-    expect(unmarshalled.mtime).to.equal(mtime)
+    const mtime = new Date()
+    const data = new UnixFS({
+      type: 'file',
+      mtime
+    })
+
+    const marshaled = data.marshal()
+    const unmarshaled = UnixFS.unmarshal(marshaled)
+    expect(unmarshaled.mtime).to.deep.equal(new Date(Math.round(mtime.getTime() / 1000) * 1000))
+  })
+
+  it('sets mtime to 0', () => {
+    const mtime = new Date(0)
+    const data = new UnixFS({
+      type: 'file',
+      mtime
+    })
+    expect(UnixFS.unmarshal(data.marshal())).to.have.deep.property('mtime', new Date(Math.round(mtime.getTime() / 1000) * 1000))
   })
 
   // figuring out what is this metadata for https://github.com/ipfs/js-ipfs-data-importing/issues/3#issuecomment-182336526
   it.skip('metadata', () => {})
 
   it('symlink', () => {
-    const data = new UnixFS('symlink')
-    const marshalled = data.marshal()
-    const unmarshalled = UnixFS.unmarshal(marshalled)
-    expect(data.type).to.equal(unmarshalled.type)
-    expect(data.data).to.deep.equal(unmarshalled.data)
-    expect(data.blockSizes).to.deep.equal(unmarshalled.blockSizes)
-    expect(data.fileSize()).to.deep.equal(unmarshalled.fileSize())
+    const data = new UnixFS({
+      type: 'symlink'
+    })
+    const marshaled = data.marshal()
+    const unmarshaled = UnixFS.unmarshal(marshaled)
+    expect(data.type).to.equal(unmarshaled.type)
+    expect(data.data).to.deep.equal(unmarshaled.data)
+    expect(data.blockSizes).to.deep.equal(unmarshaled.blockSizes)
+    expect(data.fileSize()).to.deep.equal(unmarshaled.fileSize())
   })
 
   it('wrong type', (done) => {
     let data
     try {
-      data = new UnixFS('bananas')
+      data = new UnixFS({
+        type: 'bananas'
+      })
     } catch (err) {
       expect(err).to.exist()
       expect(data).to.not.exist()
@@ -146,42 +174,44 @@ describe('interop', () => {
     it('raw', () => {
-      const unmarshalled = UnixFS.unmarshal(raw)
-      expect(unmarshalled.data).to.eql(Buffer.from('Hello UnixFS\n'))
-      expect(unmarshalled.type).to.equal('file')
-      expect(unmarshalled.marshal()).to.deep.equal(raw)
+      const unmarshaled = UnixFS.unmarshal(raw)
+      expect(unmarshaled.data).to.eql(Buffer.from('Hello UnixFS\n'))
+      expect(unmarshaled.type).to.equal('file')
+      expect(unmarshaled.marshal()).to.deep.equal(raw)
     })
 
     it('directory', () => {
-      const unmarshalled = UnixFS.unmarshal(directory)
-      expect(unmarshalled.data).to.deep.equal(undefined)
-      expect(unmarshalled.type).to.equal('directory')
-      expect(unmarshalled.marshal()).to.deep.equal(directory)
+      const unmarshaled = UnixFS.unmarshal(directory)
+      expect(unmarshaled.data).to.deep.equal(undefined)
+      expect(unmarshaled.type).to.equal('directory')
+      expect(unmarshaled.marshal()).to.deep.equal(directory)
     })
 
     it('file', () => {
-      const unmarshalled = UnixFS.unmarshal(file)
-      expect(unmarshalled.data).to.deep.equal(Buffer.from('Hello UnixFS\n'))
-      expect(unmarshalled.type).to.equal('file')
-      expect(unmarshalled.marshal()).to.deep.equal(file)
+      const unmarshaled = UnixFS.unmarshal(file)
+      expect(unmarshaled.data).to.deep.equal(Buffer.from('Hello UnixFS\n'))
+      expect(unmarshaled.type).to.equal('file')
+      expect(unmarshaled.marshal()).to.deep.equal(file)
     })
 
     it.skip('metadata', () => {
    })
 
    it('symlink', () => {
-      const unmarshalled = UnixFS.unmarshal(symlink)
-      expect(unmarshalled.data).to.deep.equal(Buffer.from('file.txt'))
-      expect(unmarshalled.type).to.equal('symlink')
+      const unmarshaled = UnixFS.unmarshal(symlink)
+      expect(unmarshaled.data).to.deep.equal(Buffer.from('file.txt'))
+      expect(unmarshaled.type).to.equal('symlink')
       // TODO: waiting on https://github.com/ipfs/js-ipfs-data-importing/issues/3#issuecomment-182440079
-      // expect(unmarshalled.marshal()).to.deep.equal(symlink)
+      // expect(unmarshaled.marshal()).to.deep.equal(symlink)
     })
   })
 
   it('empty', () => {
-    const data = new UnixFS('file')
-    const marshalled = data.marshal()
+    const data = new UnixFS({
+      type: 'file'
+    })
+    const marshaled = data.marshal()
 
-    expect(marshalled).to.deep.equal(Buffer.from([0x08, 0x02, 0x18, 0x00]))
+    expect(marshaled).to.deep.equal(Buffer.from([0x08, 0x02, 0x18, 0x00]))
   })
 })