This repository was archived by the owner on Mar 10, 2020. It is now read-only.

chore: update cids and ipld #468

Closed
wants to merge 9 commits into from
6 changes: 3 additions & 3 deletions package.json
@@ -40,7 +40,7 @@
"bl": "^3.0.0",
"bs58": "^4.0.1",
"chai": "^4.2.0",
"cids": "~0.5.8",
"cids": "~0.7.1",
"concat-stream": "^2.0.0",
"dirty-chai": "^2.0.1",
"es6-promisify": "^6.0.1",
@@ -49,8 +49,8 @@
"ipfs-block": "~0.8.0",
"ipfs-unixfs": "~0.1.16",
"ipfs-utils": "~0.0.3",
"ipld-dag-cbor": "~0.13.1",
"ipld-dag-pb": "~0.15.3",
"ipld-dag-cbor": "~0.15.0",
"ipld-dag-pb": "~0.17.1",
"is-ipfs": "~0.6.0",
"is-plain-object": "^3.0.0",
"libp2p-crypto": "~0.16.0",
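Note on the dependency bumps above: cids ~0.7, ipld-dag-cbor ~0.15 and ipld-dag-pb ~0.17 change the helpers used throughout this PR — `DAGNode.create` becomes synchronous, and `util.cid` takes serialized bytes and returns a promise instead of accepting a node and a callback. A minimal sketch of that migration pattern, mirroring the updated tests below (illustration only):

```js
const dagPB = require('ipld-dag-pb')
const dagCBOR = require('ipld-dag-cbor')

async function example () {
  // Before (~0.15): dagPB.DAGNode.create(data, (err, node) => { ... })
  // After (~0.17): node creation is synchronous
  const node = dagPB.DAGNode.create(Buffer.from('some data'))

  // Before: dagPB.util.cid(node, (err, cid) => { ... })
  // After: util.cid takes the serialized bytes and returns a promise
  const pbCid = await dagPB.util.cid(dagPB.util.serialize(node))

  // Same pattern for plain dag-cbor objects
  const cborCid = await dagCBOR.util.cid(dagCBOR.util.serialize({ hello: 'world' }))

  return { pbCid, cborCid }
}
```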
85 changes: 35 additions & 50 deletions src/dag/get.js
@@ -3,7 +3,7 @@

const { series, eachSeries } = require('async')
const dagPB = require('ipld-dag-pb')
const DAGNode = dagPB.DAGNode
const { DAGNode } = dagPB
const dagCBOR = require('ipld-dag-cbor')
const Unixfs = require('ipfs-unixfs')
const CID = require('cids')
@@ -48,41 +48,32 @@ module.exports = (createCommon, options) => {
(cb) => {
const someData = Buffer.from('some other data')

DAGNode.create(someData, (err, node) => {
expect(err).to.not.exist()
pbNode = node
cb()
})
pbNode = DAGNode.create(someData)

cborNode = {
data: someData
}

nodePb = DAGNode.create(Buffer.from('I am inside a Protobuf'))

cb()
},
(cb) => {
dagPB.DAGNode.create(Buffer.from('I am inside a Protobuf'), (err, node) => {
expect(err).to.not.exist()
nodePb = node
cb()
})
},
(cb) => {
dagPB.util.cid(nodePb, (err, cid) => {
expect(err).to.not.exist()
cidPb = cid
cb()
})
dagPB.util.cid(dagPB.util.serialize(nodePb))
.then(cid => { cidPb = cid })
.then(cb)
.catch(cb)
},
(cb) => {
nodeCbor = {
someData: 'I am inside a Cbor object',
pb: cidPb
}

dagCBOR.util.cid(nodeCbor, (err, cid) => {
expect(err).to.not.exist()
cidCbor = cid
cb()
})
dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor))
.then(cid => { cidCbor = cid })
.then(cb)
.catch(cb)
},
(cb) => {
eachSeries([
@@ -135,11 +126,10 @@ module.exports = (createCommon, options) => {

const node = result.value

dagPB.util.cid(node, (err, cid) => {
expect(err).to.not.exist()
expect(cid).to.eql(cidPb)
done()
})
dagPB.util.cid(dagPB.util.serialize(node))
.then(cid => { expect(cid).to.eql(cidPb) })
.then(done)
.catch(done)
})
})

@@ -160,11 +150,10 @@ module.exports = (createCommon, options) => {

const node = result.value

dagCBOR.util.cid(node, (err, cid) => {
expect(err).to.not.exist()
expect(cid).to.eql(cidCbor)
done()
})
dagCBOR.util.cid(dagCBOR.util.serialize(node))
.then(cid => { expect(cid).to.eql(cidCbor) })
.then(done)
.catch(done)
})
})

@@ -196,11 +185,10 @@ module.exports = (createCommon, options) => {

const node = result.value

dagCBOR.util.cid(node, (err, cid) => {
expect(err).to.not.exist()
expect(cid).to.eql(cidCbor)
done()
})
dagCBOR.util.cid(dagCBOR.util.serialize(node))
.then(cid => { expect(cid).to.eql(cidCbor) })
.then(done)
.catch(done)
})
})

@@ -224,21 +212,18 @@ module.exports = (createCommon, options) => {

it('should get a node added as CIDv0 with a CIDv1', done => {
const input = Buffer.from(`TEST${Date.now()}`)
const node = DAGNode.create(input)

dagPB.DAGNode.create(input, (err, node) => {
ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256' }, (err, cid) => {
expect(err).to.not.exist()
expect(cid.version).to.equal(0)

ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256' }, (err, cid) => {
expect(err).to.not.exist()
expect(cid.version).to.equal(0)

const cidv1 = cid.toV1()
const cidv1 = cid.toV1()

ipfs.dag.get(cidv1, (err, output) => {
expect(err).to.not.exist()
expect(output.value.data).to.eql(input)
done()
})
ipfs.dag.get(cidv1, (err, output) => {
expect(err).to.not.exist()
expect(output.value.Data).to.eql(input)
done()
})
})
})
@@ -256,7 +241,7 @@ module.exports = (createCommon, options) => {

ipfs.dag.get(cidv0, (err, output) => {
expect(err).to.not.exist()
expect(Unixfs.unmarshal(output.value.data).data).to.eql(input)
expect(Unixfs.unmarshal(output.value.Data).data).to.eql(input)
done()
})
})
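The assertions above also switch from `output.value.data` to `output.value.Data`: the updated code suggests that in ipld-dag-pb ~0.17 a `DAGNode`'s fields are exposed capitalized (`Data`, `Links`). A small sketch of reading a retrieved dag-pb node under that assumption, reusing the `ipfs` instance and `cidPb` set up in these tests:

```js
// Assumes ipld-dag-pb ~0.17, where DAGNode fields are capitalized (Data, Links)
ipfs.dag.get(cidPb, (err, result) => {
  if (err) throw err
  const node = result.value
  console.log(node.Data.toString()) // payload bytes ("Data", not "data")
  console.log(node.Links.length)    // links to other dag-pb nodes
})
```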
20 changes: 7 additions & 13 deletions src/dag/put.js
@@ -1,8 +1,7 @@
/* eslint-env mocha */
'use strict'

const dagPB = require('ipld-dag-pb')
const DAGNode = dagPB.DAGNode
const { DAGNode } = require('ipld-dag-pb')
const dagCBOR = require('ipld-dag-cbor')
const CID = require('cids')
const multihash = require('multihashes')
@@ -38,14 +37,10 @@ module.exports = (createCommon, options) => {
let pbNode
let cborNode

before((done) => {
before(() => {
const someData = Buffer.from('some data')

DAGNode.create(someData, (err, node) => {
expect(err).to.not.exist()
pbNode = node
done()
})
pbNode = DAGNode.create(someData)

cborNode = {
data: someData
@@ -88,11 +83,10 @@ module.exports = (createCommon, options) => {
expect(err).to.not.exist()
expect(cid).to.exist()
expect(CID.isCID(cid)).to.equal(true)
dagCBOR.util.cid(cborNode, (err, _cid) => {
expect(err).to.not.exist()
expect(cid.buffer).to.eql(_cid.buffer)
done()
})
dagCBOR.util.cid(dagCBOR.util.serialize(cborNode))
.then(_cid => { expect(cid.buffer).to.eql(_cid.buffer) })
.then(done)
.catch(done)
})
})

27 changes: 10 additions & 17 deletions src/dag/tree.js
@@ -42,30 +42,23 @@ module.exports = (createCommon, options) => {
before(function (done) {
series([
(cb) => {
dagPB.DAGNode.create(Buffer.from('I am inside a Protobuf'), (err, node) => {
expect(err).to.not.exist()
nodePb = node
cb()
})
},
(cb) => {
dagPB.util.cid(nodePb, (err, cid) => {
expect(err).to.not.exist()
cidPb = cid
cb()
})
nodePb = dagPB.DAGNode.create(Buffer.from('I am inside a Protobuf'))

dagPB.util.cid(dagPB.util.serialize(nodePb))
.then(cid => { cidPb = cid })
.then(cb)
.catch(cb)
},
(cb) => {
nodeCbor = {
someData: 'I am inside a Cbor object',
pb: cidPb
}

dagCBOR.util.cid(nodeCbor, (err, cid) => {
expect(err).to.not.exist()
cidCbor = cid
cb()
})
dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor))
.then(cid => { cidCbor = cid })
.then(cb)
.catch(cb)
},
(cb) => {
eachSeries([
3 changes: 2 additions & 1 deletion src/files-regular/refs-tests.js
@@ -4,6 +4,7 @@
const mapSeries = require('async/mapSeries')
const { getDescribe, getIt, expect } = require('../utils/mocha')
const loadFixture = require('aegir/fixtures')
const CID = require('cids')

module.exports = (createCommon, suiteName, ipfsRefs, options) => {
const describe = getDescribe(options)
@@ -365,7 +366,7 @@ function loadDagContent (ipfs, node, callback) {
putLinks: (links, cb) => {
const obj = {}
for (const { name, cid } of links) {
obj[name] = { '/': cid }
obj[name] = new CID(cid)
}
ipfs.dag.put(obj, cb)
}
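In the refs-tests helper above, the old `{ '/': cid }` link notation is replaced with real `CID` instances: with cids ~0.7 and ipld-dag-cbor ~0.15, CID objects embedded in an object stored via `ipfs.dag.put` are encoded as IPLD links directly. A minimal sketch, assuming an `ipfs` instance and using a hypothetical child CID purely for illustration:

```js
const CID = require('cids')

// hypothetical child CID, for illustration only
const child = new CID('QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Vz')

// a CID instance in the object becomes an IPLD link when encoded as dag-cbor
ipfs.dag.put({ name: 'child', link: child }, { format: 'dag-cbor', hashAlg: 'sha2-256' }, (err, cid) => {
  if (err) throw err
  console.log(`stored object with a link: ${cid}`)
})
```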