Skip to content
This repository was archived by the owner on Apr 29, 2020. It is now read-only.

feat: support exporting nodes encoded with the identity hash #27

Merged
merged 2 commits
Jan 15, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,6 @@
"ipld-in-memory": "^3.0.0",
"ipfs-unixfs-importer": "^0.42.0",
"multicodec": "^1.0.0",
"multihashes": "^0.4.14",
"nyc": "^15.0.0",
"sinon": "^8.0.4"
},
Expand Down
38 changes: 38 additions & 0 deletions src/resolvers/identity.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
'use strict'

const errCode = require('err-code')
const extractDataFromBlock = require('../utils/extract-data-from-block')
const validateOffsetAndLength = require('../utils/validate-offset-and-length')
const mh = require('multihashing-async').multihash

/**
 * Creates a lazy content generator over the raw bytes of `node`.
 *
 * The returned generator accepts optional `offset`/`length` options and
 * yields a single buffer slice covering the validated range.
 *
 * @param {Buffer} node - the raw bytes to expose as content
 * @returns {Function} a generator function yielding the requested slice
 */
const rawContent = (node) => {
  return function * (options = {}) {
    // Clamp/validate the requested range against the node's byte length
    const validated = validateOffsetAndLength(node.length, options.offset, options.length)
    const start = validated.offset
    const end = start + validated.length

    yield extractDataFromBlock(node, 0, start, end)
  }
}

/**
 * Resolves a CID whose multihash uses the identity hash function, i.e. the
 * block's bytes are embedded directly in the CID rather than fetched from
 * the block store.
 *
 * @param {CID} cid - the CID being resolved
 * @param {string} name - the name of this entry
 * @param {string} path - the path resolved so far
 * @param {Array<string>} toResolve - remaining path segments
 * @param {Function} resolve - the top-level resolver (unused - identity nodes are leaves)
 * @param {number} depth - how deep into the DAG this entry is
 * @param {Object} ipld - the IPLD instance (unused - the data lives in the CID itself)
 * @returns {Promise<{entry: Object}>} the exported entry with a `content` generator
 * @throws {Error} with code ERR_NOT_FOUND when path segments remain, since
 *   identity-hashed nodes have no links to traverse
 */
const resolve = async (cid, name, path, toResolve, resolve, depth, ipld) => {
  if (toResolve.length) {
    // Was "raw node" - copied from the raw resolver, but this is the
    // identity resolver, so name the node type correctly in the error
    throw errCode(new Error(`No link named ${path} found in identity node ${cid.toBaseEncodedString()}`), 'ERR_NOT_FOUND')
  }

  // The "digest" of an identity multihash is the data itself, so decoding
  // the multihash recovers the block without touching the block store
  const buf = await mh.decode(cid.multihash)

  return {
    entry: {
      name,
      path,
      cid,
      node: buf,
      content: rawContent(buf.digest),
      depth
    }
  }
}

module.exports = resolve
3 changes: 2 additions & 1 deletion src/resolvers/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@ const errCode = require('err-code')
// Content resolvers keyed by multicodec name - presumably the `resolve`
// function below dispatches on the CID's codec to pick one; confirm against
// its body, which is not visible in this hunk
const resolvers = {
  'dag-pb': require('./unixfs-v1'),
  raw: require('./raw'),
  'dag-cbor': require('./dag-cbor'),
  identity: require('./identity')
}

const resolve = (cid, name, path, toResolve, depth, ipld) => {
Expand Down
6 changes: 3 additions & 3 deletions test/exporter-sharded.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ const expect = chai.expect
const IPLD = require('ipld')
const inMemory = require('ipld-in-memory')
const UnixFS = require('ipfs-unixfs')
const mh = require('multihashes')
const mh = require('multihashing-async').multihash
const mc = require('multicodec')
const all = require('async-iterator-all')
const last = require('async-iterator-last')
Expand Down Expand Up @@ -184,15 +184,15 @@ describe('exporter sharded', function () {
it('exports a file from a sharded directory inside a regular directory inside a sharded directory', async () => {
const dirCid = await createShard(15)

const node = new DAGNode(new UnixFS('directory').marshal(), [
const node = new DAGNode(new UnixFS({ type: 'directory' }).marshal(), [
new DAGLink('shard', 5, dirCid)
])
const nodeCid = await ipld.put(node, mc.DAG_PB, {
cidVersion: 0,
hashAlg: mh.names['sha2-256']
})

const shardNode = new DAGNode(new UnixFS('hamt-sharded-directory').marshal(), [
const shardNode = new DAGNode(new UnixFS({ type: 'hamt-sharded-directory' }).marshal(), [
new DAGLink('75normal-dir', 5, nodeCid)
])
const shardNodeCid = await ipld.put(shardNode, mc.DAG_PB, {
Expand Down
72 changes: 67 additions & 5 deletions test/exporter.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ const {
DAGNode,
DAGLink
} = require('ipld-dag-pb')
const mh = require('multihashes')
const mh = require('multihashing-async').multihash
const mc = require('multicodec')
const exporter = require('../src')
const importer = require('ipfs-unixfs-importer')
Expand All @@ -38,7 +38,10 @@ describe('exporter', () => {
options.content = options.content || Buffer.from([0x01, 0x02, 0x03])
options.links = options.links || []

const file = new UnixFS(options.type, options.content)
const file = new UnixFS({
type: options.type,
data: options.content
})

const node = new DAGNode(file.marshal(), options.links)
const cid = await ipld.put(node, mc.DAG_PB, {
Expand Down Expand Up @@ -190,7 +193,10 @@ describe('exporter', () => {

it('exports a small file with links', async () => {
const content = Buffer.from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
const chunk1 = new UnixFS('raw', content.slice(0, 5))
const chunk1 = new UnixFS({
type: 'raw',
data: content.slice(0, 5)
})
const chunkNode1 = new DAGNode(chunk1.marshal())
const chunkCid1 = await ipld.put(chunkNode1, mc.DAG_PB, {
cidVersion: 0,
Expand All @@ -204,7 +210,9 @@ describe('exporter', () => {
hashAlg: mh.names['sha2-256']
})

const file = new UnixFS('file')
const file = new UnixFS({
type: 'file'
})
file.addBlockSize(5)
file.addBlockSize(5)

Expand Down Expand Up @@ -830,7 +838,9 @@ describe('exporter', () => {
foo: 'bar'
}, mc.DAG_CBOR)

const file = new UnixFS('file')
const file = new UnixFS({
type: 'file'
})
file.addBlockSize(100)

const cid = await ipld.put(new DAGNode(file.marshal(), [
Expand Down Expand Up @@ -891,4 +901,56 @@ describe('exporter', () => {
expect(exported[4].name).to.equal('qux.txt')
expect(exported[4].path).to.equal(`${dirCid}/qux.txt`)
})

it('exports a CID encoded with the identity hash', async () => {
  // An identity-hashed CID embeds the data itself in the multihash digest
  const input = Buffer.from('hello world')
  const cid = new CID(1, 'identity', mh.encode(input, 'identity'))

  const entry = await exporter(cid, ipld)
  const chunks = await all(entry.content())
  const output = Buffer.concat(chunks)

  expect(output).to.deep.equal(input)
  expect(output.toString('utf8')).to.equal('hello world')
})

it('exports a CID encoded with the identity hash with an offset', async () => {
  const input = Buffer.from('hello world')
  const cid = new CID(1, 'identity', mh.encode(input, 'identity'))

  // Skipping the first byte should drop the leading 'h'
  const entry = await exporter(cid, ipld)
  const chunks = await all(entry.content({ offset: 1 }))

  expect(Buffer.concat(chunks).toString('utf8')).to.equal('ello world')
})

it('exports a CID encoded with the identity hash with a length', async () => {
  const input = Buffer.from('hello world')
  const cid = new CID(1, 'identity', mh.encode(input, 'identity'))

  // Limiting the length to one byte should yield only the leading 'h'
  const entry = await exporter(cid, ipld)
  const chunks = await all(entry.content({ length: 1 }))

  expect(Buffer.concat(chunks).toString('utf8')).to.equal('h')
})

it('exports a CID encoded with the identity hash with an offset and a length', async () => {
  const input = Buffer.from('hello world')
  const cid = new CID(1, 'identity', mh.encode(input, 'identity'))

  // One byte starting at index 3 of 'hello world' is 'l'
  const entry = await exporter(cid, ipld)
  const chunks = await all(entry.content({
    offset: 3,
    length: 1
  }))

  expect(Buffer.concat(chunks).toString('utf8')).to.equal('l')
})
})