This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Update #190

Merged
merged 5 commits on Nov 7, 2017
4 changes: 4 additions & 0 deletions circle.yml
@@ -6,8 +6,12 @@ dependencies:
pre:
- google-chrome --version
- curl -L -o google-chrome.deb https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
- for v in $(curl http://archive.ubuntu.com/ubuntu/pool/main/n/nss/ | grep "href=" | grep "libnss3.*deb\"" -o | grep -o "libnss3.*deb" | grep "3.28" | grep "14.04"); do curl -L -o $v http://archive.ubuntu.com/ubuntu/pool/main/n/nss/$v; done && rm libnss3-tools*_i386.deb libnss3-dev*_i386.deb
- sudo dpkg -i google-chrome.deb || true
- sudo dpkg -i libnss3*.deb || true
- sudo apt-get update
- sudo apt-get install -f || true
- sudo dpkg -i libnss3*.deb
- sudo apt-get install -f
- sudo apt-get install --only-upgrade lsb-base
- sudo dpkg -i google-chrome.deb
20 changes: 10 additions & 10 deletions package.json
@@ -43,38 +43,38 @@
"aegir": "^11.0.2",
"chai": "^4.1.2",
"dirty-chai": "^2.0.1",
"ipfs": "^0.25.4",
"ipfs": "^0.26.0",
"ipfs-block-service": "^0.12.0",
"ipfs-repo": "^0.17.0",
"ncp": "^2.0.0",
"pre-commit": "^1.2.2",
"pull-generate": "^2.2.0",
"pull-zip": "^2.0.1",
"rimraf": "^2.6.1",
"sinon": "^3.2.1",
"rimraf": "^2.6.2",
"sinon": "^4.0.1",
"split": "^1.0.1"
},
"dependencies": {
"async": "^2.5.0",
"bs58": "^4.0.1",
"cids": "^0.5.1",
"cids": "~0.5.2",
"deep-extend": "^0.5.0",
"ipfs-unixfs": "^0.1.13",
"ipld-dag-pb": "^0.11.2",
"ipld-resolver": "^0.13.1",
"ipld-resolver": "^0.13.4",
"left-pad": "^1.1.3",
"lodash": "^4.17.4",
"multihashes": "^0.4.9",
"multihashing-async": "^0.4.6",
"multihashes": "~0.4.12",
"multihashing-async": "~0.4.7",
"pull-batch": "^1.0.0",
"pull-block": "^1.2.0",
"pull-block": "1.2.0",
"pull-cat": "^1.1.11",
"pull-defer": "^0.2.2",
"pull-pair": "^1.1.0",
"pull-paramap": "^1.2.2",
"pull-pause": "0.0.1",
"pull-pushable": "^2.1.1",
"pull-stream": "^3.6.0",
"pull-stream": "^3.6.1",
"pull-traverse": "^1.0.3",
"pull-write": "^1.1.4",
"sparse-array": "^1.3.1"
@@ -93,4 +93,4 @@
"jbenet <juan@benet.ai>",
"nginnever <ginneversource@gmail.com>"
]
}
}
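
Several dependency ranges above move from caret (^) ranges to tilde (~) ranges or exact pins; the most restrictive change is pull-block, now pinned to exactly 1.2.0. A minimal sketch of what that pin changes, using the standalone semver package purely for illustration (it is not a dependency of this module):

// Illustration only: `semver` is not part of this package's dependency tree.
const semver = require('semver')

// The old range "^1.2.0" accepted any 1.x release at or above 1.2.0.
console.log(semver.satisfies('1.3.0', '^1.2.0')) // true

// The new exact pin "1.2.0" accepts only that single release.
console.log(semver.satisfies('1.3.0', '1.2.0'))  // false
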
6 changes: 4 additions & 2 deletions test/test-builder.js
@@ -18,13 +18,15 @@ module.exports = (repo) => {
describe('builder', () => {
let ipldResolver

const testMultihashes = Object.keys(mh.names).slice(0, 40)

before(() => {
const bs = new BlockService(repo)
ipldResolver = new IPLDResolver(bs)
})

it('allows multihash hash algorithm to be specified', (done) => {
eachSeries(Object.keys(mh.names), (hashAlg, cb) => {
eachSeries(testMultihashes, (hashAlg, cb) => {
const options = { hashAlg, strategy: 'flat' }
const content = String(Math.random() + Date.now())
const inputFile = {
@@ -59,7 +61,7 @@ module.exports = (repo) => {
})

it('allows multihash hash algorithm to be specified for big file', (done) => {
eachSeries(Object.keys(mh.names), (hashAlg, cb) => {
eachSeries(testMultihashes, (hashAlg, cb) => {
const options = { hashAlg, strategy: 'flat' }
const content = String(Math.random() + Date.now())
const inputFile = {
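
The change above swaps the full multihash table for a testMultihashes constant holding only the first 40 algorithm names, so both specs iterate over the same bounded list. A minimal sketch of that iteration pattern, assuming only the multihashes and async packages already listed in package.json:

// Sketch of the bounded iteration the test now performs (illustration only).
const mh = require('multihashes')
const eachSeries = require('async/eachSeries')

// mh.names maps every supported hash algorithm name to its multihash code;
// slicing keeps the test run to the first 40 algorithms.
const testMultihashes = Object.keys(mh.names).slice(0, 40)

eachSeries(testMultihashes, (hashAlg, cb) => {
  // The real test builds an import with { hashAlg, strategy: 'flat' } here.
  console.log('would import with hash algorithm', hashAlg)
  cb()
}, (err) => {
  if (err) throw err
})
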
10 changes: 5 additions & 5 deletions test/test-fixed-size-chunker.js
@@ -12,9 +12,9 @@ const rawFile = loadFixture(__dirname, 'fixtures/1MiB.txt')

describe('chunker: fixed size', () => {
it('chunks non flat buffers', (done) => {
const b1 = new Buffer(2 * 256)
const b2 = new Buffer(1 * 256)
const b3 = new Buffer(5 * 256)
const b1 = Buffer.alloc(2 * 256)
const b2 = Buffer.alloc(1 * 256)
const b3 = Buffer.alloc(5 * 256)

b1.fill('a')
b2.fill('b')
@@ -36,7 +36,7 @@

it('256 Bytes chunks', (done) => {
pull(
pull.infinite(() => Buffer([1])),
pull.infinite(() => Buffer.from('a')),
pull.take(256 * 12),
chunker(256),
pull.collect((err, chunks) => {
@@ -69,7 +69,7 @@

it('256 KiB chunks of non scalar filesize', (done) => {
const KiB256 = 262144
let file = Buffer.concat([rawFile, new Buffer('hello')])
let file = Buffer.concat([rawFile, Buffer.from('hello')])

pull(
pull.values(file),
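
The edits in this file replace the deprecated new Buffer(...) constructor with the safe Buffer.alloc / Buffer.from APIs. A minimal sketch of the equivalences the test now relies on (plain Node.js, no extra dependencies):

// new Buffer(n) allocated n bytes; Buffer.alloc(n) does the same but zero-fills.
const zeroed = Buffer.alloc(2 * 256)
zeroed.fill('a') // same fill pattern the test applies afterwards

// new Buffer('hello') copied string content; Buffer.from('hello') is the replacement.
const fromString = Buffer.from('hello')

console.log(zeroed.length, fromString.toString()) // 512 'hello'
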