Skip to content
This repository has been archived by the owner on Aug 12, 2020. It is now read-only.

Commit

Permalink
feat: upgrade to the next version of ipfs-block and blockservice
Browse files Browse the repository at this point in the history
  • Loading branch information
dignifiedquire authored and daviddias committed Mar 22, 2017
1 parent 304ff25 commit 0ca25b2
Show file tree
Hide file tree
Showing 137 changed files with 159 additions and 127 deletions.
28 changes: 13 additions & 15 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -39,32 +39,30 @@
},
"homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme",
"devDependencies": {
"aegir": "^10.0.0",
"buffer-loader": "0.0.1",
"aegir": "^11.0.1",
"chai": "^3.5.0",
"fs-pull-blob-store": "^0.4.1",
"idb-pull-blob-store": "^0.5.1",
"ipfs-block-service": "^0.8.1",
"ipfs-repo": "^0.11.2",
"dirty-chai": "^1.2.2",
"ipfs-block-service": "^0.9.0",
"ipfs-repo": "^0.12.0",
"ncp": "^2.0.0",
"pre-commit": "^1.2.2",
"pull-generate": "^2.2.0",
"pull-zip": "^2.0.1",
"rimraf": "^2.5.4"
"rimraf": "^2.6.1"
},
"dependencies": {
"async": "^2.1.4",
"cids": "^0.4.0",
"async": "^2.1.5",
"cids": "^0.4.2",
"deep-extend": "^0.4.1",
"ipfs-unixfs": "^0.1.9",
"ipld-dag-pb": "^0.10.0",
"ipld-resolver": "^0.8.0",
"ipfs-unixfs": "^0.1.11",
"ipld-dag-pb": "^0.11.0",
"ipld-resolver": "^0.11.0",
"is-ipfs": "^0.3.0",
"lodash": "^4.17.4",
"multihashes": "^0.3.2",
"multihashes": "^0.4.4",
"pull-batch": "^1.0.0",
"pull-cat": "^1.1.11",
"pull-block": "^1.1.0",
"pull-cat": "^1.1.11",
"pull-pair": "^1.1.0",
"pull-paramap": "^1.2.1",
"pull-pause": "0.0.1",
Expand All @@ -85,4 +83,4 @@
"jbenet <[email protected]>",
"nginnever <[email protected]>"
]
}
}
8 changes: 4 additions & 4 deletions src/builder/trickle/trickle-reducer.js
Original file line number Diff line number Diff line change
Expand Up @@ -110,14 +110,14 @@ module.exports = function trickleReduceToRoot (reduce, options) {
function iterate () {
deeper = null
iteration++
if (depth === 0 && iteration === options.maxChildrenPerNode ||
depth > 0 && iteration === options.layerRepeat) {
if ((depth === 0 && iteration === options.maxChildrenPerNode) ||
(depth > 0 && iteration === options.layerRepeat)) {
iteration = 0
depth++
}

if (!aborting && maxDepth >= 0 && depth > maxDepth ||
aborting && !pendingResumes) {
if ((!aborting && maxDepth >= 0 && depth > maxDepth) ||
(aborting && !pendingResumes)) {
aborting = true
result.end()
}
Expand Down
57 changes: 21 additions & 36 deletions test/browser.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,8 @@
/* global self */
'use strict'

const Store = require('idb-pull-blob-store')
const series = require('async/series')
const IPFSRepo = require('ipfs-repo')
const repoContext = require.context('buffer!./repo-example', true)
const pull = require('pull-stream')

const idb = self.indexedDB ||
self.mozIndexedDB ||
Expand All @@ -16,47 +14,34 @@ idb.deleteDatabase('ipfs')
idb.deleteDatabase('ipfs/blocks')

describe('IPFS data importing tests on the Browser', function () {
before(function (done) {
this.timeout(23000)
const repoData = []
repoContext.keys().forEach(function (key) {
repoData.push({
key: key.replace('./', ''),
value: repoContext(key)
})
})
const repo = new IPFSRepo('ipfs')

const mainBlob = new Store('ipfs')
const blocksBlob = new Store('ipfs/blocks')

pull(
pull.values(repoData),
pull.asyncMap((file, cb) => {
if (file.key.indexOf('datastore/') === 0) {
return cb()
}

const blocks = file.key.indexOf('blocks/') === 0
const blob = blocks ? blocksBlob : mainBlob
const key = blocks ? file.key.replace(/^blocks\//, '') : file.key

pull(
pull.values([file.value]),
blob.write(key, cb)
)
}),
pull.onEnd(done)
)
before((done) => {
series([
(cb) => repo.init({}, cb),
(cb) => repo.open(cb)
], done)
})

// create the repo constant to be used in the import a small buffer test
const repo = new IPFSRepo('ipfs', {stores: Store})
after((done) => {
series([
(cb) => repo.close(cb),
(cb) => {
idb.deleteDatabase('ipfs')
idb.deleteDatabase('ipfs/blocks')
cb()
}
], done)
})

require('./test-flat-builder')
require('./test-balanced-builder')
require('./test-trickle-builder')
require('./test-fixed-size-chunker')
require('./test-exporter')(repo)

// relies on data in the repo
// require('./test-exporter')(repo)

require('./test-importer')(repo)
require('./test-import-export')(repo)
require('./test-hash-parity-with-go-ipfs')(repo)
Expand Down
28 changes: 14 additions & 14 deletions test/node.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,37 +5,37 @@ const ncp = require('ncp').ncp
const rimraf = require('rimraf')
const path = require('path')
const IPFSRepo = require('ipfs-repo')
const Store = require('fs-pull-blob-store')
const mkdirp = require('mkdirp')
const series = require('async/series')

describe('IPFS UnixFS Engine', () => {
const repoExample = path.join(process.cwd(), '/test/repo-example')
const repoExample = path.join(process.cwd(), '/test/test-repo')
const repoTests = path.join(process.cwd(), '/test/repo-tests' + Date.now())

before((done) => {
ncp(repoExample, repoTests, (err) => {
process.env.IPFS_PATH = repoTests
done(err)
})
})
const repo = new IPFSRepo(repoTests)

before((done) => {
const paths = [
'test-data/dir-nested/dir-another',
'test-data/dir-nested/level-1/level-2'
]

series(paths.map((p) => (cb) => {
mkdirp(path.join(__dirname, p), cb)
}), done)
process.env.IPFS_PATH = repoTests
series([
(cb) => ncp(repoExample, repoTests, cb),
(cb) => repo.open(cb),
(cb) => series(paths.map((p) => (cb) => {
mkdirp(path.join(__dirname, p), cb)
}), cb)
], done)
})

after((done) => {
rimraf(repoTests, done)
series([
(cb) => repo.close(cb),
(cb) => rimraf(repoTests, cb)
], done)
})

const repo = new IPFSRepo(repoTests, {stores: Store})
require('./test-flat-builder')
require('./test-balanced-builder')
require('./test-trickle-builder')
Expand Down
1 change: 0 additions & 1 deletion test/repo-example/version

This file was deleted.

12 changes: 7 additions & 5 deletions test/test-balanced-builder.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
/* eslint-env mocha */
'use strict'

const expect = require('chai').expect
const chai = require('chai')
chai.use(require('dirty-chai'))
const expect = chai.expect
const pull = require('pull-stream')

const builder = require('../src/builder/balanced')
Expand All @@ -24,7 +26,7 @@ describe('balanced builder', () => {
pull.values([1]),
builder(reduce, options),
pull.collect((err, result) => {
expect(err).to.not.exist
expect(err).to.not.exist()
expect(result).to.be.eql([1])
callback()
})
Expand All @@ -36,7 +38,7 @@ describe('balanced builder', () => {
pull.values([1, 2, 3]),
builder(reduce, options),
pull.collect((err, result) => {
expect(err).to.not.exist
expect(err).to.not.exist()
expect(result).to.be.eql([{
children: [1, 2, 3]
}])
Expand All @@ -50,7 +52,7 @@ describe('balanced builder', () => {
pull.values([1, 2, 3, 4]),
builder(reduce, options),
pull.collect((err, result) => {
expect(err).to.not.exist
expect(err).to.not.exist()
expect(result).to.be.eql([
{
children: [
Expand All @@ -71,7 +73,7 @@ describe('balanced builder', () => {
pull.values([1, 2, 3, 4, 5, 6, 7]),
builder(reduce, options),
pull.collect((err, result) => {
expect(err).to.not.exist
expect(err).to.not.exist()
expect(result).to.be.eql([
{
children: [
Expand Down
30 changes: 16 additions & 14 deletions test/test-exporter.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
/* eslint-env mocha */
'use strict'

const expect = require('chai').expect
const chai = require('chai')
chai.use(require('dirty-chai'))
const expect = chai.expect
const BlockService = require('ipfs-block-service')
const IPLDResolver = require('ipld-resolver')
const UnixFS = require('ipfs-unixfs')
Expand Down Expand Up @@ -31,7 +33,7 @@ module.exports = (repo) => {
const cid = new CID(hash)

ipldResolver.get(cid, (err, result) => {
expect(err).to.not.exist
expect(err).to.not.exist()
const node = result.value
const unmarsh = UnixFS.unmarshal(node.data)

Expand All @@ -41,7 +43,7 @@ module.exports = (repo) => {
)

function onFiles (err, files) {
expect(err).to.not.exist
expect(err).to.not.exist()
expect(files).to.have.length(1)
expect(files[0]).to.have.property('path', hash)

Expand All @@ -56,13 +58,13 @@ module.exports = (repo) => {
pull(
zip(
pull(
ipldResolver._getStream(new CID(hash)),
pull.map((node) => UnixFS.unmarshal(node.data))
ipldResolver.getStream(new CID(hash)),
pull.map((res) => UnixFS.unmarshal(res.value.data))
),
exporter(hash, ipldResolver)
),
pull.collect((err, values) => {
expect(err).to.not.exist
expect(err).to.not.exist()
const unmarsh = values[0][0]
const file = values[0][1]

Expand All @@ -76,7 +78,7 @@ module.exports = (repo) => {
pull(
exporter(hash, ipldResolver),
pull.collect((err, files) => {
expect(err).to.not.exist
expect(err).to.not.exist()

fileEql(files[0], bigFile, done)
})
Expand All @@ -88,7 +90,7 @@ module.exports = (repo) => {
pull(
exporter(hash, ipldResolver),
pull.collect((err, files) => {
expect(err).to.not.exist
expect(err).to.not.exist()

expect(files[0]).to.have.property('path', 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE')
fileEql(files[0], null, done)
Expand All @@ -102,7 +104,7 @@ module.exports = (repo) => {
pull(
exporter(hash, ipldResolver),
pull.collect((err, files) => {
expect(err).to.not.exist
expect(err).to.not.exist()

expect(
files.map((file) => file.path)
Expand All @@ -119,7 +121,7 @@ module.exports = (repo) => {
pull.values(files),
pull.map((file) => Boolean(file.content)),
pull.collect((err, contents) => {
expect(err).to.not.exist
expect(err).to.not.exist()
expect(contents).to.be.eql([
false,
true,
Expand All @@ -141,8 +143,8 @@ module.exports = (repo) => {
pull(
exporter(hash, ipldResolver),
pull.collect((err, files) => {
expect(err).to.not.exist
expect(files[0].content).to.not.exist
expect(err).to.not.exist()
expect(files[0].content).to.not.exist()
done()
})
)
Expand All @@ -155,7 +157,7 @@ module.exports = (repo) => {
pull(
exporter(hash, ipldResolver),
pull.collect((err, files) => {
expect(err).to.exist
expect(err).to.exist()
done()
})
)
Expand All @@ -175,7 +177,7 @@ function fileEql (f1, f2, done) {
if (f2) {
expect(Buffer.concat(data)).to.eql(f2)
} else {
expect(data).to.exist
expect(data).to.exist()
}
} catch (err) {
return done(err)
Expand Down
Loading

0 comments on commit 0ca25b2

Please sign in to comment.