This repository has been archived by the owner on Aug 12, 2020. It is now read-only.

Commit f4de206

fix: use "ipld" instead of "ipld-resolver"
The "ipld-resolver" has been renamed to just "ipld".
vmx authored and daviddias committed Feb 27, 2018
1 parent 3d6c9b1 commit f4de206
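
The rename is mechanical for consumers as well: the constructor and its arguments are unchanged, only the package name differs. A minimal before/after sketch using the same wiring as the updated tests (`repo` stands in for an existing IPFS repo instance):

    // Before: the resolver lived in the "ipld-resolver" package
    // const IPLDResolver = require('ipld-resolver')
    // const ipldResolver = new IPLDResolver(new BlockService(repo))

    // After: identical constructor, package renamed to "ipld"
    const BlockService = require('ipfs-block-service')
    const Ipld = require('ipld')
    const ipld = new Ipld(new BlockService(repo))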
Showing 20 changed files with 114 additions and 114 deletions.
package.json (2 changes: 1 addition & 1 deletion)

@@ -60,8 +60,8 @@
     "cids": "~0.5.2",
     "deep-extend": "~0.5.0",
     "ipfs-unixfs": "~0.1.14",
+    "ipld": "^0.15.0",
     "ipld-dag-pb": "~0.13.1",
-    "ipld-resolver": "~0.14.1",
     "left-pad": "^1.2.0",
     "lodash": "^4.17.5",
     "multihashes": "~0.4.13",
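
Note that the range style also changes from tilde ("~0.14.1") to caret ("^0.15.0"); for pre-1.0 packages both resolve only patch-level updates within the same minor, so the new entry effectively tracks 0.15.x releases.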
src/builder/builder.js (8 changes: 4 additions & 4 deletions)

@@ -19,7 +19,7 @@ const defaultOptions = {
   }
 }

-module.exports = function (createChunker, ipldResolver, createReducer, _options) {
+module.exports = function (createChunker, ipld, createReducer, _options) {
   const options = extend({}, defaultOptions, _options)

   return function (source) {
@@ -69,7 +69,7 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
           cid = cid.toV1()
         }

-        ipldResolver.put(node, { cid }, (err) => cb(err, node))
+        ipld.put(node, { cid }, (err) => cb(err, node))
       }
     ], (err, node) => {
       if (err) {
@@ -92,7 +92,7 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
       return callback(new Error('invalid content'))
     }

-    const reducer = createReducer(reduce(file, ipldResolver, options), options)
+    const reducer = createReducer(reduce(file, ipld, options), options)

     let previous
     let count = 0
@@ -121,7 +121,7 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
         cid = cid.toV1()
       }

-      ipldResolver.put(leaf.DAGNode, { cid }, (err) => callback(err, leaf))
+      ipld.put(leaf.DAGNode, { cid }, (err) => callback(err, leaf))
     }),
     pull.map((leaf) => {
       return {
src/builder/create-build-stream.js (2 changes: 1 addition & 1 deletion)

@@ -3,7 +3,7 @@
 const pullPushable = require('pull-pushable')
 const pullWrite = require('pull-write')

-module.exports = function createBuildStream (createStrategy, ipldResolver, options) {
+module.exports = function createBuildStream (createStrategy, _ipld, options) {
   const source = pullPushable()

   const sink = pullWrite(
src/builder/index.js (8 changes: 4 additions & 4 deletions)

@@ -16,17 +16,17 @@ const defaultOptions = {
   reduceSingleLeafToSelf: false
 }

-module.exports = function (Chunker, ipldResolver, _options) {
+module.exports = function (Chunker, ipld, _options) {
   assert(Chunker, 'Missing chunker creator function')
-  assert(ipldResolver, 'Missing IPLD Resolver')
+  assert(ipld, 'Missing IPLD')

   const options = Object.assign({}, defaultOptions, _options)

   const strategyName = options.strategy
   const reducer = reducers[strategyName]
   assert(reducer, 'Unknown importer build strategy name: ' + strategyName)

-  const createStrategy = Builder(Chunker, ipldResolver, reducer, options)
+  const createStrategy = Builder(Chunker, ipld, reducer, options)

-  return createBuildStream(createStrategy, ipldResolver, options)
+  return createBuildStream(createStrategy, ipld, options)
 }
src/builder/reduce.js (4 changes: 2 additions & 2 deletions)

@@ -8,7 +8,7 @@ const CID = require('cids')
 const DAGLink = dagPB.DAGLink
 const DAGNode = dagPB.DAGNode

-module.exports = function (file, ipldResolver, options) {
+module.exports = function (file, ipld, options) {
   return function (leaves, callback) {
     if (leaves.length === 1 && (leaves[0].single || options.reduceSingleLeafToSelf)) {
       const leave = leaves[0]
@@ -42,7 +42,7 @@ module.exports = function (file, ipldResolver, options) {
         cid = cid.toV1()
       }

-      ipldResolver.put(node, { cid }, (err) => cb(err, node))
+      ipld.put(node, { cid }, (err) => cb(err, node))
     }
   ], (err, node) => {
     if (err) {
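
Every put call site touched in this commit follows the same persistence pattern: create a DAGNode, derive a CID from its multihash, optionally upgrade the CID to v1, then store through the `ipld` instance. A standalone sketch of that pattern against the dependency versions in package.json (`storeNode` is a hypothetical helper, not code from this repo):

    const dagPB = require('ipld-dag-pb')
    const CID = require('cids')
    const waterfall = require('async/waterfall')

    // Hypothetical helper mirroring the recurring pattern in this commit.
    function storeNode (ipld, data, links, callback) {
      waterfall([
        (cb) => dagPB.DAGNode.create(data, links, cb),
        (node, cb) => {
          let cid = new CID(node.multihash) // CIDv0 from the node's multihash
          // the importer upgrades when configured for CIDv1:
          // if (options.cidVersion === 1) { cid = cid.toV1() }
          ipld.put(node, { cid }, (err) => cb(err, node))
        }
      ], callback)
    }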
src/importer/dir-flat.js (4 changes: 2 additions & 2 deletions)

@@ -48,7 +48,7 @@ class DirFlat extends Dir {
     )
   }

-  flush (path, ipldResolver, source, callback) {
+  flush (path, ipld, source, callback) {
     const links = Object.keys(this._children)
       .map((key) => {
         const child = this._children[key]
@@ -70,7 +70,7 @@ class DirFlat extends Dir {
          cid = cid.toV1()
        }

-        ipldResolver.put(node, { cid }, (err) => callback(err, node))
+        ipld.put(node, { cid }, (err) => callback(err, node))
       },
       (node, callback) => {
         this.multihash = node.multihash
src/importer/dir-sharded.js (10 changes: 5 additions & 5 deletions)

@@ -70,8 +70,8 @@ class DirSharded extends Dir {
     this._bucket.eachLeafSeries(iterator, callback)
   }

-  flush (path, ipldResolver, source, callback) {
-    flush(this._options, this._bucket, path, ipldResolver, source, (err, node) => {
+  flush (path, ipld, source, callback) {
+    flush(this._options, this._bucket, path, ipld, source, (err, node) => {
       if (err) {
         callback(err)
       } else {
@@ -89,7 +89,7 @@ function createDirSharded (props, _options) {
   return new DirSharded(props, _options)
 }

-function flush (options, bucket, path, ipldResolver, source, callback) {
+function flush (options, bucket, path, ipld, source, callback) {
   const children = bucket._children // TODO: intromission
   let index = 0
   const links = []
@@ -119,7 +119,7 @@ function flush (options, bucket, path, ipldResolver, source, callback) {
   function collectChild (child, index, callback) {
     const labelPrefix = leftPad(index.toString(16).toUpperCase(), 2, '0')
     if (Bucket.isBucket(child)) {
-      flush(options, child, path, ipldResolver, null, (err, node) => {
+      flush(options, child, path, ipld, null, (err, node) => {
        if (err) {
          callback(err)
          return // early
@@ -154,7 +154,7 @@ function flush (options, bucket, path, ipldResolver, source, callback) {
       cid = cid.toV1()
     }

-    ipldResolver.put(node, { cid }, (err) => callback(err, node))
+    ipld.put(node, { cid }, (err) => callback(err, node))
   },
   (node, callback) => {
     const pushable = {
src/importer/flush-tree.js (10 changes: 5 additions & 5 deletions)

@@ -9,7 +9,7 @@ const waterfall = require('async/waterfall')
 const DAGLink = dagPB.DAGLink
 const DAGNode = dagPB.DAGNode

-module.exports = (files, ipldResolver, source, callback) => {
+module.exports = (files, ipld, source, callback) => {
   // 1) convert files to a tree
   const fileTree = createTree(files)

@@ -26,7 +26,7 @@ module.exports = (files, ipldResolver, source, callback) => {
   const sizeIndex = createSizeIndex(files)

   // 3) bottom up flushing
-  traverse(fileTree, sizeIndex, null, ipldResolver, source, callback)
+  traverse(fileTree, sizeIndex, null, ipld, source, callback)
 }

 /*
@@ -106,13 +106,13 @@ function createSizeIndex (files) {
  * If the value is not an object
  * add as a link to the dirNode
  */
-function traverse (tree, sizeIndex, path, ipldResolver, source, done) {
+function traverse (tree, sizeIndex, path, ipld, source, done) {
   mapValues(tree, (node, key, cb) => {
     if (isLeaf(node)) {
       return cb(null, node)
     }

-    traverse(node, sizeIndex, path ? `${path}/${key}` : key, ipldResolver, source, cb)
+    traverse(node, sizeIndex, path ? `${path}/${key}` : key, ipld, source, cb)
   }, (err, tree) => {
     if (err) {
       return done(err)
@@ -135,7 +135,7 @@
     (node, cb) => {
       sizeIndex[mh.toB58String(node.multihash)] = node.size

-      ipldResolver.put(node, {
+      ipld.put(node, {
         cid: new CID(node.multihash)
       }, (err) => cb(err, node))
     }
src/importer/index.js (6 changes: 3 additions & 3 deletions)

@@ -17,7 +17,7 @@ const defaultOptions = {
   chunker: 'fixed'
 }

-module.exports = function (ipldResolver, _options) {
+module.exports = function (ipld, _options) {
   const options = Object.assign({}, defaultOptions, _options)
   const Chunker = chunkers[options.chunker]
   assert(Chunker, 'Unknkown chunker named ' + options.chunker)
@@ -39,9 +39,9 @@ module.exports = function (ipldResolver, _options) {
     source: pushable()
   }

-  const dagStream = DAGBuilder(Chunker, ipldResolver, options)
+  const dagStream = DAGBuilder(Chunker, ipld, options)

-  const treeBuilder = createTreeBuilder(ipldResolver, options)
+  const treeBuilder = createTreeBuilder(ipld, options)
   const treeBuilderStream = treeBuilder.stream()
   const pausable = pause(() => {})

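
For context, the renamed argument threads straight through the importer's public API. A usage sketch assembled from the test files below (the path, content, and strategy values are illustrative):

    const pull = require('pull-stream')
    const BlockService = require('ipfs-block-service')
    const Ipld = require('ipld')
    const importer = require('../src/importer')

    const ipld = new Ipld(new BlockService(repo)) // `repo` as in the tests

    pull(
      pull.values([{
        path: 'a/b/c.txt',
        content: pull.values([Buffer.from('i have the best bytes')])
      }]),
      importer(ipld, { strategy: 'balanced' }), // strategy name is illustrative
      pull.collect((err, nodes) => {
        if (err) throw err
        // one entry per imported file, plus one per wrapping directory
        nodes.forEach((node) => console.log(node.path))
      })
    )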
src/importer/tree-builder.js (4 changes: 2 additions & 2 deletions)

@@ -18,7 +18,7 @@ const defaultOptions = {
   onlyHash: false
 }

-function createTreeBuilder (ipldResolver, _options) {
+function createTreeBuilder (ipld, _options) {
   const options = Object.assign({}, defaultOptions, _options)

   const queue = createQueue(consumeQueue, 1)
@@ -202,7 +202,7 @@ function createTreeBuilder (ipldResolver, _options) {
       // don't flush directory unless it's been modified

       tree.dirty = false
-      tree.flush(path, ipldResolver, stream.source, (err, node) => {
+      tree.flush(path, ipld, stream.source, (err, node) => {
         if (err) {
           callback(err)
         } else {
test/builder-dir-sharding.js (24 changes: 12 additions & 12 deletions)

@@ -9,7 +9,7 @@ chai.use(require('dirty-chai'))
 const expect = chai.expect
 const mh = require('multihashes')
 const BlockService = require('ipfs-block-service')
-const IPLDResolver = require('ipld-resolver')
+const Ipld = require('ipld')
 const pull = require('pull-stream')
 const pushable = require('pull-pushable')
 const whilst = require('async/whilst')
@@ -20,11 +20,11 @@ module.exports = (repo) => {
   describe('builder: directory sharding', function () {
     this.timeout(30 * 1000)

-    let ipldResolver
+    let ipld

     before(() => {
       const bs = new BlockService(repo)
-      ipldResolver = new IPLDResolver(bs)
+      ipld = new Ipld(bs)
     })

     describe('basic dirbuilder', () => {
@@ -42,7 +42,7 @@ module.exports = (repo) => {
             content: pull.values([Buffer.from('i have the best bytes')])
           }
         ]),
-        importer(ipldResolver, options),
+        importer(ipld, options),
         pull.collect((err, nodes) => {
           expect(err).to.not.exist()
           expect(nodes.length).to.be.eql(2)
@@ -67,7 +67,7 @@ module.exports = (repo) => {
             content: pull.values([Buffer.from('i have the best bytes')])
           }
         ]),
-        importer(ipldResolver, options),
+        importer(ipld, options),
         pull.collect((err, nodes) => {
           expect(err).to.not.exist()
           expect(nodes.length).to.be.eql(2)
@@ -83,7 +83,7 @@ module.exports = (repo) => {

     it('exporting unsharded hash results in the correct files', (done) => {
       pull(
-        exporter(nonShardedHash, ipldResolver),
+        exporter(nonShardedHash, ipld),
         pull.collect((err, nodes) => {
           expect(err).to.not.exist()
           expect(nodes.length).to.be.eql(2)
@@ -109,7 +109,7 @@ module.exports = (repo) => {

     it('exporting sharded hash results in the correct files', (done) => {
       pull(
-        exporter(shardedHash, ipldResolver),
+        exporter(shardedHash, ipld),
         pull.collect((err, nodes) => {
           expect(err).to.not.exist()
           expect(nodes.length).to.be.eql(2)
@@ -142,7 +142,7 @@ module.exports = (repo) => {
       const push = pushable()
       pull(
         push,
-        importer(ipldResolver),
+        importer(ipld),
         pull.collect((err, nodes) => {
           expect(err).to.not.exist()
           expect(nodes.length).to.be.eql(maxDirs + 1)
@@ -179,7 +179,7 @@ module.exports = (repo) => {
       const contentEntries = []
       const entries = {}
       pull(
-        exporter(rootHash, ipldResolver),
+        exporter(rootHash, ipld),
         pull.asyncMap((node, callback) => {
           if (node.content) {
             pull(
@@ -234,7 +234,7 @@ module.exports = (repo) => {
       const push = pushable()
       pull(
         push,
-        importer(ipldResolver),
+        importer(ipld),
         pull.collect((err, nodes) => {
           expect(err).to.not.exist()
           const last = nodes[nodes.length - 1]
@@ -281,7 +281,7 @@ module.exports = (repo) => {
     it('exports a big dir', (done) => {
       const entries = {}
       pull(
-        exporter(rootHash, ipldResolver),
+        exporter(rootHash, ipld),
         pull.asyncMap((node, callback) => {
           if (node.content) {
             pull(
@@ -340,7 +340,7 @@ module.exports = (repo) => {
     it('exports a big dir with subpath', (done) => {
       const exportHash = mh.toB58String(rootHash) + '/big/big/2000'
       pull(
-        exporter(exportHash, ipldResolver),
+        exporter(exportHash, ipld),
         pull.collect(collected)
       )

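
The exporter side mirrors the importer: it takes a hash (optionally with a subpath appended) and the same `ipld` instance. A condensed sketch of the flows exercised above (`exporter` and `rootHash` as set up in this test file):

    const pull = require('pull-stream')
    const mh = require('multihashes')

    // export the whole tree under rootHash
    pull(
      exporter(rootHash, ipld),
      pull.collect((err, files) => {
        if (err) throw err
        files.forEach((file) => console.log(file.path))
      })
    )

    // or export a single entry by appending a subpath to the base58 string:
    // exporter(mh.toB58String(rootHash) + '/big/big/2000', ipld)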
test/builder-only-hash.js (10 changes: 5 additions & 5 deletions)

@@ -6,18 +6,18 @@ chai.use(require('dirty-chai'))
 const expect = chai.expect
 const BlockService = require('ipfs-block-service')
 const pull = require('pull-stream')
-const IPLDResolver = require('ipld-resolver')
+const Ipld = require('ipld')
 const CID = require('cids')
 const createBuilder = require('../src/builder')
 const FixedSizeChunker = require('../src/chunker/fixed-size')

 module.exports = (repo) => {
   describe('builder: onlyHash', () => {
-    let ipldResolver
+    let ipld

     before(() => {
       const bs = new BlockService(repo)
-      ipldResolver = new IPLDResolver(bs)
+      ipld = new Ipld(bs)
     })

     it('will only chunk and hash if passed an "onlyHash" option', (done) => {
@@ -27,7 +27,7 @@ module.exports = (repo) => {
         const node = nodes[0]
         expect(node).to.exist()

-        ipldResolver.get(new CID(node.multihash), (err, res) => {
+        ipld.get(new CID(node.multihash), (err, res) => {
           expect(err).to.exist()
           done()
         })
@@ -45,7 +45,7 @@

       pull(
         pull.values([inputFile]),
-        createBuilder(FixedSizeChunker, ipldResolver, options),
+        createBuilder(FixedSizeChunker, ipld, options),
         pull.collect(onCollected)
       )
     })
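
This test hinges on nothing being persisted: with only-hashing enabled the builder computes multihashes but skips every `ipld.put`, so the `ipld.get` in the hunk above is expected to fail. The options object itself is collapsed in this view; a plausible reconstruction (the `onlyHash: true` flag is an assumption, inferred from the `onlyHash: false` default in src/importer/tree-builder.js):

    // Assumed shape of the collapsed options object in this test:
    // hash every chunk, but never write blocks to the store.
    const options = {
      onlyHash: true
    }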
(Diffs for the remaining 8 changed files are not shown here.)
