This repository has been archived by the owner on Aug 12, 2020. It is now read-only.

feat: upgrade to latest dag-pb API #88

Merged · 4 commits · Nov 24, 2016

8 changes: 4 additions & 4 deletions package.json
@@ -52,15 +52,15 @@
     "rimraf": "^2.5.4"
   },
   "dependencies": {
-    "async": "^2.1.2",
+    "async": "^2.1.4",
     "cids": "^0.2.0",
     "ipfs-unixfs": "^0.1.5",
-    "ipld-dag-pb": "^0.8.0",
-    "ipld-resolver": "^0.2.0",
+    "ipld-dag-pb": "^0.9.1",
+    "ipld-resolver": "^0.3.0",
     "is-ipfs": "^0.2.1",
     "multihashes": "^0.2.2",
     "pull-block": "^1.0.2",
-    "pull-paramap": "^1.2.0",
+    "pull-paramap": "^1.2.1",
     "pull-pushable": "^2.0.1",
     "pull-stream": "^3.5.0",
     "pull-traverse": "^1.0.3",

2 changes: 1 addition & 1 deletion src/exporter/dir.js
@@ -24,7 +24,7 @@ module.exports = (node, name, ipldResolver) => {
     pull.values(node.links),
     pull.map((link) => ({
       path: path.join(name, link.name),
-      hash: link.hash
+      hash: link.multihash
     })),
     paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => {
       if (err) {

2 changes: 1 addition & 1 deletion src/exporter/file.js
@@ -20,7 +20,7 @@ module.exports = (node, name, ipldResolver) => {
   function visitor (node) {
     return pull(
       pull.values(node.links),
-      paramap((link, cb) => ipldResolver.get(new CID(link.hash), cb))
+      paramap((link, cb) => ipldResolver.get(new CID(link.multihash), cb))
     )
   }

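Both exporter hunks above make the same small change: in ipld-dag-pb 0.9 a DAGLink exposes the linked node's hash as `link.multihash` rather than `link.hash`, and that raw multihash has to be wrapped in a CID before it is handed to the IPLD resolver. A minimal sketch of the pattern, using the same pull-stream helpers the exporter uses (the `logChildren` helper itself is invented for illustration, not code from this repo):

```js
const pull = require('pull-stream')
const paramap = require('pull-paramap')
const CID = require('cids')

// Fetch every child of a dag-pb node through the IPLD resolver and log it.
function logChildren (node, ipldResolver) {
  pull(
    pull.values(node.links),                 // node.links is an array of DAGLinks
    // link.multihash (not link.hash) holds the child's raw multihash
    paramap((link, cb) => ipldResolver.get(new CID(link.multihash), cb)),
    pull.drain((child) => console.log(child))
  )
}
```
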
58 changes: 25 additions & 33 deletions src/importer/flush-tree.js
@@ -5,8 +5,7 @@ const UnixFS = require('ipfs-unixfs')
 const CID = require('cids')
 const dagPB = require('ipld-dag-pb')
 const mapValues = require('async/mapValues')
-const parallel = require('async/parallel')
-
+const waterfall = require('async/waterfall')
 const DAGLink = dagPB.DAGLink
 const DAGNode = dagPB.DAGNode

@@ -120,44 +119,37 @@ function traverse (tree, sizeIndex, path, ipldResolver, source, done) {
     // return this new node multihash
 
     const keys = Object.keys(tree)
 
-    const ufsDir = new UnixFS('directory')
-    const node = new DAGNode(ufsDir.marshal())
-
-    keys.forEach((key) => {
+    const dir = new UnixFS('directory')
+    const links = keys.map((key) => {
       const b58mh = mh.toB58String(tree[key])
-      const link = new DAGLink(key, sizeIndex[b58mh], tree[key])
-      node.addRawLink(link)
+      return new DAGLink(key, sizeIndex[b58mh], tree[key])
     })
 
-    parallel([
-      (cb) => node.multihash(cb),
-      (cb) => node.size(cb)
-    ], (err, res) => {
+    waterfall([
+      (cb) => DAGNode.create(dir.marshal(), links, cb),
+      (node, cb) => {
+        sizeIndex[mh.toB58String(node.multihash)] = node.size
+
+        ipldResolver.put({
+          node: node,
+          cid: new CID(node.multihash)
+        }, (err) => cb(err, node))
+      }
+    ], (err, node) => {
       if (err) {
         source.push(new Error('failed to store dirNode'))
         return done(err)
       }
 
-      const multihash = res[0]
-      const size = res[1]
-
-      sizeIndex[mh.toB58String(multihash)] = size
-      ipldResolver.put({
-        node: node,
-        cid: new CID(multihash)
-      }, (err) => {
-        if (err) {
-          source.push(new Error('failed to store dirNode'))
-        } else if (path) {
-          source.push({
-            path: path,
-            multihash: multihash,
-            size: size
-          })
-        }
-
-        done(null, multihash)
-      })
+      if (path) {
+        source.push({
+          path: path,
+          multihash: node.multihash,
+          size: node.size
+        })
+      }
+
+      done(null, node.multihash)
     })
   })
 }

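The hunk above is the core of the dag-pb upgrade: instead of mutating a `DAGNode` with `addRawLink` and then asking for its hash and size through the asynchronous `multihash()` and `size()` methods, the directory node is now built in one step with `DAGNode.create(data, links, callback)`, and `node.multihash` and `node.size` are read as plain properties. A rough, self-contained sketch of that flush pattern, assuming ipld-dag-pb ~0.9 and ipld-resolver ~0.3 (the `flushDir` helper and the shape of its `children` argument are invented for the example):

```js
const waterfall = require('async/waterfall')
const dagPB = require('ipld-dag-pb')
const UnixFS = require('ipfs-unixfs')
const CID = require('cids')

const DAGNode = dagPB.DAGNode
const DAGLink = dagPB.DAGLink

// children: [{ name, size, multihash }] describing already-stored child nodes
function flushDir (children, ipldResolver, callback) {
  const dir = new UnixFS('directory')
  const links = children.map((c) => new DAGLink(c.name, c.size, c.multihash))

  waterfall([
    // links are handed over at creation time; there is no addRawLink step anymore
    (cb) => DAGNode.create(dir.marshal(), links, cb),
    // multihash and size are plain properties of the created node
    (node, cb) => ipldResolver.put({
      node: node,
      cid: new CID(node.multihash)
    }, (err) => cb(err, node))
  ], callback)
}
```

Because the links are fixed at creation time, the old build-then-mutate sequence collapses into a single `create` plus `put`, which is what lets the nested callbacks in the diff be replaced by a flat `waterfall`.
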
123 changes: 47 additions & 76 deletions src/importer/index.js
@@ -8,6 +8,7 @@ const pullWrite = require('pull-write')
 const parallel = require('async/parallel')
 const dagPB = require('ipld-dag-pb')
 const CID = require('cids')
+const waterfall = require('async/waterfall')
 
 const fsc = require('./../chunker/fixed-size')
 const createAndStoreTree = require('./flush-tree')
@@ -71,49 +72,38 @@ function makeWriter (source, files, ipldResolver) {
   }
 }
 
-function createAndStoreDir (item, ipldResolver, cb) {
+function createAndStoreDir (item, ipldResolver, callback) {
   // 1. create the empty dir dag node
   // 2. write it to the dag store
 
   const d = new UnixFS('directory')
-  const n = new DAGNode()
-  n.data = d.marshal()
-
-  n.multihash((err, multihash) => {
+  waterfall([
+    (cb) => DAGNode.create(d.marshal(), cb),
+    (node, cb) => {
+      ipldResolver.put({
+        node: node,
+        cid: new CID(node.multihash)
+      }, (err) => cb(err, node))
+    }
+  ], (err, node) => {
     if (err) {
-      return cb(err)
+      return callback(err)
    }
 
-    ipldResolver.put({
-      node: n,
-      cid: new CID(multihash)
-    }, (err) => {
-      if (err) {
-        return cb(err)
-      }
-
-      n.size((err, size) => {
-        if (err) {
-          return cb(err)
-        }
-
-        cb(null, {
-          path: item.path,
-          multihash: multihash,
-          size: size
-        })
-      })
+    callback(null, {
+      path: item.path,
+      multihash: node.multihash,
+      size: node.size
     })
   })
 }
 
-function createAndStoreFile (file, ipldResolver, cb) {
+function createAndStoreFile (file, ipldResolver, callback) {
   if (Buffer.isBuffer(file.content)) {
     file.content = pull.values([file.content])
   }
 
   if (typeof file.content !== 'function') {
-    return cb(new Error('invalid content'))
+    return callback(new Error('invalid content'))
   }
 
   // 1. create the unixfs merkledag node
@@ -128,88 +118,69 @@ function createAndStoreFile (file, ipldResolver, cb) {
     file.content,
     fsc(CHUNK_SIZE),
     pull.asyncMap((chunk, cb) => {
-      const l = new UnixFS('file', Buffer(chunk))
-      const n = new DAGNode(l.marshal())
+      const l = new UnixFS('file', new Buffer(chunk))
 
-      n.multihash((err, multihash) => {
+      DAGNode.create(l.marshal(), (err, node) => {
         if (err) {
           return cb(err)
         }
 
         ipldResolver.put({
-          node: n,
-          cid: new CID(multihash)
+          node: node,
+          cid: new CID(node.multihash)
         }, (err) => {
           if (err) {
-            return cb(new Error('Failed to store chunk'))
+            return cb(err)
          }
 
-          n.size((err, size) => {
-            if (err) {
-              return cb(err)
-            }
-
-            cb(null, {
-              Hash: multihash,
-              Size: size,
-              leafSize: l.fileSize(),
-              Name: ''
-            })
+          cb(null, {
+            Hash: node.multihash,
+            Size: node.size,
+            leafSize: l.fileSize(),
+            Name: ''
          })
        })
      })
    }),
    pull.collect((err, leaves) => {
      if (err) {
-        return cb(err)
+        return callback(err)
      }
 
      if (leaves.length === 1) {
-        return cb(null, {
+        return callback(null, {
          path: file.path,
          multihash: leaves[0].Hash,
          size: leaves[0].Size
        })
      }
 
      // create a parent node and add all the leafs
 
      const f = new UnixFS('file')
-      const n = new DAGNode()
 
-      for (let leaf of leaves) {
+      const links = leaves.map((leaf) => {
        f.addBlockSize(leaf.leafSize)
-        n.addRawLink(
-          new DAGLink(leaf.Name, leaf.Size, leaf.Hash)
-        )
-      }
-
-      n.data = f.marshal()
+        return new DAGLink(leaf.Name, leaf.Size, leaf.Hash)
+      })
 
-      n.multihash((err, multihash) => {
+      waterfall([
+        (cb) => DAGNode.create(f.marshal(), links, cb),
+        (node, cb) => {
+          ipldResolver.put({
+            node: node,
+            cid: new CID(node.multihash)
+          }, (err) => cb(err, node))
+        }
+      ], (err, node) => {
        if (err) {
-          return cb(err)
+          return callback(err)
        }
 
-        ipldResolver.put({
-          node: n,
-          cid: new CID(multihash)
-        }, (err) => {
-          if (err) {
-            return cb(err)
-          }
-
-          n.size((err, size) => {
-            if (err) {
-              return cb(err)
-            }
-
-            cb(null, {
-              path: file.path,
-              multihash: multihash,
-              size: size
-            })
-          })
+        callback(null, {
+          path: file.path,
+          multihash: node.multihash,
+          size: node.size
        })
      })
    })

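On the importer side, the same upgrade shows up for the file leaves: the old code created a mutable `DAGNode` and then had to call `n.multihash(cb)` and `n.size(cb)` asynchronously before it could report the stored chunk, while the new code gets a finished node from `DAGNode.create` and reads `node.multihash` and `node.size` directly. A hedged sketch of that leaf-chunk step, closely mirroring the hunk above (the `storeChunk` name is made up; the repo's actual function is `createAndStoreFile`):

```js
const dagPB = require('ipld-dag-pb')
const UnixFS = require('ipfs-unixfs')
const CID = require('cids')

const DAGNode = dagPB.DAGNode

// Wrap one chunk in a unixfs 'file' node, store it, and report its stats.
function storeChunk (chunk, ipldResolver, cb) {
  const leaf = new UnixFS('file', new Buffer(chunk))

  DAGNode.create(leaf.marshal(), (err, node) => {
    if (err) {
      return cb(err)
    }

    ipldResolver.put({
      node: node,
      cid: new CID(node.multihash)
    }, (err) => {
      if (err) {
        return cb(err)
      }

      // multihash and size are now properties, not async methods
      cb(null, {
        Hash: node.multihash,
        Size: node.size,
        leafSize: leaf.fileSize(),
        Name: ''
      })
    })
  })
}
```
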
4 changes: 2 additions & 2 deletions test/test-importer.js
@@ -19,8 +19,8 @@ function stringifyMh (files) {
 const bigFile = loadFixture(__dirname, 'fixtures/1.2MiB.txt')
 const smallFile = loadFixture(__dirname, 'fixtures/200Bytes.txt')
 
-module.exports = function (repo) {
-  describe('importer', function () {
+module.exports = (repo) => {
+  describe('importer', () => {
     let ipldResolver
 
     before(() => {