This repository has been archived by the owner on Aug 12, 2020. It is now read-only.

feat: upgrade to latest dag-pb API #88

Merged · 4 commits · Nov 24, 2016
Changes from 1 commit
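The change in a nutshell: the latest ipld-dag-pb drops the mutable DAGNode, whose multihash and size had to be computed through async instance methods, in favor of async factory functions that return finished, effectively immutable nodes whose `multihash` and `size` are plain properties. A minimal before/after sketch of the API shape as it appears in this diff (error handling elided; `data` and `link` assumed to be in scope):

// Old API, as removed below: build a node, mutate it, then ask for
// its hash and size asynchronously.
const node = new DAGNode(data)
node.addRawLink(link)
node.multihash((err, multihash) => { /* ... */ })
node.size((err, size) => { /* ... */ })

// New API, as adopted below: creation and link-adding are async
// factories; every call hands back a new node with multihash and
// size already computed.
DAGNode.create(data, (err, node) => {
  DAGNode.addLink(node, link, (err, withLink) => {
    console.log(withLink.multihash, withLink.size)
  })
})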
87 changes: 52 additions & 35 deletions src/importer/flush-tree.js
@@ -5,7 +5,8 @@ const UnixFS = require('ipfs-unixfs')
 const CID = require('cids')
 const dagPB = require('ipld-dag-pb')
 const mapValues = require('async/mapValues')
-const parallel = require('async/parallel')
+const series = require('async/series')
+const each = require('async/each')
 
 const DAGLink = dagPB.DAGLink
 const DAGNode = dagPB.DAGNode
@@ -121,43 +122,59 @@ function traverse (tree, sizeIndex, path, ipldResolver, source, done) {

   const keys = Object.keys(tree)
 
-  const ufsDir = new UnixFS('directory')
-  const node = new DAGNode(ufsDir.marshal())
-
-  keys.forEach((key) => {
-    const b58mh = mh.toB58String(tree[key])
-    const link = new DAGLink(key, sizeIndex[b58mh], tree[key])
-    node.addRawLink(link)
-  })
-
-  parallel([
-    (cb) => node.multihash(cb),
-    (cb) => node.size(cb)
-  ], (err, res) => {
+  let n
+
+  series([
+    (cb) => {
+      const d = new UnixFS('directory')
+      DAGNode.create(d.marshal(), (err, node) => {
+        if (err) {
+          return cb(err)
+        }
+        n = node
+        cb()
+      })
+    },
+    (cb) => {
+      each(keys, (key, next) => {
+        const b58mh = mh.toB58String(tree[key])
+        const link = new DAGLink(key, sizeIndex[b58mh], tree[key])
+
+        DAGNode.addLink(n, link, (err, node) => {
+          if (err) {
+            return next(err)
+          }
+          n = node
+          next()
+        })
+      }, cb)
+    },
+    (cb) => {
+      sizeIndex[mh.toB58String(n.multihash)] = n.size
+
+      ipldResolver.put({
+        node: n,
+        cid: new CID(n.multihash)
+      }, (err) => {
+        if (err) {
+          source.push(new Error('failed to store dirNode'))
+          return cb(err)
+        }
+        if (path) {
+          source.push({
+            path: path,
+            multihash: n.multihash,
+            size: n.size
+          })
+        }
+        cb()
+      })
+    }
+  ], (err) => {
     if (err) {
       return done(err)
     }
 
-    const multihash = res[0]
-    const size = res[1]
-
-    sizeIndex[mh.toB58String(multihash)] = size
-    ipldResolver.put({
-      node: node,
-      cid: new CID(multihash)
-    }, (err) => {
-      if (err) {
-        source.push(new Error('failed to store dirNode'))
-      } else if (path) {
-        source.push({
-          path: path,
-          multihash: multihash,
-          size: size
-        })
-      }
-
-      done(null, multihash)
-    })
+    done(null, n.multihash)
   })
 })
 }

Review thread on the `let n` line:

Contributor: can we please not use one letter variables if possible? :)

Contributor Author: 👍
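A side note on the accumulation above: since `DAGNode.addLink` returns a brand-new node instead of mutating its input, the latest node has to be threaded through every callback, which is what the repeated `n = node` assignments do. One caveat worth flagging: async's `each` runs its iterator over all items in parallel, so when the iterator threads shared state like `n`, `eachSeries` is the conservative choice. A standalone sketch of the fold using it (the `addLinks` helper is illustrative, not part of the PR):

const eachSeries = require('async/eachSeries')
const DAGNode = require('ipld-dag-pb').DAGNode

// Fold addLink over a list of links, carrying the newest immutable
// node forward; the earlier nodes are never mutated.
function addLinks (startNode, links, done) {
  let current = startNode
  eachSeries(links, (link, next) => {
    DAGNode.addLink(current, link, (err, updated) => {
      if (err) {
        return next(err)
      }
      current = updated
      next()
    })
  }, (err) => done(err, current))
}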
151 changes: 76 additions & 75 deletions src/importer/index.js
@@ -8,12 +8,13 @@ const pullWrite = require('pull-write')
 const parallel = require('async/parallel')
 const dagPB = require('ipld-dag-pb')
 const CID = require('cids')
+const series = require('async/series')
+const each = require('async/each')
 
 const fsc = require('./../chunker/fixed-size')
 const createAndStoreTree = require('./flush-tree')
 
 const DAGNode = dagPB.DAGNode
-const DAGLink = dagPB.DAGLink
 
 const CHUNK_SIZE = 262144
 
@@ -71,49 +72,48 @@ function makeWriter (source, files, ipldResolver) {
 }
 }
 
-function createAndStoreDir (item, ipldResolver, cb) {
+function createAndStoreDir (item, ipldResolver, callback) {
   // 1. create the empty dir dag node
   // 2. write it to the dag store
 
   const d = new UnixFS('directory')
-  const n = new DAGNode()
-  n.data = d.marshal()
+  let n
 
-  n.multihash((err, multihash) => {
-    if (err) {
-      return cb(err)
-    }
-
-    ipldResolver.put({
-      node: n,
-      cid: new CID(multihash)
-    }, (err) => {
-      if (err) {
-        return cb(err)
-      }
-
-      n.size((err, size) => {
-        if (err) {
-          return cb(err)
-        }
-
-        cb(null, {
-          path: item.path,
-          multihash: multihash,
-          size: size
-        })
-      })
-    })
-  })
+  series([
+    (cb) => {
+      DAGNode.create(d.marshal(), (err, node) => {
+        if (err) {
+          return cb(err)
+        }
+        n = node
+        cb()
+      })
+    },
+    (cb) => {
+      ipldResolver.put({
+        node: n,
+        cid: new CID(n.multihash)
+      }, cb)
+    }
+  ], (err) => {
+    if (err) {
+      return callback(err)
+    }
+    callback(null, {
+      path: item.path,
+      multihash: n.multihash,
+      size: n.size
+    })
+  })
 }

-function createAndStoreFile (file, ipldResolver, cb) {
+function createAndStoreFile (file, ipldResolver, callback) {
   if (Buffer.isBuffer(file.content)) {
     file.content = pull.values([file.content])
   }
 
   if (typeof file.content !== 'function') {
-    return cb(new Error('invalid content'))
+    return callback(new Error('invalid content'))
   }
 
   // 1. create the unixfs merkledag node
@@ -128,44 +128,37 @@ function createAndStoreFile (file, ipldResolver, cb) {
     file.content,
     fsc(CHUNK_SIZE),
     pull.asyncMap((chunk, cb) => {
-      const l = new UnixFS('file', Buffer(chunk))
-      const n = new DAGNode(l.marshal())
+      const l = new UnixFS('file', new Buffer(chunk))
 
-      n.multihash((err, multihash) => {
+      DAGNode.create(l.marshal(), (err, node) => {
         if (err) {
           return cb(err)
         }
 
         ipldResolver.put({
-          node: n,
-          cid: new CID(multihash)
+          node: node,
+          cid: new CID(node.multihash)
         }, (err) => {
           if (err) {
-            return cb(new Error('Failed to store chunk'))
+            return cb(err)
           }
 
-          n.size((err, size) => {
-            if (err) {
-              return cb(err)
-            }
-
-            cb(null, {
-              Hash: multihash,
-              Size: size,
-              leafSize: l.fileSize(),
-              Name: ''
-            })
-          })
+          cb(null, {
+            Hash: node.multihash,
+            Size: node.size,
+            leafSize: l.fileSize(),
+            Name: ''
+          })
         })
       })
     }),
     pull.collect((err, leaves) => {
       if (err) {
-        return cb(err)
+        return callback(err)
       }
 
       if (leaves.length === 1) {
-        return cb(null, {
+        return callback(null, {
           path: file.path,
           multihash: leaves[0].Hash,
           size: leaves[0].Size
@@ -175,41 +168,49 @@
       // create a parent node and add all the leafs
 
       const f = new UnixFS('file')
-      const n = new DAGNode()
+      let n
 
-      for (let leaf of leaves) {
-        f.addBlockSize(leaf.leafSize)
-        n.addRawLink(
-          new DAGLink(leaf.Name, leaf.Size, leaf.Hash)
-        )
-      }
-
-      n.data = f.marshal()
-
-      n.multihash((err, multihash) => {
-        if (err) {
-          return cb(err)
-        }
-
-        ipldResolver.put({
-          node: n,
-          cid: new CID(multihash)
-        }, (err) => {
-          if (err) {
-            return cb(err)
-          }
-
-          n.size((err, size) => {
-            if (err) {
-              return cb(err)
-            }
-
-            cb(null, {
-              path: file.path,
-              multihash: multihash,
-              size: size
-            })
-          })
-        })
-      })
+      series([
+        (cb) => {
+          DAGNode.create(f.marshal(), (err, node) => {
+            if (err) {
+              return cb(err)
+            }
+            n = node
+            cb()
+          })
+        },
+        (cb) => {
+          each(leaves, (leaf, next) => {
+            f.addBlockSize(leaf.leafSize)
+            DAGNode.addLink(n, {
+              name: leaf.Name,
+              size: leaf.Size,
+              multihash: leaf.Hash
+            }, (err, node) => {
+              if (err) {
+                return next(err)
+              }
+              n = node
+              next()
+            })
+          }, cb)
+        },
+        (cb) => {
+          ipldResolver.put({
+            node: n,
+            cid: new CID(n.multihash)
+          }, cb)
+        }
+      ], (err) => {
+        if (err) {
+          return callback(err)
+        }
+
+        callback(null, {
+          path: file.path,
+          multihash: n.multihash,
+          size: n.size
+        })
+      })
     })

Review thread on the `DAGNode.create(f.marshal(), …)` line, both comments from the Contributor Author:

It is here where I'm failing, I'm passing the f.marshal() before I do the addBlockSize

ok, this was one bit, now only missing 10 bytes
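The thread above is the crux of the bug the author is chasing: `f.marshal()` serializes the UnixFS metadata, including every block size registered so far, into the protobuf Data field that the node is hashed over. In the series above, `DAGNode.create(f.marshal(), …)` runs in the first step while `f.addBlockSize(leaf.leafSize)` only happens inside the second, so the node is created from metadata that is missing the leaf sizes, and its multihash and size come out wrong. A sketch of the reordering the thread points toward (later commits in this PR are not shown in this single-commit view, so this is illustrative rather than the merged code):

const f = new UnixFS('file')

// 1. register every leaf's size BEFORE marshalling, so the serialized
//    UnixFS payload is complete
leaves.forEach((leaf) => f.addBlockSize(leaf.leafSize))

// 2. only then bake the payload into the node; links can still be
//    attached afterwards, since addLink changes the node's links,
//    not the Data field carrying the UnixFS payload
DAGNode.create(f.marshal(), (err, node) => {
  if (err) {
    return callback(err)
  }
  // ...add one link per leaf with DAGNode.addLink, then ipldResolver.put()
})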
4 changes: 2 additions & 2 deletions test/test-importer.js
@@ -19,8 +19,8 @@ function stringifyMh (files) {
 const bigFile = loadFixture(__dirname, 'fixtures/1.2MiB.txt')
 const smallFile = loadFixture(__dirname, 'fixtures/200Bytes.txt')
 
-module.exports = function (repo) {
-  describe('importer', function () {
+module.exports = (repo) => {
+  describe('importer', () => {
     let ipldResolver
 
     before(() => {