Skip to content
This repository has been archived by the owner on Aug 12, 2020. It is now read-only.

Commit

Permalink
feat: windows interop (#195)
Browse files Browse the repository at this point in the history
* fix: export files with a POSIX path

* test(with-dag-api): some fixup for windows

It is still failing, because it uses IPFS for testing and IPFS is not yet windows ready.

* chore: run on appveyor

* test: run more tests in the browser

* test: timeout

* test: remove debug code

* test: fix lint issues

* test(browser): are taking forever!!!

* test: skip with-dag-api until #196 is resolved

* test: more timeouts

* Revert "test: run more tests in the browser"

This reverts commit c6f2bff.

* test: more testing issues

* chore: npm ERR! peer dep missing: ajv@^5.0.0, required by [email protected]

* test: lint issues

* test: more issues

* test: follow the standards

* test: [email protected] breaks our tests

* chore: bump deps

* chore: bump timeouts for nodejs 6
  • Loading branch information
richardschneider authored and daviddias committed Nov 10, 2017
1 parent 90dd99a commit aa21ff3
Show file tree
Hide file tree
Showing 13 changed files with 61 additions and 22 deletions.
23 changes: 23 additions & 0 deletions .appveyor.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
environment:
matrix:
- nodejs_version: "6"
- nodejs_version: "8"

# cache:
# - node_modules

platform:
- x64

install:
- ps: Install-Product node $env:nodejs_version $env:platform
- npm install

test_script:
- node --version
- npm --version
- npm test

build: off

version: "{build}"
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -41,11 +41,12 @@
"homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme",
"devDependencies": {
"aegir": "^12.1.3",
"ajv": "^5.3.0",
"chai": "^4.1.2",
"dirty-chai": "^2.0.1",
"ipfs": "~0.26.0",
"ipfs-block-service": "~0.13.0",
"ipfs-repo": "~0.18.3",
"ipfs-repo": "0.18.3",
"ncp": "^2.0.0",
"pre-commit": "^1.2.2",
"pull-generate": "^2.2.0",
Expand Down
3 changes: 1 addition & 2 deletions src/exporter/dir-flat.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
'use strict'

const path = require('path')
const pull = require('pull-stream')
const paramap = require('pull-paramap')
const CID = require('cids')
Expand All @@ -22,7 +21,7 @@ function dirExporter (node, name, pathRest, ipldResolver, resolve, parent) {
pull.values(node.links),
pull.map((link) => ({
linkName: link.name,
path: path.join(name, link.name),
path: name + '/' + link.name,
hash: link.multihash
})),
pull.filter((item) => accepts === undefined || item.linkName === accepts),
Expand Down
3 changes: 1 addition & 2 deletions src/exporter/dir-hamt-sharded.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
'use strict'

const path = require('path')
const pull = require('pull-stream')
const paramap = require('pull-paramap')
const CID = require('cids')
Expand All @@ -25,7 +24,7 @@ function shardedDirExporter (node, name, pathRest, ipldResolver, resolve, parent
pull.map((link) => {
// remove the link prefix (2 chars for the bucket index)
const p = link.name.substring(2)
const pp = p ? path.join(name, p) : name
const pp = p ? name + '/' + p : name
let accept = true
let fromPathRest = false

Expand Down
3 changes: 1 addition & 2 deletions src/exporter/object.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
'use strict'

const path = require('path')
const CID = require('cids')
const pull = require('pull-stream')
const pullDefer = require('pull-defer')
Expand All @@ -10,7 +9,7 @@ module.exports = (node, name, pathRest, ipldResolver, resolve) => {
if (pathRest.length) {
const pathElem = pathRest.shift()
newNode = node[pathElem]
const newName = path.join(name, pathElem)
const newName = name + '/' + pathElem
if (CID.isCID(newNode)) {
const d = pullDefer.source()
ipldResolver.get(sanitizeCID(newNode), (err, newNode) => {
Expand Down
2 changes: 1 addition & 1 deletion test/builder-dir-sharding.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ const leftPad = require('left-pad')

module.exports = (repo) => {
describe('builder: directory sharding', function () {
this.timeout(20 * 1000)
this.timeout(30 * 1000)

let ipldResolver

Expand Down
4 changes: 3 additions & 1 deletion test/exporter-subtree.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@ const exporter = unixFSEngine.exporter
const smallFile = loadFixture(__dirname, 'fixtures/200Bytes.txt')

module.exports = (repo) => {
describe('exporter', () => {
describe('exporter', function () {
this.timeout(10 * 1000)

let ipldResolver

before(() => {
Expand Down
8 changes: 4 additions & 4 deletions test/exporter.js
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ module.exports = (repo) => {
fileEql(files[0], bigFile, done)
})
)
})
}).timeout(30 * 1000)

it('export a small file with links using CID instead of multihash', (done) => {
const cid = new CID('QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q')
Expand All @@ -96,7 +96,7 @@ module.exports = (repo) => {
fileEql(files[0], bigFile, done)
})
)
})
}).timeout(30 * 1000)

it('export a large file > 5mb', (done) => {
const hash = 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE'
Expand All @@ -109,7 +109,7 @@ module.exports = (repo) => {
fileEql(files[0], null, done)
})
)
})
}).timeout(30 * 1000)

it('export a directory', (done) => {
const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN'
Expand Down Expand Up @@ -149,7 +149,7 @@ module.exports = (repo) => {
)
})
)
})
}).timeout(30 * 1000)

it('returns an empty stream for dir', (done) => {
const hash = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
Expand Down
2 changes: 1 addition & 1 deletion test/hamt.js
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ describe('HAMT', () => {
})

it('can remove all the keys and still find remaining', function (done) {
this.timeout(30 * 1000)
this.timeout(50 * 1000)

masterHead = keys.pop()
iterate()
Expand Down
8 changes: 6 additions & 2 deletions test/import-export-nested-dir.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@ module.exports = (repo) => {
ipldResolver = new IPLDResolver(bs)
})

it('imports', (done) => {
it('imports', function (done) {
this.timeout(20 * 1000)

pull(
pull.values([
{ path: 'a/b/c/d/e', content: pull.values([Buffer.from('banana')]) },
Expand Down Expand Up @@ -56,7 +58,9 @@ module.exports = (repo) => {
)
})

it('exports', done => {
it('exports', function (done) {
this.timeout(20 * 1000)

pull(
unixFSEngine.exporter(rootHash, ipldResolver),
pull.collect((err, files) => {
Expand Down
5 changes: 4 additions & 1 deletion test/import-export.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
/* eslint-env mocha */
/* eslint max-nested-callbacks: ["error", 5] */
'use strict'

const chai = require('chai')
Expand Down Expand Up @@ -32,7 +33,9 @@ function fileEql (f1, fileData, callback) {
}

module.exports = (repo) => {
describe('import and export', () => {
describe('import and export', function () {
this.timeout(30 * 1000)

strategies.forEach((strategy) => {
const importerOptions = { strategy: strategy }

Expand Down
2 changes: 1 addition & 1 deletion test/importer.js
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ module.exports = (repo) => {
const expected = extend({}, defaultResults, strategies[strategy])

describe('importer: ' + strategy, function () {
this.timeout(20 * 1000)
this.timeout(30 * 1000)

let ipldResolver

Expand Down
17 changes: 13 additions & 4 deletions test/with-dag-api.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ const pull = require('pull-stream')
const mh = require('multihashes')
const loadFixture = require('aegir/fixtures')
const IPFS = require('ipfs')
const os = require('os')
const path = require('path')

function stringifyMh (files) {
return files.map((file) => {
Expand Down Expand Up @@ -105,7 +107,12 @@ const strategyOverrides = {

}

describe('with dag-api', () => {
describe('with dag-api', function () {
// TODO: waiting for IPFS support on windows, https://github.com/ipfs/js-ipfs-unixfs-engine/issues/196
if (os.platform() === 'win32') {
return
}

strategies.forEach(strategy => {
const baseFiles = strategyBaseFiles[strategy]
const defaultResults = extend({}, baseFiles, {
Expand Down Expand Up @@ -159,7 +166,7 @@ describe('with dag-api', () => {
const expected = extend({}, defaultResults, strategies[strategy])

describe('importer: ' + strategy, function () {
this.timeout(20 * 1000)
this.timeout(50 * 1000)

let node

Expand All @@ -171,9 +178,11 @@ describe('with dag-api', () => {
}
}

before((done) => {
before(function (done) {
this.timeout(30 * 1000)

node = new IPFS({
repo: '/tmp/unixfs-test-' + Math.random(),
repo: path.join(os.tmpdir(), 'unixfs-test-' + Math.random()),
start: false
})

Expand Down

0 comments on commit aa21ff3

Please sign in to comment.