diff --git a/.eslintrc.yaml b/.eslintrc.yaml index ba6334b0346..1362c9cb29e 100644 --- a/.eslintrc.yaml +++ b/.eslintrc.yaml @@ -97,7 +97,7 @@ rules: key-spacing: [2, {mode: minimum}] keyword-spacing: 2 linebreak-style: [2, unix] - max-len: [2, 80, 2] + max-len: [2, {code: 80, ignoreUrls: true, tabWidth: 2}] new-parens: 2 no-mixed-spaces-and-tabs: 2 no-multiple-empty-lines: [2, {max: 2, maxEOF: 0, maxBOF: 0}] diff --git a/.mailmap b/.mailmap index 36324c6aa2a..722995b6b04 100644 --- a/.mailmap +++ b/.mailmap @@ -143,7 +143,7 @@ San-Tai Hsu Scott Blomquist Sergey Kryzhanovsky Shannen Saez -Shigeki Ohtsu +Shigeki Ohtsu Siddharth Mahendraker Simon Willison Stanislav Opichal diff --git a/BUILDING.md b/BUILDING.md index 13f4e34734a..b325972f4b6 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -8,21 +8,86 @@ If you consistently can reproduce a test failure, search for it in the [Node.js issue tracker](https://github.com/nodejs/node/issues) or file a new issue. +## Supported platforms + +This list of supported platforms is current as of the branch / release to +which it is attached. + +### Input + +Node.js relies on V8 and libuv. Therefore, we adopt a subset of their +supported platforms. + +### Strategy + +Support is divided into three tiers: + +* **Tier 1**: Full test coverage and maintenance by the Node.js core team and + the broader community. +* **Tier 2**: Full test coverage but more limited maintenance, + often provided by the vendor of the platform. +* **Experimental**: Known to compile but not necessarily reliably or with + a full passing test suite. These are often working to be promoted to Tier + 2 but are not quite ready. There is at least one individual actively + providing maintenance and the team is striving to broaden quality and + reliability of support. + +### Supported platforms + +| System | Support type | Version | Architectures | Notes | +|--------------|--------------|----------------------------------|----------------------|------------------| +| GNU/Linux | Tier 1 | kernel >= 2.6.18, glibc >= 2.5 | x86, x64, arm, arm64 | | +| macOS | Tier 1 | >= 10.10 | x64 | | +| Windows | Tier 1 | >= Windows 7 or >= Windows2008R2 | x86, x64 | | +| SmartOS | Tier 2 | >= 15 < 16.4 | x86, x64 | see note1 | +| FreeBSD | Tier 2 | >= 10 | x64 | | +| GNU/Linux | Tier 2 | kernel >= 4.2.0, glibc >= 2.19 | ppc64be | | +| GNU/Linux | Tier 2 | kernel >= 3.13.0, glibc >= 2.19 | ppc64le | | +| AIX | Tier 2 | >= 6.1 TL09 | ppc64be | | +| GNU/Linux | Tier 2 | kernel >= 3.10, glibc >= 2.17 | s390x | | +| macOS | Experimental | >= 10.8 < 10.10 | x64 | no test coverage | +| Linux (musl) | Experimental | musl >= 1.0 | x64 | | + +note1 - The gcc4.8-libs package needs to be installed, because node + binaries have been built with GCC 4.8, for which runtime libraries are not + installed by default. For these node versions, the recommended binaries + are the ones available in pkgsrc, not the one available from nodejs.org. + Note that the binaries downloaded from the pkgsrc repositories are not + officially supported by the Node.js project, and instead are supported + by Joyent. SmartOS images >= 16.4 are not supported because + GCC 4.8 runtime libraries are not available in their pkgsrc repository + +### Supported toolchains + +Depending on host platform, the selection of toolchains may vary. 
+ +#### Unix + +* GCC 4.8.5 or newer +* Clang 3.4.1 or newer + +#### Windows + +* Building Node: Visual Studio 2015 or Visual C++ Build Tools 2015 or newer +* Building native add-ons: Visual Studio 2013 or Visual C++ Build Tools 2015 + or newer + +## Building Node.js on supported platforms ### Unix / OS X Prerequisites: * `gcc` and `g++` 4.8.5 or newer, or -* `clang` and `clang++` 3.4 or newer +* `clang` and `clang++` 3.4.1 or newer * Python 2.6 or 2.7 * GNU Make 3.81 or newer On OS X, you will also need: * [Xcode](https://developer.apple.com/xcode/download/) - * You also need to install the `Command Line Tools` via Xcode. You can find + - You also need to install the `Command Line Tools` via Xcode. You can find this under the menu `Xcode -> Preferences -> Downloads` - * This step will install `gcc` and the related toolchain containing `make` + - This step will install `gcc` and the related toolchain containing `make` * After building, you may want to setup [firewall rules](tools/macosx-firewall.sh) to avoid popups asking to accept incoming network connections when running tests: @@ -51,7 +116,8 @@ the `-j4` flag. See the [GNU Make Documentation](https://www.gnu.org/software/make/manual/html_node/Parallel.html) for more information. -Note that the above requires that `python` resolve to Python 2.6 or 2.7 and not a newer version. +Note that the above requires that `python` resolve to Python 2.6 or 2.7 +and not a newer version. To run the tests: @@ -252,9 +318,11 @@ It is possible to build Node.js with **Note**: building in this way does **not** allow you to claim that the runtime is FIPS 140-2 validated. Instead you can indicate that the runtime -uses a validated module. See the [security policy](http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf) +uses a validated module. See the +[security policy](http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf) page 60 for more details. In addition, the validation for the underlying module -is only valid if it is deployed in accordance with its [security policy](http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf). +is only valid if it is deployed in accordance with its +[security policy](http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf). If you need FIPS validated cryptography it is recommended that you read both the [security policy](http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf) and [user guide](https://openssl.org/docs/fips/UserGuide-2.0.pdf). diff --git a/CHANGELOG.md b/CHANGELOG.md index 49d686356e7..cbc244efe0a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -28,7 +28,8 @@ release. -7.7.3
+7.7.4
+7.7.3
7.7.2
7.7.1
7.7.0
@@ -42,7 +43,8 @@ release. 7.0.0
-6.10.0
+6.10.1
+6.10.0
6.9.5
6.9.4
6.9.3
@@ -84,7 +86,8 @@ release. 5.0.0
-4.8.0
+4.8.1
+4.8.0
4.7.3
4.7.2
4.7.1
diff --git a/android-configure b/android-configure index 1dc238ebd0c..59f0a40c1ac 100755 --- a/android-configure +++ b/android-configure @@ -38,13 +38,26 @@ case $ARCH in ;; esac +NDK_PATH=$1 +function make_toolchain { + $NDK_PATH/build/tools/make-standalone-toolchain.sh \ + --toolchain=$TOOLCHAIN_NAME-$CC_VER \ + --arch=$ARCH \ + --install-dir=$TOOLCHAIN \ + --platform=android-21 +} + export TOOLCHAIN=$PWD/android-toolchain -mkdir -p $TOOLCHAIN -$1/build/tools/make-standalone-toolchain.sh \ - --toolchain=$TOOLCHAIN_NAME-$CC_VER \ - --arch=$ARCH \ - --install-dir=$TOOLCHAIN \ - --platform=android-21 +if [ -d "$TOOLCHAIN" ]; then + read -r -p "NDK toolchain already exists. Replace it? [y/N]" response + case "$response" in + [Yy]) + rm -rf "$TOOLCHAIN" + make_toolchain + esac +else + make_toolchain +fi export PATH=$TOOLCHAIN/bin:$PATH export AR=$TOOLCHAIN/bin/$SUFFIX-ar export CC=$TOOLCHAIN/bin/$SUFFIX-gcc diff --git a/benchmark/_benchmark_progress.js b/benchmark/_benchmark_progress.js index 4b42248f246..4afae2f77d4 100644 --- a/benchmark/_benchmark_progress.js +++ b/benchmark/_benchmark_progress.js @@ -104,7 +104,7 @@ class BenchmarkProgress { `| ${fraction(completedFiles, scheduledFiles)} files ` + `| ${fraction(completedRunsForFile, runsPerFile)} runs ` + `| ${fraction(completedConfig, scheduledConfig)} configs]` + - `: ${caption}`; + `: ${caption} `; } updateProgress(finished) { diff --git a/benchmark/common.js b/benchmark/common.js index 7ce180fdb7f..24369f2f543 100644 --- a/benchmark/common.js +++ b/benchmark/common.js @@ -38,7 +38,7 @@ function Benchmark(fn, configs, options) { } Benchmark.prototype._parseArgs = function(argv, configs) { - const cliOptions = Object.assign({}, configs); + const cliOptions = {}; const extraOptions = {}; // Parse configuration arguments for (const arg of argv) { @@ -47,17 +47,20 @@ Benchmark.prototype._parseArgs = function(argv, configs) { console.error('bad argument: ' + arg); process.exit(1); } + const config = match[1]; - if (configs[match[1]]) { + if (configs[config]) { // Infer the type from the config object and parse accordingly - const isNumber = typeof configs[match[1]][0] === 'number'; + const isNumber = typeof configs[config][0] === 'number'; const value = isNumber ? 
+match[2] : match[2]; - cliOptions[match[1]] = [value]; + if (!cliOptions[config]) + cliOptions[config] = []; + cliOptions[config].push(value); } else { - extraOptions[match[1]] = match[2]; + extraOptions[config] = match[2]; } } - return { cli: cliOptions, extra: extraOptions }; + return { cli: Object.assign({}, configs, cliOptions), extra: extraOptions }; }; Benchmark.prototype._queue = function(options) { diff --git a/benchmark/fs/bench-realpathSync.js b/benchmark/fs/bench-realpathSync.js index ae1c78d30d1..bf1a38a746e 100644 --- a/benchmark/fs/bench-realpathSync.js +++ b/benchmark/fs/bench-realpathSync.js @@ -3,6 +3,8 @@ const common = require('../common'); const fs = require('fs'); const path = require('path'); + +process.chdir(__dirname); const resolved_path = path.resolve(__dirname, '../../lib/'); const relative_path = path.relative(__dirname, '../../lib/'); diff --git a/benchmark/module/module-loader.js b/benchmark/module/module-loader.js index 090f7b78549..d7e03cfee4d 100644 --- a/benchmark/module/module-loader.js +++ b/benchmark/module/module-loader.js @@ -35,6 +35,8 @@ function main(conf) { measureFull(n, conf.useCache === 'true'); else measureDir(n, conf.useCache === 'true'); + + rmrf(tmpDirectory); } function measureFull(n, useCache) { diff --git a/benchmark/url/legacy-vs-whatwg-url-searchparams-parse.js b/benchmark/url/legacy-vs-whatwg-url-searchparams-parse.js index 86714df6c19..b4a80af4e5e 100644 --- a/benchmark/url/legacy-vs-whatwg-url-searchparams-parse.js +++ b/benchmark/url/legacy-vs-whatwg-url-searchparams-parse.js @@ -7,7 +7,7 @@ const inputs = require('../fixtures/url-inputs.js').searchParams; const bench = common.createBenchmark(main, { type: Object.keys(inputs), method: ['legacy', 'whatwg'], - n: [1e5] + n: [1e6] }); function useLegacy(n, input) { diff --git a/benchmark/url/whatwg-url-idna.js b/benchmark/url/whatwg-url-idna.js index 41b4c639de9..3d0ea3dc8fe 100644 --- a/benchmark/url/whatwg-url-idna.js +++ b/benchmark/url/whatwg-url-idna.js @@ -37,8 +37,6 @@ function main(conf) { const input = inputs[conf.input][to]; const method = to === 'ascii' ? 
domainToASCII : domainToUnicode; - common.v8ForceOptimization(method, input); - bench.start(); for (var i = 0; i < n; i++) { method(input); diff --git a/benchmark/vm/run-in-context.js b/benchmark/vm/run-in-context.js index 62ebe29146e..d1d4b5a7abb 100644 --- a/benchmark/vm/run-in-context.js +++ b/benchmark/vm/run-in-context.js @@ -23,8 +23,6 @@ function main(conf) { const contextifiedSandbox = vm.createContext(); - common.v8ForceOptimization(vm.runInContext, - '0', contextifiedSandbox, options); bench.start(); for (; i < n; i++) vm.runInContext('0', contextifiedSandbox, options); diff --git a/benchmark/vm/run-in-this-context.js b/benchmark/vm/run-in-this-context.js index f66fd31a1a9..f3a7e969287 100644 --- a/benchmark/vm/run-in-this-context.js +++ b/benchmark/vm/run-in-this-context.js @@ -21,7 +21,6 @@ function main(conf) { var i = 0; - common.v8ForceOptimization(vm.runInThisContext, '0', options); bench.start(); for (; i < n; i++) vm.runInThisContext('0', options); diff --git a/deps/npm/.mailmap b/deps/npm/.mailmap index abd5c5d4a24..85eed6222c7 100644 --- a/deps/npm/.mailmap +++ b/deps/npm/.mailmap @@ -21,7 +21,7 @@ Evan Lucas Evan Lucas Faiq Raza Forbes Lindesay -Forrest L Norvell +Forrest L Norvell Gabriel Barros Geoff Flarity Gregers Gram Rygg diff --git a/deps/npm/AUTHORS b/deps/npm/AUTHORS index c91ce4a2c61..f267e5024fd 100644 --- a/deps/npm/AUTHORS +++ b/deps/npm/AUTHORS @@ -446,3 +446,7 @@ Ville Lahdenvuo Natalie Wolfe Andrew Schmadel Jonah Moses +Daijirō Wachi +Dmitry Litvinchenko +chocolateboy +Henry Zhu diff --git a/deps/npm/CHANGELOG.md b/deps/npm/CHANGELOG.md index d879e7584a7..6d1ccc252bd 100644 --- a/deps/npm/CHANGELOG.md +++ b/deps/npm/CHANGELOG.md @@ -1,3 +1,117 @@ +### v4.2.0 (2017-01-26): + +Hi all! I'm Kat, and I'm currently sitting in a train traveling at ~300km/h +through Spain. So clearly, this release should have *something* to do with +speed. And it does! Heck, with this release, you could say we're really +_blazing_, even. 🌲🔥😏 + +#### IMPROVED CLI SEARCH~ + +You might recall if you've been keeping up that one of the reasons for a +semver-major bump to `npm@4` was an improved CLI search (read: no longer blowing +up Node). The work done for that new search system, while still relying on a +full metadata download and local search, was also meant to act as groundwork for +then-ongoing work on a brand-new, smarter search system for npm. Shortly after +`npm@4` came out, the bulk of the server-side work was done, and with this +release, the npm CLI has integrated use of the new endpoint for high-quality, +fast-turnaround searches. + +No, seriously, it's *fast*. And *relevant*: + +[![GOTTA GO FAST! This is a gif of the new npm search returning results in around a second for `npm search web framework`.](https://cloud.githubusercontent.com/assets/17535/21954136/f007e8be-d9fd-11e6-9231-f899c12790e0.gif)](https://github.com/npm/npm/pull/15481) + +Give it a shot! And remember to check out the new website version of the search, +too, which uses the same backend as the CLI now. 🎉 + +Incidentally, the backend is a public service, so you can write your own search +tools, be they web-based, CLI, or GUI-based. You can read up on the [full +documentation for the search +endpoint](https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#get-v1search), +and let us know about the cool things you come up with! 
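As a rough editorial sketch of what that enables: a few lines of Node.js are enough to hit the search route yourself. The `/-/v1/search` path and the `text`/`size` parameters come from the registry documentation linked above; the response shape used here (`objects[].package`) is an assumption based on that document rather than anything stated in this changelog.

```js
// Minimal sketch: query the registry search endpoint directly.
// Assumes the GET /-/v1/search route and text/size parameters from the
// npm registry API docs; the objects[].package response shape is likewise
// taken from that documentation.
'use strict'
const https = require('https')

const text = encodeURIComponent('web framework')
const url = 'https://registry.npmjs.org/-/v1/search?text=' + text + '&size=20'

https.get(url, function (res) {
  let body = ''
  res.setEncoding('utf8')
  res.on('data', function (chunk) { body += chunk })
  res.on('end', function () {
    const results = JSON.parse(body)
    results.objects.forEach(function (obj) {
      // One line per hit: package name and its description.
      console.log(obj.package.name + ' - ' + obj.package.description)
    })
  })
}).on('error', function (err) {
  console.error('search request failed:', err.message)
})
```

The `size` value of 20 mirrors the new `searchlimit` default documented later in this release.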
+ +* [`ce3ca51`](https://github.com/npm/npm/commit/ce3ca51ca2d60e15e901c8bf6256338e53e1eca2) + [#15481](https://github.com/npm/npm/pull/15481) + Add an internal `gunzip-maybe` utility for optional gunzipping. + ([@zkat](https://github.com/zkat)) +* [`e322932`](https://github.com/npm/npm/commit/e3229324d507fda10ea9e94fd4de8a4ae5025c75) [`a53055e`](https://github.com/npm/npm/commit/a53055e423f1fe168f05047aa0dfec6d963cb211) [`a1f4365`](https://github.com/npm/npm/commit/a1f436570730c6e4a173ca92d1967a87c29b7f2d) [`c56618c`](https://github.com/npm/npm/commit/c56618c62854ea61f6f716dffe7bcac80b5f4144) + [#15481](https://github.com/npm/npm/pull/15481) + Add support for using the new npm search endpoint for fast, quality search + results. Includes a fallback to "classic" search. + ([@zkat](https://github.com/zkat)) + +#### WHERE DID THE DEBUG LOGS GO + +This is another pretty significant change: Usually, when the npm process +crashed, you would get an `npm-debug.log` in your current working directory. +This debug log would get cleared out as soon as you ran npm again. This was a +bit annoying because 1) you would get a random file in your `git status` that +you might accidentally commit, and 2) if you hit a hard-to-reproduce bug and +instinctually tried again, you would no longer have access to the repro +`npm-debug.log`. + +So now, any time a crash happens, we'll save your debug logs to your cache +folder, under `_logs` (`~/.npm` on *nix, by default -- use `npm config get +cache` to see what your current value is). The cache will now hold a +(configurable) number of `npm-debug.log` files, which you can access in the +future. Hopefully this will help clean stuff up and reduce frustration from +missed repros! In the future, this will also be used by `npm report` to make it +super easy to put up issues about crashes you run into with npm. 💃🕺🏿👯‍♂️ + +* [`04fca22`](https://github.com/npm/npm/commit/04fca223a0f704b69340c5f81b26907238fad878) + [#11439](https://github.com/npm/npm/pull/11439) + Put debug logs in `$(npm get cache)/_logs` and store multiple log files. + ([@KenanY](https://github.com/KenanY)) + ([@othiym23](https://github.com/othiym23)) + ([@isaacs](https://github.com/isaacs)) + ([@iarna](https://github.com/iarna)) + +#### DOCS + +* [`ae8e71c`](https://github.com/npm/npm/commit/ae8e71c2b7d64d782af287a21e146d7cea6e5273) + [#15402](https://github.com/npm/npm/pull/15402) + Add missing backtick in one of the `npm doctor` messages. + ([@watilde](https://github.com/watilde), [@charlotteis](https://github.com/charlotteis)) +* [`821fee6`](https://github.com/npm/npm/commit/821fee6d0b12a324e035c397ae73904db97d07d2) + [#15480](https://github.com/npm/npm/pull/15480) + Clarify that unscoped packages can depend on scoped packages and vice-versa. + ([@chocolateboy](https://github.com/chocolateboy)) +* [`2ee45a8`](https://github.com/npm/npm/commit/2ee45a884137ae0706b7c741c671fef2cb3bac96) + [#15515](https://github.com/npm/npm/pull/15515) + Update minimum supported Node version number in the README to `node@>=4`. + ([@watilde](https://github.com/watilde)) +* [`af06aa9`](https://github.com/npm/npm/commit/af06aa9a357578a8fd58c575f3dbe55bc65fc376) + [#15520](https://github.com/npm/npm/pull/15520) + Add section to `npm-scope` docs to explain that scope owners will own scoped + packages with that scope. That is, user `@alice` is not allowed to publish to + `@bob/my-package` unless explicitly made an owner by user (or org) `@bob`. 
+ ([@hzoo](https://github.com/hzoo)) +* [`bc892e6`](https://github.com/npm/npm/commit/bc892e6d07a4c6646480703641a4d71129c38b6d) + [#15539](https://github.com/npm/npm/pull/15539) + Replace `http` with `https` and fix typos in some docs. + ([@watilde](https://github.com/watilde)) +* [`1dfe875`](https://github.com/npm/npm/commit/1dfe875b9ac61a0ab9f61a2eab02bacf6cce583c) + [#15545](https://github.com/npm/npm/pull/15545) + Update Node.js download link to point to the right place. + ([@watilde](https://github.com/watilde)) + +#### DEPENDENCIES + + * [`b824bfb`](https://github.com/npm/npm/commit/b824bfbeb2d89c92762e9170b026af98b5a3668a) + `ansi-regex@2.1.1` + * [`81ea3e8`](https://github.com/npm/npm/commit/81ea3e8e4ea34cd9c2b418512dcb508abcee1380) + `mississippi@1.3.0` + +#### MISC + +* [`98df212`](https://github.com/npm/npm/commit/98df212a91fd6ff4a02b9cd247f4166f93d3977a) + [#15492](https://github.com/npm/npm/pull/15492) + Update the "master" node version used for AppVeyor to `node@7`. + ([@watilde](https://github.com/watilde)) +* [`d75fc03`](https://github.com/npm/npm/commit/d75fc03eda5364f12ac266fa4f66e31c2e44e864) + [#15413](https://github.com/npm/npm/pull/15413) + `npm run-script` now exits with the child process' exit code on exit. + ([@kapals](https://github.com/kapals)) + ### v4.1.2 (2017-01-12) We have a twee little release this week as we come back from the holidays. diff --git a/deps/npm/README.md b/deps/npm/README.md index eedf76b6b10..2237414a545 100644 --- a/deps/npm/README.md +++ b/deps/npm/README.md @@ -9,9 +9,9 @@ Much more info available via `npm help` once it's installed. ## IMPORTANT -**You need node v0.10 or higher to run this program.** +**You need node v4 or higher to run this program.** -To install an old **and unsupported** version of npm that works on node 0.3 +To install an old **and unsupported** version of npm that works on node v0.12 and prior, clone the git repo and dig through the old tags and branches. **npm is configured to use npm, Inc.'s public package registry at @@ -84,7 +84,7 @@ for testing, or running stuff without actually installing npm itself.) Many improvements for Windows users have been made in npm 3 - you will have a better experience if you run a recent version of npm. To upgrade, either use [Microsoft's upgrade tool](https://github.com/felixrieseberg/npm-windows-upgrade), -[download a new version of Node](http://nodejs.org/download/), +[download a new version of Node](https://nodejs.org/en/download/), or follow the Windows upgrade instructions in the [npm Troubleshooting Guide](https://github.com/npm/npm/wiki/Troubleshooting#upgrading-on-windows). 
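The README's new floor of node v4 is a hard requirement. As a purely illustrative sketch (not npm's actual startup check), a CLI can enforce such a floor with a few lines:

```js
// Illustrative only: bail out when the running Node.js is older than v4.
// process.version is a string such as "v7.5.0".
'use strict'
var MIN_MAJOR = 4
var major = parseInt(process.version.slice(1).split('.')[0], 10)

if (isNaN(major) || major < MIN_MAJOR) {
  console.error('This tool requires Node.js v' + MIN_MAJOR +
    ' or newer; found ' + process.version)
  process.exit(1)
}
```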
diff --git a/deps/npm/appveyor.yml b/deps/npm/appveyor.yml index 1dd58c7b78a..d808b2dbcca 100644 --- a/deps/npm/appveyor.yml +++ b/deps/npm/appveyor.yml @@ -5,7 +5,7 @@ environment: # previous LTS is next most important - nodejs_version: "4" # then master - - nodejs_version: "5" + - nodejs_version: "7" COVERALLS_REPO_TOKEN: secure: XdC0aySefK0HLh1GNk6aKrzZPbCfPQLyA4mYtFGEp4DrTuZA/iuCUS0LDqFYO8JQ platform: diff --git a/deps/npm/doc/files/package.json.md b/deps/npm/doc/files/package.json.md index 37e7688c5f0..33f30116417 100644 --- a/deps/npm/doc/files/package.json.md +++ b/deps/npm/doc/files/package.json.md @@ -119,7 +119,7 @@ array of license objects: // Not valid metadata { "license" : { "type" : "ISC" - , "url" : "http://opensource.org/licenses/ISC" + , "url" : "https://opensource.org/licenses/ISC" } } @@ -127,10 +127,10 @@ array of license objects: { "licenses" : [ { "type": "MIT" - , "url": "http://www.opensource.org/licenses/mit-license.php" + , "url": "https://www.opensource.org/licenses/mit-license.php" } , { "type": "Apache-2.0" - , "url": "http://opensource.org/licenses/apache2.0.php" + , "url": "https://opensource.org/licenses/apache2.0.php" } ] } diff --git a/deps/npm/doc/misc/npm-config.md b/deps/npm/doc/misc/npm-config.md index e93e42a3fb7..cafbedf5321 100644 --- a/deps/npm/doc/misc/npm-config.md +++ b/deps/npm/doc/misc/npm-config.md @@ -576,6 +576,13 @@ stderr. If the `color` config is set to true, then this stream will receive colored output if it is a TTY. +### logs-max + +* Default: 10 +* Type: Number + +The maximum number of log files to store. + ### long * Default: false @@ -844,12 +851,21 @@ Space-separated options that limit the results from search. Space-separated options that are always passed to search. +### searchlimit + +* Default: 20 +* Type: Number + +Number of items to limit search results to. Will not apply at all to legacy +searches. + ### searchstaleness * Default: 900 (15 minutes) * Type: Number -The age of the cache, in seconds, before another registry request is made. +The age of the cache, in seconds, before another registry request is made if +using legacy search endpoint. ### send-metrics diff --git a/deps/npm/doc/misc/npm-scope.md b/deps/npm/doc/misc/npm-scope.md index e9d4d025c24..940c1dbb58c 100644 --- a/deps/npm/doc/misc/npm-scope.md +++ b/deps/npm/doc/misc/npm-scope.md @@ -4,30 +4,35 @@ npm-scope(7) -- Scoped packages ## DESCRIPTION All npm packages have a name. Some package names also have a scope. A scope -follows the usual rules for package names (url-safe characters, no leading dots -or underscores). When used in package names, preceded by an @-symbol and -followed by a slash, e.g. +follows the usual rules for package names (URL-safe characters, no leading dots +or underscores). When used in package names, scopes are preceded by an `@` symbol +and followed by a slash, e.g. @somescope/somepackagename Scopes are a way of grouping related packages together, and also affect a few things about the way npm treats the package. +Each npm user/organization has their own scope, and only you can add packages +in your scope. This means you don't have to worry about someone taking your +package name ahead of you. Thus it is also a good way to signal official packages +for organizations. + Scoped packages can be published and installed as of `npm@2` and are supported -by the primary npm registry. The npm client is backwards-compatible with -un-scoped registries, so it can be used to work with scoped and un-scoped -registries at the same time. 
+by the primary npm registry. Unscoped packages can depend on scoped packages and +vice versa. The npm client is backwards-compatible with unscoped registries, +so it can be used to work with scoped and unscoped registries at the same time. ## Installing scoped packages Scoped packages are installed to a sub-folder of the regular installation folder, e.g. if your other packages are installed in `node_modules/packagename`, -scoped modules will be in `node_modules/@myorg/packagename`. The scope folder -(`@myorg`) is simply the name of the scope preceded by an @-symbol, and can +scoped modules will be installed in `node_modules/@myorg/packagename`. The scope +folder (`@myorg`) is simply the name of the scope preceded by an `@` symbol, and can contain any number of scoped packages. A scoped package is installed by referencing it by name, preceded by an -@-symbol, in `npm install`: +`@` symbol, in `npm install`: npm install @myorg/mypackage @@ -37,7 +42,7 @@ Or in `package.json`: "@myorg/mypackage": "^1.3.0" } -Note that if the @-symbol is omitted in either case npm will instead attempt to +Note that if the `@` symbol is omitted, in either case, npm will instead attempt to install from GitHub; see `npm-install(1)`. ## Requiring scoped packages @@ -47,8 +52,8 @@ include the name of the scope when requiring them in your code, e.g. require('@myorg/mypackage') -There is nothing special about the way Node treats scope folders, this is -just specifying to require the module `mypackage` in the folder called `@myorg`. +There is nothing special about the way Node treats scope folders. This +simply requires the `mypackage` module in the folder named `@myorg`. ## Publishing scoped packages @@ -57,14 +62,14 @@ published to any registry that supports them, including the primary npm registry. (As of 2015-04-19, and with npm 2.0 or better, the primary npm registry -**does** support scoped packages) +**does** support scoped packages.) If you wish, you may associate a scope with a registry; see below. ### Publishing public scoped packages to the primary npm registry To publish a public scoped package, you must specify `--access public` with -the initial publication. This will publish the package and set access +the initial publication. This will publish the package and set access to `public` as if you had run `npm access public` after publishing. ### Publishing private scoped packages to the npm registry @@ -75,7 +80,7 @@ account. You can then publish the module with `npm publish` or `npm publish --access restricted`, and it will be present in the npm registry, with -restricted access. You can then change the access permissions, if +restricted access. You can then change the access permissions, if desired, with `npm access` or on the npmjs.com website. ## Associating a scope with a registry @@ -105,3 +110,4 @@ that registry instead. * npm-install(1) * npm-publish(1) * npm-access(1) +* npm-registry(7) diff --git a/deps/npm/html/doc/README.html b/deps/npm/html/doc/README.html index 1ec7db7078e..c471072b8cf 100644 --- a/deps/npm/html/doc/README.html +++ b/deps/npm/html/doc/README.html @@ -15,8 +15,8 @@
[This hunk and the remaining changes through deps/npm/html/doc/misc/semver.html update npm's pre-generated HTML documentation (README.html, index.html, and every page under cli/, files/, and misc/). They track the markdown sources changed elsewhere in this commit: each page's footer is regenerated for npm@4.2.0; README.html picks up the node v4 minimum, the node v0.12 wording, and the https://nodejs.org/en/download/ link; npm-json.html and package.json.html switch the opensource.org license URLs to https; npm-config.html gains the logs-max and searchlimit entries and the legacy-search wording for searchstaleness; npm-ls.html and npm.html report version 4.2.0; npm-disputes.html regenerates the lines carrying the support@npmjs.com and abuse@npmjs.com addresses; and npm-scope.html mirrors the npm-scope.md rewording and adds npm-registry(7) to its SEE ALSO list.]
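Before the library changes below, a simplified, self-contained sketch of the crash-log rotation they implement may help: the `logs-max` setting documented above caps how many `*-debug.log` files are kept under the cache's `_logs` directory, oldest first. This condenses the `glob` + `rimraf` hunk added to `lib/npm.js` later in this diff; the example cache path is made up and error handling is trimmed.

```js
// Condensed sketch of the logs-max pruning added to lib/npm.js in this
// commit: keep fewer than `logs-max` old debug logs so the current run
// can write a new one. Assumes the timestamped log names sort oldest-first.
'use strict'
var path = require('path')
var glob = require('glob')
var rimraf = require('rimraf')

function pruneDebugLogs (cacheDir, logsMax, cb) {
  var pattern = path.resolve(cacheDir, '_logs', '*-debug.log')
  glob(pattern, function (er, files) {
    if (er) return cb(er)
    while (files.length >= logsMax) {
      rimraf.sync(files[0]) // files[0] is the oldest log
      files.splice(0, 1)
    }
    cb(null)
  })
}

// Example values only: the path stands in for `npm config get cache`,
// and 10 is the documented logs-max default.
pruneDebugLogs(path.join(process.env.HOME || '.', '.npm'), 10, function (er) {
  if (er) console.error('could not prune debug logs:', er.message)
})
```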
       - + diff --git a/deps/npm/lib/config/defaults.js b/deps/npm/lib/config/defaults.js index 81ead08b6f9..a79c8a0fa07 100644 --- a/deps/npm/lib/config/defaults.js +++ b/deps/npm/lib/config/defaults.js @@ -164,6 +164,7 @@ Object.defineProperty(exports, 'defaults', {get: function () { 'local-address': undefined, loglevel: 'warn', logstream: process.stderr, + 'logs-max': 10, long: false, maxsockets: 50, message: '%s', @@ -196,6 +197,7 @@ Object.defineProperty(exports, 'defaults', {get: function () { 'scripts-prepend-node-path': 'warn-only', searchopts: '', searchexclude: null, + searchlimit: 20, searchstaleness: 15 * 60, 'send-metrics': false, shell: osenv.shell(), @@ -278,6 +280,7 @@ exports.types = { 'local-address': getLocalAddresses(), loglevel: ['silent', 'error', 'warn', 'http', 'info', 'verbose', 'silly'], logstream: Stream, + 'logs-max': Number, long: Boolean, maxsockets: Number, message: String, @@ -305,6 +308,7 @@ exports.types = { 'scripts-prepend-node-path': [false, true, 'auto', 'warn-only'], searchopts: String, searchexclude: [null, String], + searchlimit: Number, searchstaleness: Number, 'send-metrics': Boolean, shell: String, diff --git a/deps/npm/lib/doctor.js b/deps/npm/lib/doctor.js index f2037ab86c0..18453491e46 100644 --- a/deps/npm/lib/doctor.js +++ b/deps/npm/lib/doctor.js @@ -98,7 +98,7 @@ function makePretty (p) { if (ping !== 'ok') list[0][2] = 'Check your internet connection' if (!semver.satisfies(npmV, '>=' + npmLTS)) list[1][2] = 'Use npm v' + npmLTS if (!semver.satisfies(nodeV, '>=' + nodeLTS)) list[2][2] = 'Use node v' + nodeLTS - if (registry !== defaultRegistry) list[3][2] = 'Try `npm config set registry ' + defaultRegistry + if (registry !== defaultRegistry) list[3][2] = 'Try `npm config set registry ' + defaultRegistry + '`' if (whichGit === 'not installed') list[4][2] = 'Install git and ensure it\'s in your PATH.' if (readbleCaches !== 'ok') list[5][2] = 'Check the permissions of your files in ' + npm.config.get('cache') if (executableGlobalModules !== 'ok') list[6][2] = globalNodeModules + ' must be readable and writable by the current user.' diff --git a/deps/npm/lib/fetch-package-metadata.md b/deps/npm/lib/fetch-package-metadata.md index 6a7d4fc595d..6fe4beac6ee 100644 --- a/deps/npm/lib/fetch-package-metadata.md +++ b/deps/npm/lib/fetch-package-metadata.md @@ -5,7 +5,7 @@ fetch-package-metadata fetchPackageMetadata(spec, contextdir, callback) This will get package metadata (and if possible, ONLY package metadata) for -a specifer as passed to `npm install` et al, eg `npm@next` or `npm@^2.0.3` +a specifier as passed to `npm install` et al, eg `npm@next` or `npm@^2.0.3` ## fetchPackageMetadata(*spec*, *contextdir*, *tracker*, *callback*) @@ -29,7 +29,7 @@ a specifer as passed to `npm install` et al, eg `npm@next` or `npm@^2.0.3` In the case of tarballs and git repos, it will use the cache to download them in order to get the package metadata. For named packages, only the -metadata is downloaded (eg http://registry.npmjs.org/package). For local +metadata is downloaded (eg https://registry.npmjs.org/package). For local directories, the package.json is read directly. For local tarballs, the tarball is streamed in memory and just the package.json is extracted from it. 
(Due to the nature of tars, having the package.json early in the file diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js index d0af64b1957..890c369cc6c 100644 --- a/deps/npm/lib/npm.js +++ b/deps/npm/lib/npm.js @@ -29,6 +29,8 @@ var path = require('path') var abbrev = require('abbrev') var which = require('which') + var glob = require('glob') + var rimraf = require('rimraf') var CachingRegClient = require('./cache/caching-client.js') var parseJSON = require('./utils/parse-json.js') var aliases = require('./config/cmd-list').aliases @@ -286,8 +288,21 @@ log.disableProgress() } + glob(path.resolve(npm.cache, '_logs', '*-debug.log'), function (er, files) { + if (er) return cb(er) + + while (files.length >= npm.config.get('logs-max')) { + rimraf.sync(files[0]) + files.splice(0, 1) + } + }) + log.resume() + // at this point the configs are all set. + // go ahead and spin up the registry client. + npm.registry = new CachingRegClient(npm.config) + var umask = npm.config.get('umask') npm.modes = { exec: parseInt('0777', 8) & (~umask), diff --git a/deps/npm/lib/search.js b/deps/npm/lib/search.js index c9f3628717a..3987be135c9 100644 --- a/deps/npm/lib/search.js +++ b/deps/npm/lib/search.js @@ -3,8 +3,8 @@ module.exports = exports = search var npm = require('./npm.js') -var allPackageMetadata = require('./search/all-package-metadata.js') -var packageFilter = require('./search/package-filter.js') +var allPackageSearch = require('./search/all-package-search') +var esearch = require('./search/esearch.js') var formatPackageStream = require('./search/format-package-stream.js') var usage = require('./utils/usage') var output = require('./utils/output.js') @@ -21,33 +21,44 @@ search.completion = function (opts, cb) { } function search (args, cb) { - var staleness = npm.config.get('searchstaleness') + var searchOpts = { + description: npm.config.get('description'), + exclude: prepareExcludes(npm.config.get('searchexclude')), + include: prepareIncludes(args, npm.config.get('searchopts')), + limit: npm.config.get('searchlimit'), + log: log, + staleness: npm.config.get('searchstaleness'), + unicode: npm.config.get('unicode') + } - var include = prepareIncludes(args, npm.config.get('searchopts')) - if (include.length === 0) { + if (searchOpts.include.length === 0) { return cb(new Error('search must be called with arguments')) } - var exclude = prepareExcludes(npm.config.get('searchexclude')) - // Used later to figure out whether we had any packages go out var anyOutput = false - // Get a stream with *all* the packages. This takes care of dealing - // with the local cache as well, but that's an internal detail. - var allEntriesStream = allPackageMetadata(staleness) + var entriesStream = ms.through.obj() - // Grab a stream that filters those packages according to given params. - var searchSection = (npm.config.get('unicode') ? '🤔 ' : '') + 'search' - var filterStream = streamFilter(function (pkg) { - log.gauge.pulse('search') - log.gauge.show({section: searchSection, logline: 'scanning ' + pkg.name}) - // Simply 'true' if the package matches search parameters. - var match = packageFilter(pkg, include, exclude, { - description: npm.config.get('description') + var esearchWritten = false + esearch(searchOpts).on('data', function (pkg) { + entriesStream.write(pkg) + !esearchWritten && (esearchWritten = true) + }).on('error', function (e) { + if (esearchWritten) { + // If esearch errored after already starting output, we can't fall back. 
+ return entriesStream.emit('error', e) + } + log.warn('search', 'fast search endpoint errored. Using old search.') + allPackageSearch(searchOpts).on('data', function (pkg) { + entriesStream.write(pkg) + }).on('error', function (e) { + entriesStream.emit('error', e) + }).on('end', function () { + entriesStream.end() }) - if (match) { anyOutput = true } - return match + }).on('end', function () { + entriesStream.end() }) // Grab a configured output stream that will spit out packages in the @@ -61,16 +72,17 @@ function search (args, cb) { color: npm.color }) outputStream.on('data', function (chunk) { + if (!anyOutput) { anyOutput = true } output(chunk.toString('utf8')) }) log.silly('search', 'searching packages') - ms.pipe(allEntriesStream, filterStream, outputStream, function (er) { + ms.pipe(entriesStream, outputStream, function (er) { if (er) return cb(er) if (!anyOutput && !npm.config.get('json') && !npm.config.get('parseable')) { output('No matches found for ' + (args.map(JSON.stringify).join(' '))) } - log.silly('search', 'index search completed') + log.silly('search', 'search completed') log.clearProgress() cb(null, {}) }) @@ -94,12 +106,3 @@ function prepareExcludes (searchexclude) { return s.toLowerCase() }) } - -function streamFilter (filter) { - return ms.through.obj(function (chunk, enc, cb) { - if (filter(chunk)) { - this.push(chunk) - } - cb() - }) -} diff --git a/deps/npm/lib/search/all-package-metadata.js b/deps/npm/lib/search/all-package-metadata.js index ce917f5ef05..fda5b6d3064 100644 --- a/deps/npm/lib/search/all-package-metadata.js +++ b/deps/npm/lib/search/all-package-metadata.js @@ -14,6 +14,7 @@ var writeStreamAtomic = require('fs-write-stream-atomic') var ms = require('mississippi') var sortedUnionStream = require('sorted-union-stream') var once = require('once') +var gunzip = require('../utils/gunzip-maybe') // Returns a sorted stream of all package metadata. Internally, takes care of // maintaining its metadata cache and making partial or full remote requests, @@ -153,6 +154,7 @@ function createEntryUpdateStream (all, auth, staleness, latest, cb) { ms.through(function (chunk, enc, cb) { cb(null, chunk) }), + gunzip(), jsonstream.parse('*', function (pkg, key) { if (key[0] === '_updated' || key[0][0] !== '_') { return pkg diff --git a/deps/npm/lib/search/all-package-search.js b/deps/npm/lib/search/all-package-search.js new file mode 100644 index 00000000000..7a893d517b8 --- /dev/null +++ b/deps/npm/lib/search/all-package-search.js @@ -0,0 +1,50 @@ +var ms = require('mississippi') +var allPackageMetadata = require('./all-package-metadata') +var packageFilter = require('./package-filter.js') + +module.exports = allPackageSearch +function allPackageSearch (opts) { + var searchSection = (opts.unicode ? '🤔 ' : '') + 'search' + + // Get a stream with *all* the packages. This takes care of dealing + // with the local cache as well, but that's an internal detail. + var allEntriesStream = allPackageMetadata(opts.staleness) + + // Grab a stream that filters those packages according to given params. + var filterStream = streamFilter(function (pkg) { + opts.log.gauge.pulse('search') + opts.log.gauge.show({section: searchSection, logline: 'scanning ' + pkg.name}) + // Simply 'true' if the package matches search parameters. 
+ var match = packageFilter(pkg, opts.include, opts.exclude, { + description: opts.description + }) + return match + }) + return ms.pipeline.obj(allEntriesStream, filterStream) +} + +function streamFilter (filter) { + return ms.through.obj(function (data, enc, cb) { + if (filter(data)) { + this.push(standardizePkg(data)) + } + cb() + }) +} + +function standardizePkg (data) { + return { + name: data.name, + description: data.description, + maintainers: (data.maintainers || []).map(function (m) { + return { username: m.name, email: m.email } + }), + keywords: data.keywords || [], + version: Object.keys(data.versions || {})[0] || [], + date: ( + data.time && + data.time.modified && + new Date(data.time.modified) + ) || null + } +} diff --git a/deps/npm/lib/search/esearch.js b/deps/npm/lib/search/esearch.js new file mode 100644 index 00000000000..f4beb7ade66 --- /dev/null +++ b/deps/npm/lib/search/esearch.js @@ -0,0 +1,64 @@ +'use strict' + +var npm = require('../npm.js') +var log = require('npmlog') +var mapToRegistry = require('../utils/map-to-registry.js') +var jsonstream = require('JSONStream') +var ms = require('mississippi') +var gunzip = require('../utils/gunzip-maybe') + +module.exports = esearch + +function esearch (opts) { + var stream = ms.through.obj() + + mapToRegistry('-/v1/search', npm.config, function (er, uri, auth) { + if (er) return stream.emit('error', er) + createResultStream(uri, auth, opts, function (err, resultStream) { + if (err) return stream.emit('error', err) + ms.pipeline.obj(resultStream, stream) + }) + }) + return stream +} + +function createResultStream (uri, auth, opts, cb) { + log.verbose('esearch', 'creating remote entry stream') + var params = { + timeout: 600, + follow: true, + staleOk: true, + auth: auth, + streaming: true + } + var q = buildQuery(opts) + npm.registry.request(uri + '?text=' + encodeURIComponent(q) + '&size=' + opts.limit, params, function (err, res) { + if (err) return cb(err) + log.silly('esearch', 'request stream opened, code:', res.statusCode) + // NOTE - The stream returned by `request` seems to be very persnickety + // and this is almost a magic incantation to get it to work. + // Modify how `res` is used here at your own risk. + var entryStream = ms.pipeline.obj( + res, + ms.through(function (chunk, enc, cb) { + cb(null, chunk) + }), + gunzip(), + jsonstream.parse('objects.*.package', function (data) { + return { + name: data.name, + description: data.description, + maintainers: data.maintainers, + keywords: data.keywords, + version: data.version, + date: data.date ? new Date(data.date) : null + } + }) + ) + return cb(null, entryStream) + }) +} + +function buildQuery (opts) { + return opts.include.join(' ') +} diff --git a/deps/npm/lib/search/format-package-stream.js b/deps/npm/lib/search/format-package-stream.js index d4333dc1a6a..caa46cb8987 100644 --- a/deps/npm/lib/search/format-package-stream.js +++ b/deps/npm/lib/search/format-package-stream.js @@ -4,6 +4,20 @@ var ms = require('mississippi') var jsonstream = require('JSONStream') var columnify = require('columnify') +// This module consumes package data in the following format: +// +// { +// name: String, +// description: String, +// maintainers: [{ username: String, email: String }], +// keywords: String | [String], +// version: String, +// date: Date // can be null, +// } +// +// The returned stream will format this package data +// into a byte stream of formatted, displayable output. 
+ module.exports = formatPackageStream function formatPackageStream (opts) { opts = opts || {} @@ -33,26 +47,7 @@ function prettify (data, num, opts) { opts = opts || {} var truncate = !opts.long - var dat = stripData(data, opts) - dat.author = dat.maintainers - delete dat.maintainers - dat.date = dat.time - delete dat.time - // split keywords on whitespace or , - if (typeof dat.keywords === 'string') { - dat.keywords = dat.keywords.split(/[,\s]+/) - } - if (Array.isArray(dat.keywords)) { - dat.keywords = dat.keywords.join(' ') - } - - // split author on whitespace or , - if (typeof dat.author === 'string') { - dat.author = dat.author.split(/[,\s]+/) - } - if (Array.isArray(dat.author)) { - dat.author = dat.author.join(' ') - } + var pkg = normalizePackage(data, opts) var columns = opts.description ? ['name', 'description', 'author', 'date', 'version', 'keywords'] @@ -60,12 +55,12 @@ function prettify (data, num, opts) { if (opts.parseable) { return columns.map(function (col) { - return dat[col] && ('' + dat[col]).replace(/\t/g, ' ') + return pkg[col] && ('' + pkg[col]).replace(/\t/g, ' ') }).join('\t') } var output = columnify( - [dat], + [pkg], { include: columns, showHeaders: num <= 1, @@ -153,20 +148,22 @@ function highlightSearchTerms (str, terms) { return colorize(str).trim() } -function stripData (data, opts) { +function normalizePackage (data, opts) { opts = opts || {} return { name: data.name, description: opts.description ? data.description : '', - maintainers: (data.maintainers || []).map(function (m) { - return '=' + m.name - }), - url: !Object.keys(data.versions || {}).length ? data.url : null, - keywords: data.keywords || [], + author: (data.maintainers || []).map(function (m) { + return '=' + m.username + }).join(' '), + keywords: Array.isArray(data.keywords) + ? data.keywords.join(' ') + : typeof data.keywords === 'string' + ? 
data.keywords.replace(/[,\s]+/, ' ') + : '', version: Object.keys(data.versions || {})[0] || [], - time: data.time && - data.time.modified && - (new Date(data.time.modified).toISOString() // remove time + date: data.date && + (data.date.toISOString() // remove time .split('T').join(' ') .replace(/:[0-9]{2}\.[0-9]{3}Z$/, '')) .slice(0, -5) || diff --git a/deps/npm/lib/utils/error-handler.js b/deps/npm/lib/utils/error-handler.js index f3cfc3c5ea2..a2911a101aa 100644 --- a/deps/npm/lib/utils/error-handler.js +++ b/deps/npm/lib/utils/error-handler.js @@ -14,6 +14,15 @@ var chain = require('slide').chain var writeStreamAtomic = require('fs-write-stream-atomic') var errorMessage = require('./error-message.js') var stopMetrics = require('./metrics.js').stop +var mkdir = require('mkdirp') + +var logFileName +function getLogFile () { + if (!logFileName) { + logFileName = path.resolve(npm.config.get('cache'), '_logs', (new Date()).toISOString().replace(/[.:]/g, '_') + '-debug.log') + } + return logFileName +} process.on('exit', function (code) { log.disableProgress() @@ -37,7 +46,7 @@ process.on('exit', function (code) { '', [ 'Please include the following file with any support request:', - ' ' + path.resolve('npm-debug.log') + ' ' + getLogFile() ].join('\n') ) wroteLogFile = false @@ -79,7 +88,7 @@ function exit (code, noLog) { else writeLogFile(reallyExit.bind(this, er)) } else { if (!noLog && code) writeLogFile(reallyExit) - else rm('npm-debug.log', reallyExit) + else rm(getLogFile(), reallyExit) } }) rollbacks.length = 0 @@ -191,22 +200,26 @@ function writeLogFile (cb) { writingLogFile = true wroteLogFile = true - var fstr = writeStreamAtomic('npm-debug.log') var os = require('os') - var out = '' - log.record.forEach(function (m) { - var pref = [m.id, m.level] - if (m.prefix) pref.push(m.prefix) - pref = pref.join(' ') - - m.message.trim().split(/\r?\n/).map(function (line) { - return (pref + ' ' + line).trim() - }).forEach(function (line) { - out += line + os.EOL + mkdir(path.resolve(npm.config.get('cache'), '_logs'), function (er) { + if (er) { + cb(er) + return + } + var fstr = writeStreamAtomic(getLogFile()) + log.record.forEach(function (m) { + var pref = [m.id, m.level] + if (m.prefix) pref.push(m.prefix) + pref = pref.join(' ') + + m.message.trim().split(/\r?\n/).map(function (line) { + return (pref + ' ' + line).trim() + }).forEach(function (line) { + fstr.write(line + os.EOL) + }) }) + fstr.end() + fstr.on('close', cb) }) - - fstr.end(out) - fstr.on('close', cb) } diff --git a/deps/npm/lib/utils/gunzip-maybe.js b/deps/npm/lib/utils/gunzip-maybe.js new file mode 100644 index 00000000000..db75f060171 --- /dev/null +++ b/deps/npm/lib/utils/gunzip-maybe.js @@ -0,0 +1,22 @@ +var duplex = require('mississippi').duplex +var through = require('mississippi').through +var zlib = require('zlib') + +function hasGzipHeader (c) { + return c[0] === 0x1F && c[1] === 0x8B && c[2] === 0x08 +} + +module.exports = gunzip +function gunzip () { + var stream = duplex() + var peeker = through(function (chunk, enc, cb) { + var newStream = hasGzipHeader(chunk) + ? 
zlib.createGunzip() + : through() + stream.setReadable(newStream) + stream.setWritable(newStream) + stream.write(chunk) + }) + stream.setWritable(peeker) + return stream +} diff --git a/deps/npm/lib/utils/lifecycle.js b/deps/npm/lib/utils/lifecycle.js index a1d1625153d..f48d5e2a3bd 100644 --- a/deps/npm/lib/utils/lifecycle.js +++ b/deps/npm/lib/utils/lifecycle.js @@ -277,6 +277,7 @@ function runCmd_ (cmd, pkg, env, wd, stage, unsafe, uid, gid, cb_) { process.kill(process.pid, signal) } else if (code) { var er = new Error('Exit status ' + code) + er.errno = code } procError(er) }) diff --git a/deps/npm/man/man1/npm-README.1 b/deps/npm/man/man1/npm-README.1 index b7c50819f83..1af9c6a0bd5 100644 --- a/deps/npm/man/man1/npm-README.1 +++ b/deps/npm/man/man1/npm-README.1 @@ -1,4 +1,4 @@ -.TH "NPM" "1" "January 2017" "" "" +.TH "NPM" "1" "February 2017" "" "" .SH "NAME" \fBnpm\fR \- a JavaScript package manager .P @@ -10,9 +10,9 @@ This is just enough info to get you up and running\. Much more info available via \fBnpm help\fP once it's installed\. .SH IMPORTANT .P -\fBYou need node v0\.10 or higher to run this program\.\fR +\fBYou need node v4 or higher to run this program\.\fR .P -To install an old \fBand unsupported\fR version of npm that works on node 0\.3 +To install an old \fBand unsupported\fR version of npm that works on node v0\.12 and prior, clone the git repo and dig through the old tags and branches\. .P \fBnpm is configured to use npm, Inc\.'s public package registry at @@ -83,7 +83,7 @@ for testing, or running stuff without actually installing npm itself\.) Many improvements for Windows users have been made in npm 3 \- you will have a better experience if you run a recent version of npm\. To upgrade, either use Microsoft's upgrade tool \fIhttps://github\.com/felixrieseberg/npm\-windows\-upgrade\fR, -download a new version of Node \fIhttp://nodejs\.org/download/\fR, +download a new version of Node \fIhttps://nodejs\.org/en/download/\fR, or follow the Windows upgrade instructions in the npm Troubleshooting Guide \fIhttps://github\.com/npm/npm/wiki/Troubleshooting#upgrading\-on\-windows\fR\|\. 
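The new `lib/utils/gunzip-maybe.js` helper added above deserves a note: the search code now pipes raw registry responses through it before JSON parsing, presumably because those responses may or may not arrive gzip-compressed. Below is a minimal, self-contained sketch of the same sniff-and-decompress idea using only Node core modules; it is an illustration of the technique, not npm's implementation, which is built on mississippi's `duplex`/`through` helpers.

```js
var zlib = require('zlib')
var stream = require('stream')

// gzip data starts with the magic bytes 0x1F 0x8B followed by the
// deflate compression-method byte 0x08.
function hasGzipHeader (chunk) {
  return chunk[0] === 0x1F && chunk[1] === 0x8B && chunk[2] === 0x08
}

// Peek at the first chunk of `source`, then hand back either a gunzip
// stream or a plain pass-through, replaying the peeked bytes into it.
function gunzipMaybe (source, cb) {
  source.once('error', cb)
  source.once('readable', function () {
    var head = source.read()
    if (head === null) return cb(null, source) // empty input, nothing to sniff
    var out = hasGzipHeader(head) ? zlib.createGunzip() : new stream.PassThrough()
    out.write(head)   // replay the bytes consumed while sniffing
    source.pipe(out)  // stream the remainder through unchanged
    cb(null, out)
  })
}
```

In the diff itself this stage sits between the registry response and `jsonstream.parse()` in both `all-package-metadata.js` and the new `esearch.js`.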
.P diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1 index 049e228e4fd..61f97269bca 100644 --- a/deps/npm/man/man1/npm-access.1 +++ b/deps/npm/man/man1/npm-access.1 @@ -1,4 +1,4 @@ -.TH "NPM\-ACCESS" "1" "January 2017" "" "" +.TH "NPM\-ACCESS" "1" "February 2017" "" "" .SH "NAME" \fBnpm-access\fR \- Set access level on published packages .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1 index 3b68c1efff1..23862636365 100644 --- a/deps/npm/man/man1/npm-adduser.1 +++ b/deps/npm/man/man1/npm-adduser.1 @@ -1,4 +1,4 @@ -.TH "NPM\-ADDUSER" "1" "January 2017" "" "" +.TH "NPM\-ADDUSER" "1" "February 2017" "" "" .SH "NAME" \fBnpm-adduser\fR \- Add a registry user account .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-bin.1 b/deps/npm/man/man1/npm-bin.1 index c10997f3f63..fb6e29c0b97 100644 --- a/deps/npm/man/man1/npm-bin.1 +++ b/deps/npm/man/man1/npm-bin.1 @@ -1,4 +1,4 @@ -.TH "NPM\-BIN" "1" "January 2017" "" "" +.TH "NPM\-BIN" "1" "February 2017" "" "" .SH "NAME" \fBnpm-bin\fR \- Display npm bin folder .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1 index f95d7383862..6c023dddd95 100644 --- a/deps/npm/man/man1/npm-bugs.1 +++ b/deps/npm/man/man1/npm-bugs.1 @@ -1,4 +1,4 @@ -.TH "NPM\-BUGS" "1" "January 2017" "" "" +.TH "NPM\-BUGS" "1" "February 2017" "" "" .SH "NAME" \fBnpm-bugs\fR \- Bugs for a package in a web browser maybe .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-build.1 b/deps/npm/man/man1/npm-build.1 index 5016086c62f..b3a3d984a6e 100644 --- a/deps/npm/man/man1/npm-build.1 +++ b/deps/npm/man/man1/npm-build.1 @@ -1,4 +1,4 @@ -.TH "NPM\-BUILD" "1" "January 2017" "" "" +.TH "NPM\-BUILD" "1" "February 2017" "" "" .SH "NAME" \fBnpm-build\fR \- Build a package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-bundle.1 b/deps/npm/man/man1/npm-bundle.1 index f42848ce47f..84fdac2677b 100644 --- a/deps/npm/man/man1/npm-bundle.1 +++ b/deps/npm/man/man1/npm-bundle.1 @@ -1,4 +1,4 @@ -.TH "NPM\-BUNDLE" "1" "January 2017" "" "" +.TH "NPM\-BUNDLE" "1" "February 2017" "" "" .SH "NAME" \fBnpm-bundle\fR \- REMOVED .SH DESCRIPTION diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1 index 6ece7506032..c44e1081fc9 100644 --- a/deps/npm/man/man1/npm-cache.1 +++ b/deps/npm/man/man1/npm-cache.1 @@ -1,4 +1,4 @@ -.TH "NPM\-CACHE" "1" "January 2017" "" "" +.TH "NPM\-CACHE" "1" "February 2017" "" "" .SH "NAME" \fBnpm-cache\fR \- Manipulates packages cache .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1 index f3e72de7100..a861d751e31 100644 --- a/deps/npm/man/man1/npm-completion.1 +++ b/deps/npm/man/man1/npm-completion.1 @@ -1,4 +1,4 @@ -.TH "NPM\-COMPLETION" "1" "January 2017" "" "" +.TH "NPM\-COMPLETION" "1" "February 2017" "" "" .SH "NAME" \fBnpm-completion\fR \- Tab Completion for npm .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1 index b6088f4b42b..6667aebb77a 100644 --- a/deps/npm/man/man1/npm-config.1 +++ b/deps/npm/man/man1/npm-config.1 @@ -1,4 +1,4 @@ -.TH "NPM\-CONFIG" "1" "January 2017" "" "" +.TH "NPM\-CONFIG" "1" "February 2017" "" "" .SH "NAME" \fBnpm-config\fR \- Manage the npm configuration files .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1 index 964aa929aaa..317dac0523d 100644 --- a/deps/npm/man/man1/npm-dedupe.1 +++ b/deps/npm/man/man1/npm-dedupe.1 @@ -1,4 +1,4 @@ -.TH "NPM\-DEDUPE" "1" "January 2017" "" "" +.TH "NPM\-DEDUPE" "1" 
"February 2017" "" "" .SH "NAME" \fBnpm-dedupe\fR \- Reduce duplication .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1 index 6408c1095de..19293be079f 100644 --- a/deps/npm/man/man1/npm-deprecate.1 +++ b/deps/npm/man/man1/npm-deprecate.1 @@ -1,4 +1,4 @@ -.TH "NPM\-DEPRECATE" "1" "January 2017" "" "" +.TH "NPM\-DEPRECATE" "1" "February 2017" "" "" .SH "NAME" \fBnpm-deprecate\fR \- Deprecate a version of a package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1 index 418a383d2b5..31b287fa6f3 100644 --- a/deps/npm/man/man1/npm-dist-tag.1 +++ b/deps/npm/man/man1/npm-dist-tag.1 @@ -1,4 +1,4 @@ -.TH "NPM\-DIST\-TAG" "1" "January 2017" "" "" +.TH "NPM\-DIST\-TAG" "1" "February 2017" "" "" .SH "NAME" \fBnpm-dist-tag\fR \- Modify package distribution tags .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1 index 89d3cfb5b24..b8595d8c799 100644 --- a/deps/npm/man/man1/npm-docs.1 +++ b/deps/npm/man/man1/npm-docs.1 @@ -1,4 +1,4 @@ -.TH "NPM\-DOCS" "1" "January 2017" "" "" +.TH "NPM\-DOCS" "1" "February 2017" "" "" .SH "NAME" \fBnpm-docs\fR \- Docs for a package in a web browser maybe .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1 index 72ec355e472..d6fe7261a74 100644 --- a/deps/npm/man/man1/npm-doctor.1 +++ b/deps/npm/man/man1/npm-doctor.1 @@ -1,4 +1,4 @@ -.TH "NPM\-DOCTOR" "1" "January 2017" "" "" +.TH "NPM\-DOCTOR" "1" "February 2017" "" "" .SH "NAME" \fBnpm-doctor\fR \- Check your environments .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1 index 6476986e929..bb46a86b5a3 100644 --- a/deps/npm/man/man1/npm-edit.1 +++ b/deps/npm/man/man1/npm-edit.1 @@ -1,4 +1,4 @@ -.TH "NPM\-EDIT" "1" "January 2017" "" "" +.TH "NPM\-EDIT" "1" "February 2017" "" "" .SH "NAME" \fBnpm-edit\fR \- Edit an installed package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1 index 0d9ec036a74..2716637d346 100644 --- a/deps/npm/man/man1/npm-explore.1 +++ b/deps/npm/man/man1/npm-explore.1 @@ -1,4 +1,4 @@ -.TH "NPM\-EXPLORE" "1" "January 2017" "" "" +.TH "NPM\-EXPLORE" "1" "February 2017" "" "" .SH "NAME" \fBnpm-explore\fR \- Browse an installed package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1 index 5b5412f22ed..a4337630fed 100644 --- a/deps/npm/man/man1/npm-help-search.1 +++ b/deps/npm/man/man1/npm-help-search.1 @@ -1,4 +1,4 @@ -.TH "NPM\-HELP\-SEARCH" "1" "January 2017" "" "" +.TH "NPM\-HELP\-SEARCH" "1" "February 2017" "" "" .SH "NAME" \fBnpm-help-search\fR \- Search npm help documentation .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1 index bbdb7a1f6be..7997c03b2fc 100644 --- a/deps/npm/man/man1/npm-help.1 +++ b/deps/npm/man/man1/npm-help.1 @@ -1,4 +1,4 @@ -.TH "NPM\-HELP" "1" "January 2017" "" "" +.TH "NPM\-HELP" "1" "February 2017" "" "" .SH "NAME" \fBnpm-help\fR \- Get help on npm .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1 index 246516e8b40..7a89ee32f48 100644 --- a/deps/npm/man/man1/npm-init.1 +++ b/deps/npm/man/man1/npm-init.1 @@ -1,4 +1,4 @@ -.TH "NPM\-INIT" "1" "January 2017" "" "" +.TH "NPM\-INIT" "1" "February 2017" "" "" .SH "NAME" \fBnpm-init\fR \- Interactively create a package\.json file .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1 index b68e51af87e..b10135c3b0b 100644 
--- a/deps/npm/man/man1/npm-install-test.1 +++ b/deps/npm/man/man1/npm-install-test.1 @@ -1,4 +1,4 @@ -.TH "NPM" "" "January 2017" "" "" +.TH "NPM" "" "February 2017" "" "" .SH "NAME" \fBnpm\fR .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1 index 7fed7744662..ef483c88a1d 100644 --- a/deps/npm/man/man1/npm-install.1 +++ b/deps/npm/man/man1/npm-install.1 @@ -1,4 +1,4 @@ -.TH "NPM\-INSTALL" "1" "January 2017" "" "" +.TH "NPM\-INSTALL" "1" "February 2017" "" "" .SH "NAME" \fBnpm-install\fR \- Install a package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1 index 9d8eb459d4c..8c071ae7b49 100644 --- a/deps/npm/man/man1/npm-link.1 +++ b/deps/npm/man/man1/npm-link.1 @@ -1,4 +1,4 @@ -.TH "NPM\-LINK" "1" "January 2017" "" "" +.TH "NPM\-LINK" "1" "February 2017" "" "" .SH "NAME" \fBnpm-link\fR \- Symlink a package folder .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1 index e79ccba5a86..634ce0970e5 100644 --- a/deps/npm/man/man1/npm-logout.1 +++ b/deps/npm/man/man1/npm-logout.1 @@ -1,4 +1,4 @@ -.TH "NPM\-LOGOUT" "1" "January 2017" "" "" +.TH "NPM\-LOGOUT" "1" "February 2017" "" "" .SH "NAME" \fBnpm-logout\fR \- Log out of the registry .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index 6ce7bd5bc2e..9d96de5afc8 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -1,4 +1,4 @@ -.TH "NPM\-LS" "1" "January 2017" "" "" +.TH "NPM\-LS" "1" "February 2017" "" "" .SH "NAME" \fBnpm-ls\fR \- List installed packages .SH SYNOPSIS @@ -22,7 +22,7 @@ For example, running \fBnpm ls promzard\fP in npm's source tree will show: .P .RS 2 .nf -npm@4.1.2 /path/to/npm +npm@4.2.0 /path/to/npm └─┬ init\-package\-json@0\.0\.4 └── promzard@0\.1\.5 .fi diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1 index d324f09f821..4304a4ed1b2 100644 --- a/deps/npm/man/man1/npm-outdated.1 +++ b/deps/npm/man/man1/npm-outdated.1 @@ -1,4 +1,4 @@ -.TH "NPM\-OUTDATED" "1" "January 2017" "" "" +.TH "NPM\-OUTDATED" "1" "February 2017" "" "" .SH "NAME" \fBnpm-outdated\fR \- Check for outdated packages .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1 index 62d4a85f087..cf48a32062b 100644 --- a/deps/npm/man/man1/npm-owner.1 +++ b/deps/npm/man/man1/npm-owner.1 @@ -1,4 +1,4 @@ -.TH "NPM\-OWNER" "1" "January 2017" "" "" +.TH "NPM\-OWNER" "1" "February 2017" "" "" .SH "NAME" \fBnpm-owner\fR \- Manage package owners .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1 index 623dec83a4c..5e9b3eee5b3 100644 --- a/deps/npm/man/man1/npm-pack.1 +++ b/deps/npm/man/man1/npm-pack.1 @@ -1,4 +1,4 @@ -.TH "NPM\-PACK" "1" "January 2017" "" "" +.TH "NPM\-PACK" "1" "February 2017" "" "" .SH "NAME" \fBnpm-pack\fR \- Create a tarball from a package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1 index 24262c25804..518e76f0577 100644 --- a/deps/npm/man/man1/npm-ping.1 +++ b/deps/npm/man/man1/npm-ping.1 @@ -1,4 +1,4 @@ -.TH "NPM\-PING" "1" "January 2017" "" "" +.TH "NPM\-PING" "1" "February 2017" "" "" .SH "NAME" \fBnpm-ping\fR \- Ping npm registry .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1 index da1569e50e2..2f34f8cc535 100644 --- a/deps/npm/man/man1/npm-prefix.1 +++ b/deps/npm/man/man1/npm-prefix.1 @@ -1,4 +1,4 @@ -.TH "NPM\-PREFIX" "1" "January 2017" "" "" +.TH "NPM\-PREFIX" "1" "February 2017" "" "" .SH 
"NAME" \fBnpm-prefix\fR \- Display prefix .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1 index 895327b41e2..eb25bc604f5 100644 --- a/deps/npm/man/man1/npm-prune.1 +++ b/deps/npm/man/man1/npm-prune.1 @@ -1,4 +1,4 @@ -.TH "NPM\-PRUNE" "1" "January 2017" "" "" +.TH "NPM\-PRUNE" "1" "February 2017" "" "" .SH "NAME" \fBnpm-prune\fR \- Remove extraneous packages .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1 index cd3ddf45aea..f2bde252780 100644 --- a/deps/npm/man/man1/npm-publish.1 +++ b/deps/npm/man/man1/npm-publish.1 @@ -1,4 +1,4 @@ -.TH "NPM\-PUBLISH" "1" "January 2017" "" "" +.TH "NPM\-PUBLISH" "1" "February 2017" "" "" .SH "NAME" \fBnpm-publish\fR \- Publish a package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1 index 16cbea997d0..c708c0b9848 100644 --- a/deps/npm/man/man1/npm-rebuild.1 +++ b/deps/npm/man/man1/npm-rebuild.1 @@ -1,4 +1,4 @@ -.TH "NPM\-REBUILD" "1" "January 2017" "" "" +.TH "NPM\-REBUILD" "1" "February 2017" "" "" .SH "NAME" \fBnpm-rebuild\fR \- Rebuild a package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1 index e8b6f6e8f07..1aa5e949be8 100644 --- a/deps/npm/man/man1/npm-repo.1 +++ b/deps/npm/man/man1/npm-repo.1 @@ -1,4 +1,4 @@ -.TH "NPM\-REPO" "1" "January 2017" "" "" +.TH "NPM\-REPO" "1" "February 2017" "" "" .SH "NAME" \fBnpm-repo\fR \- Open package repository page in the browser .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1 index b326b4f9744..13748d1d580 100644 --- a/deps/npm/man/man1/npm-restart.1 +++ b/deps/npm/man/man1/npm-restart.1 @@ -1,4 +1,4 @@ -.TH "NPM\-RESTART" "1" "January 2017" "" "" +.TH "NPM\-RESTART" "1" "February 2017" "" "" .SH "NAME" \fBnpm-restart\fR \- Restart a package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1 index 3f9fd2a2b80..a3469bbb921 100644 --- a/deps/npm/man/man1/npm-root.1 +++ b/deps/npm/man/man1/npm-root.1 @@ -1,4 +1,4 @@ -.TH "NPM\-ROOT" "1" "January 2017" "" "" +.TH "NPM\-ROOT" "1" "February 2017" "" "" .SH "NAME" \fBnpm-root\fR \- Display npm root .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1 index 471a82425cb..5328771582b 100644 --- a/deps/npm/man/man1/npm-run-script.1 +++ b/deps/npm/man/man1/npm-run-script.1 @@ -1,4 +1,4 @@ -.TH "NPM\-RUN\-SCRIPT" "1" "January 2017" "" "" +.TH "NPM\-RUN\-SCRIPT" "1" "February 2017" "" "" .SH "NAME" \fBnpm-run-script\fR \- Run arbitrary package scripts .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1 index bdd3028bc7a..b5b35c7f686 100644 --- a/deps/npm/man/man1/npm-search.1 +++ b/deps/npm/man/man1/npm-search.1 @@ -1,4 +1,4 @@ -.TH "NPM\-SEARCH" "1" "January 2017" "" "" +.TH "NPM\-SEARCH" "1" "February 2017" "" "" .SH "NAME" \fBnpm-search\fR \- Search for packages .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1 index ddf341356f8..96205fd6c06 100644 --- a/deps/npm/man/man1/npm-shrinkwrap.1 +++ b/deps/npm/man/man1/npm-shrinkwrap.1 @@ -1,4 +1,4 @@ -.TH "NPM\-SHRINKWRAP" "1" "January 2017" "" "" +.TH "NPM\-SHRINKWRAP" "1" "February 2017" "" "" .SH "NAME" \fBnpm-shrinkwrap\fR \- Lock down dependency versions .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1 index fde504ee600..222e12d4a01 100644 --- a/deps/npm/man/man1/npm-star.1 +++ b/deps/npm/man/man1/npm-star.1 @@ -1,4 
+1,4 @@ -.TH "NPM\-STAR" "1" "January 2017" "" "" +.TH "NPM\-STAR" "1" "February 2017" "" "" .SH "NAME" \fBnpm-star\fR \- Mark your favorite packages .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1 index a2e44c9bc4c..daad14a186c 100644 --- a/deps/npm/man/man1/npm-stars.1 +++ b/deps/npm/man/man1/npm-stars.1 @@ -1,4 +1,4 @@ -.TH "NPM\-STARS" "1" "January 2017" "" "" +.TH "NPM\-STARS" "1" "February 2017" "" "" .SH "NAME" \fBnpm-stars\fR \- View packages marked as favorites .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1 index 9571756354f..466ad399481 100644 --- a/deps/npm/man/man1/npm-start.1 +++ b/deps/npm/man/man1/npm-start.1 @@ -1,4 +1,4 @@ -.TH "NPM\-START" "1" "January 2017" "" "" +.TH "NPM\-START" "1" "February 2017" "" "" .SH "NAME" \fBnpm-start\fR \- Start a package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1 index 4c2d64802ef..6646abc28ad 100644 --- a/deps/npm/man/man1/npm-stop.1 +++ b/deps/npm/man/man1/npm-stop.1 @@ -1,4 +1,4 @@ -.TH "NPM\-STOP" "1" "January 2017" "" "" +.TH "NPM\-STOP" "1" "February 2017" "" "" .SH "NAME" \fBnpm-stop\fR \- Stop a package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1 index 88a71c41bdb..5e349fc4f99 100644 --- a/deps/npm/man/man1/npm-team.1 +++ b/deps/npm/man/man1/npm-team.1 @@ -1,4 +1,4 @@ -.TH "NPM\-TEAM" "1" "January 2017" "" "" +.TH "NPM\-TEAM" "1" "February 2017" "" "" .SH "NAME" \fBnpm-team\fR \- Manage organization teams and team memberships .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1 index ee2f896ec5b..46adee17dba 100644 --- a/deps/npm/man/man1/npm-test.1 +++ b/deps/npm/man/man1/npm-test.1 @@ -1,4 +1,4 @@ -.TH "NPM\-TEST" "1" "January 2017" "" "" +.TH "NPM\-TEST" "1" "February 2017" "" "" .SH "NAME" \fBnpm-test\fR \- Test a package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1 index fb89c977f1e..25d1e0f8ef3 100644 --- a/deps/npm/man/man1/npm-uninstall.1 +++ b/deps/npm/man/man1/npm-uninstall.1 @@ -1,4 +1,4 @@ -.TH "NPM\-UNINSTALL" "1" "January 2017" "" "" +.TH "NPM\-UNINSTALL" "1" "February 2017" "" "" .SH "NAME" \fBnpm-uninstall\fR \- Remove a package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1 index ff7d0f7c120..0d116aace92 100644 --- a/deps/npm/man/man1/npm-unpublish.1 +++ b/deps/npm/man/man1/npm-unpublish.1 @@ -1,4 +1,4 @@ -.TH "NPM\-UNPUBLISH" "1" "January 2017" "" "" +.TH "NPM\-UNPUBLISH" "1" "February 2017" "" "" .SH "NAME" \fBnpm-unpublish\fR \- Remove a package from the registry .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1 index 4ba2029e5ee..c0232918c8f 100644 --- a/deps/npm/man/man1/npm-update.1 +++ b/deps/npm/man/man1/npm-update.1 @@ -1,4 +1,4 @@ -.TH "NPM\-UPDATE" "1" "January 2017" "" "" +.TH "NPM\-UPDATE" "1" "February 2017" "" "" .SH "NAME" \fBnpm-update\fR \- Update a package .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1 index b5b65dc5b63..498abaaa2fc 100644 --- a/deps/npm/man/man1/npm-version.1 +++ b/deps/npm/man/man1/npm-version.1 @@ -1,4 +1,4 @@ -.TH "NPM\-VERSION" "1" "January 2017" "" "" +.TH "NPM\-VERSION" "1" "February 2017" "" "" .SH "NAME" \fBnpm-version\fR \- Bump a package version .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1 index 4913e0bd42b..065050a24a4 100644 --- 
a/deps/npm/man/man1/npm-view.1 +++ b/deps/npm/man/man1/npm-view.1 @@ -1,4 +1,4 @@ -.TH "NPM\-VIEW" "1" "January 2017" "" "" +.TH "NPM\-VIEW" "1" "February 2017" "" "" .SH "NAME" \fBnpm-view\fR \- View registry info .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1 index 21207f0f786..e21e8144583 100644 --- a/deps/npm/man/man1/npm-whoami.1 +++ b/deps/npm/man/man1/npm-whoami.1 @@ -1,4 +1,4 @@ -.TH "NPM\-WHOAMI" "1" "January 2017" "" "" +.TH "NPM\-WHOAMI" "1" "February 2017" "" "" .SH "NAME" \fBnpm-whoami\fR \- Display npm username .SH SYNOPSIS diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index 71856669b13..e473ee6d7ad 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -1,4 +1,4 @@ -.TH "NPM" "1" "January 2017" "" "" +.TH "NPM" "1" "February 2017" "" "" .SH "NAME" \fBnpm\fR \- javascript package manager .SH SYNOPSIS @@ -10,7 +10,7 @@ npm [args] .RE .SH VERSION .P -4.1.2 +4.2.0 .SH DESCRIPTION .P npm is the package manager for the Node JavaScript platform\. It puts diff --git a/deps/npm/man/man5/npm-folders.5 b/deps/npm/man/man5/npm-folders.5 index 93ab0441319..b96a5b44e2f 100644 --- a/deps/npm/man/man5/npm-folders.5 +++ b/deps/npm/man/man5/npm-folders.5 @@ -1,4 +1,4 @@ -.TH "NPM\-FOLDERS" "5" "January 2017" "" "" +.TH "NPM\-FOLDERS" "5" "February 2017" "" "" .SH "NAME" \fBnpm-folders\fR \- Folder Structures Used by npm .SH DESCRIPTION diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5 index 93ab0441319..b96a5b44e2f 100644 --- a/deps/npm/man/man5/npm-global.5 +++ b/deps/npm/man/man5/npm-global.5 @@ -1,4 +1,4 @@ -.TH "NPM\-FOLDERS" "5" "January 2017" "" "" +.TH "NPM\-FOLDERS" "5" "February 2017" "" "" .SH "NAME" \fBnpm-folders\fR \- Folder Structures Used by npm .SH DESCRIPTION diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5 index 5bb1b64f410..004ff000949 100644 --- a/deps/npm/man/man5/npm-json.5 +++ b/deps/npm/man/man5/npm-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE\.JSON" "5" "January 2017" "" "" +.TH "PACKAGE\.JSON" "5" "February 2017" "" "" .SH "NAME" \fBpackage.json\fR \- Specifics of npm's package\.json handling .SH DESCRIPTION @@ -142,7 +142,7 @@ array of license objects: // Not valid metadata { "license" : { "type" : "ISC" - , "url" : "http://opensource\.org/licenses/ISC" + , "url" : "https://opensource\.org/licenses/ISC" } } @@ -150,10 +150,10 @@ array of license objects: { "licenses" : [ { "type": "MIT" - , "url": "http://www\.opensource\.org/licenses/mit\-license\.php" + , "url": "https://www\.opensource\.org/licenses/mit\-license\.php" } , { "type": "Apache\-2\.0" - , "url": "http://opensource\.org/licenses/apache2\.0\.php" + , "url": "https://opensource\.org/licenses/apache2\.0\.php" } ] } diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5 index e47da0d5167..5649907f750 100644 --- a/deps/npm/man/man5/npmrc.5 +++ b/deps/npm/man/man5/npmrc.5 @@ -1,4 +1,4 @@ -.TH "NPMRC" "5" "January 2017" "" "" +.TH "NPMRC" "5" "February 2017" "" "" .SH "NAME" \fBnpmrc\fR \- The npm config files .SH DESCRIPTION diff --git a/deps/npm/man/man5/package.json.5 b/deps/npm/man/man5/package.json.5 index 5bb1b64f410..004ff000949 100644 --- a/deps/npm/man/man5/package.json.5 +++ b/deps/npm/man/man5/package.json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE\.JSON" "5" "January 2017" "" "" +.TH "PACKAGE\.JSON" "5" "February 2017" "" "" .SH "NAME" \fBpackage.json\fR \- Specifics of npm's package\.json handling .SH DESCRIPTION @@ -142,7 +142,7 @@ array of license objects: // Not valid metadata 
{ "license" : { "type" : "ISC" - , "url" : "http://opensource\.org/licenses/ISC" + , "url" : "https://opensource\.org/licenses/ISC" } } @@ -150,10 +150,10 @@ array of license objects: { "licenses" : [ { "type": "MIT" - , "url": "http://www\.opensource\.org/licenses/mit\-license\.php" + , "url": "https://www\.opensource\.org/licenses/mit\-license\.php" } , { "type": "Apache\-2\.0" - , "url": "http://opensource\.org/licenses/apache2\.0\.php" + , "url": "https://opensource\.org/licenses/apache2\.0\.php" } ] } diff --git a/deps/npm/man/man7/npm-coding-style.7 b/deps/npm/man/man7/npm-coding-style.7 index d6854ea9efa..93caeb1fc22 100644 --- a/deps/npm/man/man7/npm-coding-style.7 +++ b/deps/npm/man/man7/npm-coding-style.7 @@ -1,4 +1,4 @@ -.TH "NPM\-CODING\-STYLE" "7" "January 2017" "" "" +.TH "NPM\-CODING\-STYLE" "7" "February 2017" "" "" .SH "NAME" \fBnpm-coding-style\fR \- npm's "funny" coding style .SH DESCRIPTION diff --git a/deps/npm/man/man7/npm-config.7 b/deps/npm/man/man7/npm-config.7 index 13cd8676301..23d3ca56ffa 100644 --- a/deps/npm/man/man7/npm-config.7 +++ b/deps/npm/man/man7/npm-config.7 @@ -1,4 +1,4 @@ -.TH "NPM\-CONFIG" "7" "January 2017" "" "" +.TH "NPM\-CONFIG" "7" "February 2017" "" "" .SH "NAME" \fBnpm-config\fR \- More than you probably want to know about npm configuration .SH DESCRIPTION @@ -798,6 +798,16 @@ stderr\. .P If the \fBcolor\fP config is set to true, then this stream will receive colored output if it is a TTY\. +.SS logs\-max +.RS 0 +.IP \(bu 2 +Default: 10 +.IP \(bu 2 +Type: Number + +.RE +.P +The maximum number of log files to store\. .SS long .RS 0 .IP \(bu 2 @@ -1151,6 +1161,17 @@ Type: String .RE .P Space\-separated options that are always passed to search\. +.SS searchlimit +.RS 0 +.IP \(bu 2 +Default: 20 +.IP \(bu 2 +Type: Number + +.RE +.P +Number of items to limit search results to\. Will not apply at all to legacy +searches\. .SS searchstaleness .RS 0 .IP \(bu 2 @@ -1160,7 +1181,8 @@ Type: Number .RE .P -The age of the cache, in seconds, before another registry request is made\. +The age of the cache, in seconds, before another registry request is made if +using legacy search endpoint\. 
.SS send\-metrics .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man7/npm-developers.7 b/deps/npm/man/man7/npm-developers.7 index a56bd183b4e..9e908846a2f 100644 --- a/deps/npm/man/man7/npm-developers.7 +++ b/deps/npm/man/man7/npm-developers.7 @@ -1,4 +1,4 @@ -.TH "NPM\-DEVELOPERS" "7" "January 2017" "" "" +.TH "NPM\-DEVELOPERS" "7" "February 2017" "" "" .SH "NAME" \fBnpm-developers\fR \- Developer Guide .SH DESCRIPTION diff --git a/deps/npm/man/man7/npm-disputes.7 b/deps/npm/man/man7/npm-disputes.7 index fc0634dfba5..f347da46f07 100644 --- a/deps/npm/man/man7/npm-disputes.7 +++ b/deps/npm/man/man7/npm-disputes.7 @@ -1,4 +1,4 @@ -.TH "NPM\-DISPUTES" "7" "January 2017" "" "" +.TH "NPM\-DISPUTES" "7" "February 2017" "" "" .SH "NAME" \fBnpm-disputes\fR \- Handling Module Name Disputes .P diff --git a/deps/npm/man/man7/npm-index.7 b/deps/npm/man/man7/npm-index.7 index e70504241ae..36a1e2d12a0 100644 --- a/deps/npm/man/man7/npm-index.7 +++ b/deps/npm/man/man7/npm-index.7 @@ -1,4 +1,4 @@ -.TH "NPM\-INDEX" "7" "January 2017" "" "" +.TH "NPM\-INDEX" "7" "February 2017" "" "" .SH "NAME" \fBnpm-index\fR \- Index of all npm documentation .SS npm help README diff --git a/deps/npm/man/man7/npm-orgs.7 b/deps/npm/man/man7/npm-orgs.7 index 5948bf85304..278b766a9e9 100644 --- a/deps/npm/man/man7/npm-orgs.7 +++ b/deps/npm/man/man7/npm-orgs.7 @@ -1,4 +1,4 @@ -.TH "NPM\-ORGS" "7" "January 2017" "" "" +.TH "NPM\-ORGS" "7" "February 2017" "" "" .SH "NAME" \fBnpm-orgs\fR \- Working with Teams & Orgs .SH DESCRIPTION diff --git a/deps/npm/man/man7/npm-registry.7 b/deps/npm/man/man7/npm-registry.7 index 639395eee65..83fcd4beeb0 100644 --- a/deps/npm/man/man7/npm-registry.7 +++ b/deps/npm/man/man7/npm-registry.7 @@ -1,4 +1,4 @@ -.TH "NPM\-REGISTRY" "7" "January 2017" "" "" +.TH "NPM\-REGISTRY" "7" "February 2017" "" "" .SH "NAME" \fBnpm-registry\fR \- The JavaScript Package Registry .SH DESCRIPTION diff --git a/deps/npm/man/man7/npm-scope.7 b/deps/npm/man/man7/npm-scope.7 index 5d362460c6a..be3a322b5cb 100644 --- a/deps/npm/man/man7/npm-scope.7 +++ b/deps/npm/man/man7/npm-scope.7 @@ -1,12 +1,12 @@ -.TH "NPM\-SCOPE" "7" "January 2017" "" "" +.TH "NPM\-SCOPE" "7" "February 2017" "" "" .SH "NAME" \fBnpm-scope\fR \- Scoped packages .SH DESCRIPTION .P All npm packages have a name\. Some package names also have a scope\. A scope -follows the usual rules for package names (url\-safe characters, no leading dots -or underscores)\. When used in package names, preceded by an @\-symbol and -followed by a slash, e\.g\. +follows the usual rules for package names (URL\-safe characters, no leading dots +or underscores)\. When used in package names, scopes are preceded by an \fB@\fP symbol +and followed by a slash, e\.g\. .P .RS 2 .nf @@ -17,20 +17,25 @@ followed by a slash, e\.g\. Scopes are a way of grouping related packages together, and also affect a few things about the way npm treats the package\. .P +Each npm user/organization has their own scope, and only you can add packages +in your scope\. This means you don't have to worry about someone taking your +package name ahead of you\. Thus it is also a good way to signal official packages +for organizations\. +.P Scoped packages can be published and installed as of \fBnpm@2\fP and are supported -by the primary npm registry\. The npm client is backwards\-compatible with -un\-scoped registries, so it can be used to work with scoped and un\-scoped -registries at the same time\. +by the primary npm registry\. Unscoped packages can depend on scoped packages and +vice versa\. 
The npm client is backwards\-compatible with unscoped registries, +so it can be used to work with scoped and unscoped registries at the same time\. .SH Installing scoped packages .P Scoped packages are installed to a sub\-folder of the regular installation folder, e\.g\. if your other packages are installed in \fBnode_modules/packagename\fP, -scoped modules will be in \fBnode_modules/@myorg/packagename\fP\|\. The scope folder -(\fB@myorg\fP) is simply the name of the scope preceded by an @\-symbol, and can +scoped modules will be installed in \fBnode_modules/@myorg/packagename\fP\|\. The scope +folder (\fB@myorg\fP) is simply the name of the scope preceded by an \fB@\fP symbol, and can contain any number of scoped packages\. .P A scoped package is installed by referencing it by name, preceded by an -@\-symbol, in \fBnpm install\fP: +\fB@\fP symbol, in \fBnpm install\fP: .P .RS 2 .nf @@ -48,7 +53,7 @@ Or in \fBpackage\.json\fP: .fi .RE .P -Note that if the @\-symbol is omitted in either case npm will instead attempt to +Note that if the \fB@\fP symbol is omitted, in either case, npm will instead attempt to install from GitHub; see npm help \fBnpm\-install\fP\|\. .SH Requiring scoped packages .P @@ -61,8 +66,8 @@ require('@myorg/mypackage') .fi .RE .P -There is nothing special about the way Node treats scope folders, this is -just specifying to require the module \fBmypackage\fP in the folder called \fB@myorg\fP\|\. +There is nothing special about the way Node treats scope folders\. This +simply requires the \fBmypackage\fP module in the folder named \fB@myorg\fP\|\. .SH Publishing scoped packages .P Scoped packages can be published from the CLI as of \fBnpm@2\fP and can be @@ -70,13 +75,13 @@ published to any registry that supports them, including the primary npm registry\. .P (As of 2015\-04\-19, and with npm 2\.0 or better, the primary npm registry -\fBdoes\fR support scoped packages) +\fBdoes\fR support scoped packages\.) .P If you wish, you may associate a scope with a registry; see below\. .SS Publishing public scoped packages to the primary npm registry .P To publish a public scoped package, you must specify \fB\-\-access public\fP with -the initial publication\. This will publish the package and set access +the initial publication\. This will publish the package and set access to \fBpublic\fP as if you had run \fBnpm access public\fP after publishing\. .SS Publishing private scoped packages to the npm registry .P @@ -86,7 +91,7 @@ account\. .P You can then publish the module with \fBnpm publish\fP or \fBnpm publish \-\-access restricted\fP, and it will be present in the npm registry, with -restricted access\. You can then change the access permissions, if +restricted access\. You can then change the access permissions, if desired, with \fBnpm access\fP or on the npmjs\.com website\. 
.SH Associating a scope with a registry .P @@ -125,6 +130,8 @@ npm help install npm help publish .IP \(bu 2 npm help access +.IP \(bu 2 +npm help 7 registry .RE diff --git a/deps/npm/man/man7/npm-scripts.7 b/deps/npm/man/man7/npm-scripts.7 index 4f8554367aa..d28e9a1b53d 100644 --- a/deps/npm/man/man7/npm-scripts.7 +++ b/deps/npm/man/man7/npm-scripts.7 @@ -1,4 +1,4 @@ -.TH "NPM\-SCRIPTS" "7" "January 2017" "" "" +.TH "NPM\-SCRIPTS" "7" "February 2017" "" "" .SH "NAME" \fBnpm-scripts\fR \- How npm handles the "scripts" field .SH DESCRIPTION diff --git a/deps/npm/man/man7/removing-npm.7 b/deps/npm/man/man7/removing-npm.7 index f6df37784c6..fcc488d3898 100644 --- a/deps/npm/man/man7/removing-npm.7 +++ b/deps/npm/man/man7/removing-npm.7 @@ -1,4 +1,4 @@ -.TH "NPM\-REMOVAL" "1" "January 2017" "" "" +.TH "NPM\-REMOVAL" "1" "February 2017" "" "" .SH "NAME" \fBnpm-removal\fR \- Cleaning the Slate .SH SYNOPSIS diff --git a/deps/npm/man/man7/semver.7 b/deps/npm/man/man7/semver.7 index e71137f4739..1c452d2bb75 100644 --- a/deps/npm/man/man7/semver.7 +++ b/deps/npm/man/man7/semver.7 @@ -1,4 +1,4 @@ -.TH "SEMVER" "7" "January 2017" "" "" +.TH "SEMVER" "7" "February 2017" "" "" .SH "NAME" \fBsemver\fR \- The semantic versioner for npm .SH Usage diff --git a/deps/npm/node_modules/ansi-regex/index.js b/deps/npm/node_modules/ansi-regex/index.js index 4906755bc93..b9574ed7e82 100644 --- a/deps/npm/node_modules/ansi-regex/index.js +++ b/deps/npm/node_modules/ansi-regex/index.js @@ -1,4 +1,4 @@ 'use strict'; module.exports = function () { - return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g; + return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]/g; }; diff --git a/deps/npm/node_modules/ansi-regex/package.json b/deps/npm/node_modules/ansi-regex/package.json index 36b92255d4d..8ce656deb38 100644 --- a/deps/npm/node_modules/ansi-regex/package.json +++ b/deps/npm/node_modules/ansi-regex/package.json @@ -1,39 +1,84 @@ { - "name": "ansi-regex", - "version": "2.0.0", - "description": "Regular expression for matching ANSI escape codes", - "license": "MIT", - "repository": { - "type": "git", - "url": "git+https://github.com/sindresorhus/ansi-regex.git" + "_args": [ + [ + { + "raw": "ansi-regex@2.1.1", + "scope": null, + "escapedName": "ansi-regex", + "name": "ansi-regex", + "rawSpec": "2.1.1", + "spec": "2.1.1", + "type": "version" + }, + "/Users/zkat/Documents/code/npm" + ] + ], + "_from": "ansi-regex@2.1.1", + "_id": "ansi-regex@2.1.1", + "_inCache": true, + "_location": "/ansi-regex", + "_nodeVersion": "0.10.32", + "_npmOperationalInternal": { + "host": "packages-18-east.internal.npmjs.com", + "tmp": "tmp/ansi-regex-2.1.1.tgz_1484363378013_0.4482989883981645" + }, + "_npmUser": { + "name": "qix", + "email": "i.am.qix@gmail.com" + }, + "_npmVersion": "2.14.2", + "_phantomChildren": {}, + "_requested": { + "raw": "ansi-regex@2.1.1", + "scope": null, + "escapedName": "ansi-regex", + "name": "ansi-regex", + "rawSpec": "2.1.1", + "spec": "2.1.1", + "type": "version" }, + "_requiredBy": [ + "#USER", + "/", + "/npm-registry-couchapp/couchapp/nano/follow/request/har-validator/chalk/has-ansi", + "/request/har-validator/chalk/has-ansi", + "/standard/eslint/chalk/has-ansi", + "/standard/eslint/inquirer", + "/strip-ansi", + "/tap/coveralls/request/har-validator/chalk/has-ansi" + ], + "_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "_shasum": "c3b33ab5ee360d86e0e628f0468ae7ef27d654df", + "_shrinkwrap": null, + "_spec": 
"ansi-regex@2.1.1", + "_where": "/Users/zkat/Documents/code/npm", "author": { "name": "Sindre Sorhus", "email": "sindresorhus@gmail.com", "url": "sindresorhus.com" }, - "maintainers": [ - { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - { - "name": "Joshua Appelman", - "email": "jappelman@xebia.com", - "url": "jbnicolai.com" - } - ], + "bugs": { + "url": "https://github.com/chalk/ansi-regex/issues" + }, + "dependencies": {}, + "description": "Regular expression for matching ANSI escape codes", + "devDependencies": { + "ava": "0.17.0", + "xo": "0.16.0" + }, + "directories": {}, + "dist": { + "shasum": "c3b33ab5ee360d86e0e628f0468ae7ef27d654df", + "tarball": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz" + }, "engines": { "node": ">=0.10.0" }, - "scripts": { - "test": "mocha test/test.js", - "view-supported": "node test/viewCodes.js" - }, "files": [ "index.js" ], + "gitHead": "7c908e7b4eb6cd82bfe1295e33fdf6d166c7ed85", + "homepage": "https://github.com/chalk/ansi-regex#readme", "keywords": [ "ansi", "styles", @@ -61,17 +106,33 @@ "find", "pattern" ], - "devDependencies": { - "mocha": "*" + "license": "MIT", + "maintainers": [ + { + "name": "qix", + "email": "i.am.qix@gmail.com" + }, + { + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" + } + ], + "name": "ansi-regex", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/chalk/ansi-regex.git" }, - "readme": "# ansi-regex [![Build Status](https://travis-ci.org/sindresorhus/ansi-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/ansi-regex)\n\n> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)\n\n\n## Install\n\n```\n$ npm install --save ansi-regex\n```\n\n\n## Usage\n\n```js\nvar ansiRegex = require('ansi-regex');\n\nansiRegex().test('\\u001b[4mcake\\u001b[0m');\n//=> true\n\nansiRegex().test('cake');\n//=> false\n\n'\\u001b[4mcake\\u001b[0m'.match(ansiRegex());\n//=> ['\\u001b[4m', '\\u001b[0m']\n```\n\n\n## License\n\nMIT © [Sindre Sorhus](http://sindresorhus.com)\n", - "readmeFilename": "readme.md", - "bugs": { - "url": "https://github.com/sindresorhus/ansi-regex/issues" + "scripts": { + "test": "xo && ava --verbose", + "view-supported": "node fixtures/view-codes.js" }, - "homepage": "https://github.com/sindresorhus/ansi-regex#readme", - "_id": "ansi-regex@2.0.0", - "_shasum": "c5061b6e0ef8a81775e50f5d66151bf6bf371107", - "_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", - "_from": "ansi-regex@2.0.0" + "version": "2.1.1", + "xo": { + "rules": { + "guard-for-in": 0, + "no-loop-func": 0 + } + } } diff --git a/deps/npm/node_modules/ansi-regex/readme.md b/deps/npm/node_modules/ansi-regex/readme.md index 1a4894ec111..6a928edf0f6 100644 --- a/deps/npm/node_modules/ansi-regex/readme.md +++ b/deps/npm/node_modules/ansi-regex/readme.md @@ -1,4 +1,4 @@ -# ansi-regex [![Build Status](https://travis-ci.org/sindresorhus/ansi-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/ansi-regex) +# ansi-regex [![Build Status](https://travis-ci.org/chalk/ansi-regex.svg?branch=master)](https://travis-ci.org/chalk/ansi-regex) > Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code) @@ -13,7 +13,7 @@ $ npm install --save ansi-regex ## Usage ```js -var ansiRegex = require('ansi-regex'); +const ansiRegex = require('ansi-regex'); 
ansiRegex().test('\u001b[4mcake\u001b[0m'); //=> true @@ -25,6 +25,14 @@ ansiRegex().test('cake'); //=> ['\u001b[4m', '\u001b[0m'] ``` +## FAQ + +### Why do you test for codes not in the ECMA 48 standard? + +Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. If I recall correctly, we test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them. + +On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out. + ## License diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/.npmignore b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/.npmignore deleted file mode 100644 index 38344f87a62..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/.npmignore +++ /dev/null @@ -1,5 +0,0 @@ -build/ -test/ -examples/ -fs.js -zlib.js \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/.travis.yml b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/.travis.yml deleted file mode 100644 index 1b82118460c..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/.travis.yml +++ /dev/null @@ -1,52 +0,0 @@ -sudo: false -language: node_js -before_install: - - npm install -g npm@2 - - npm install -g npm -notifications: - email: false -matrix: - fast_finish: true - allow_failures: - - env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest" - - env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest" - include: - - node_js: '0.8' - env: TASK=test - - node_js: '0.10' - env: TASK=test - - node_js: '0.11' - env: TASK=test - - node_js: '0.12' - env: TASK=test - - node_js: 1 - env: TASK=test - - node_js: 2 - env: TASK=test - - node_js: 3 - env: TASK=test - - node_js: 4 - env: TASK=test - - node_js: 5 - env: TASK=test - - node_js: 5 - env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="-3..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="-3..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest" -script: "npm run $TASK" -env: - 
global: - - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= - - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/.zuul.yml b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/.zuul.yml deleted file mode 100644 index 96d9cfbd386..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/.zuul.yml +++ /dev/null @@ -1 +0,0 @@ -ui: tape diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/LICENSE b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/LICENSE deleted file mode 100644 index e3d4e695a4c..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright Joyent, Inc. and other Node contributors. All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/README.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/README.md deleted file mode 100644 index 1a67c48cd03..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/README.md +++ /dev/null @@ -1,36 +0,0 @@ -# readable-stream - -***Node-core v5.8.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) - - -[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) -[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) - - -[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) - -```bash -npm install --save readable-stream -``` - -***Node-core streams for userland*** - -This package is a mirror of the Streams2 and Streams3 implementations in -Node-core, including [documentation](doc/stream.markdown). 
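As a brief illustration of the "mirror of core streams" relationship described above (a sketch added for clarity; it is not part of the original readable-stream README), swapping `require('stream')` for `require('readable-stream')` is usually the only change needed:

```js
// Minimal sketch: the same Readable class, sourced from the userland
// mirror instead of Node core.
const Readable = require('readable-stream').Readable;

const source = new Readable({
  read: function(n) {
    this.push('hello\n'); // queue one chunk...
    this.push(null);      // ...then signal end-of-stream
  }
});

source.on('data', (chunk) => {
  console.log(chunk.toString()); // 'hello'
});
```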
- -If you want to guarantee a stable streams base, regardless of what version of -Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). - -As of version 2.0.0 **readable-stream** uses semantic versioning. - -# Streams WG Team Members - -* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> - - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B -* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> - - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 -* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> - - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D -* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> -* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> -* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/doc/stream.markdown b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/doc/stream.markdown deleted file mode 100644 index 0bc3819e63b..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/doc/stream.markdown +++ /dev/null @@ -1,1760 +0,0 @@ -# Stream - - Stability: 2 - Stable - -A stream is an abstract interface implemented by various objects in -Node.js. For example a [request to an HTTP server][http-incoming-message] is a -stream, as is [`process.stdout`][]. Streams are readable, writable, or both. All -streams are instances of [`EventEmitter`][]. - -You can load the Stream base classes by doing `require('stream')`. -There are base classes provided for [Readable][] streams, [Writable][] -streams, [Duplex][] streams, and [Transform][] streams. - -This document is split up into 3 sections: - -1. The first section explains the parts of the API that you need to be - aware of to use streams in your programs. -2. The second section explains the parts of the API that you need to - use if you implement your own custom streams yourself. The API is designed to - make this easy for you to do. -3. The third section goes into more depth about how streams work, - including some of the internal mechanisms and functions that you - should probably not modify unless you definitely know what you are - doing. - - -## API for Stream Consumers - - - -Streams can be either [Readable][], [Writable][], or both ([Duplex][]). - -All streams are EventEmitters, but they also have other custom methods -and properties depending on whether they are Readable, Writable, or -Duplex. - -If a stream is both Readable and Writable, then it implements all of -the methods and events. So, a [Duplex][] or [Transform][] stream is -fully described by this API, though their implementation may be -somewhat different. - -It is not necessary to implement Stream interfaces in order to consume -streams in your programs. If you **are** implementing streaming -interfaces in your own program, please also refer to -[API for Stream Implementors][]. - -Almost all Node.js programs, no matter how simple, use Streams in some -way. 
Here is an example of using Streams in an Node.js program: - -```js -const http = require('http'); - -var server = http.createServer( (req, res) => { - // req is an http.IncomingMessage, which is a Readable Stream - // res is an http.ServerResponse, which is a Writable Stream - - var body = ''; - // we want to get the data as utf8 strings - // If you don't set an encoding, then you'll get Buffer objects - req.setEncoding('utf8'); - - // Readable streams emit 'data' events once a listener is added - req.on('data', (chunk) => { - body += chunk; - }); - - // the end event tells you that you have entire body - req.on('end', () => { - try { - var data = JSON.parse(body); - } catch (er) { - // uh oh! bad json! - res.statusCode = 400; - return res.end(`error: ${er.message}`); - } - - // write back something interesting to the user: - res.write(typeof data); - res.end(); - }); -}); - -server.listen(1337); - -// $ curl localhost:1337 -d '{}' -// object -// $ curl localhost:1337 -d '"foo"' -// string -// $ curl localhost:1337 -d 'not json' -// error: Unexpected token o -``` - -### Class: stream.Duplex - -Duplex streams are streams that implement both the [Readable][] and -[Writable][] interfaces. - -Examples of Duplex streams include: - -* [TCP sockets][] -* [zlib streams][zlib] -* [crypto streams][crypto] - -### Class: stream.Readable - - - -The Readable stream interface is the abstraction for a *source* of -data that you are reading from. In other words, data comes *out* of a -Readable stream. - -A Readable stream will not start emitting data until you indicate that -you are ready to receive it. - -Readable streams have two "modes": a **flowing mode** and a **paused -mode**. When in flowing mode, data is read from the underlying system -and provided to your program as fast as possible. In paused mode, you -must explicitly call [`stream.read()`][stream-read] to get chunks of data out. -Streams start out in paused mode. - -**Note**: If no data event handlers are attached, and there are no -[`stream.pipe()`][] destinations, and the stream is switched into flowing -mode, then data will be lost. - -You can switch to flowing mode by doing any of the following: - -* Adding a [`'data'`][] event handler to listen for data. -* Calling the [`stream.resume()`][stream-resume] method to explicitly open the - flow. -* Calling the [`stream.pipe()`][] method to send the data to a [Writable][]. - -You can switch back to paused mode by doing either of the following: - -* If there are no pipe destinations, by calling the - [`stream.pause()`][stream-pause] method. -* If there are pipe destinations, by removing any [`'data'`][] event - handlers, and removing all pipe destinations by calling the - [`stream.unpipe()`][] method. - -Note that, for backwards compatibility reasons, removing [`'data'`][] -event handlers will **not** automatically pause the stream. Also, if -there are piped destinations, then calling [`stream.pause()`][stream-pause] will -not guarantee that the stream will *remain* paused once those -destinations drain and ask for more data. - -Examples of readable streams include: - -* [HTTP responses, on the client][http-incoming-message] -* [HTTP requests, on the server][http-incoming-message] -* [fs read streams][] -* [zlib streams][zlib] -* [crypto streams][crypto] -* [TCP sockets][] -* [child process stdout and stderr][] -* [`process.stdin`][] - -#### Event: 'close' - -Emitted when the stream and any of its underlying resources (a file -descriptor, for example) have been closed. 
The event indicates that -no more events will be emitted, and no further computation will occur. - -Not all streams will emit the `'close'` event. - -#### Event: 'data' - -* `chunk` {Buffer|String} The chunk of data. - -Attaching a `'data'` event listener to a stream that has not been -explicitly paused will switch the stream into flowing mode. Data will -then be passed as soon as it is available. - -If you just want to get all the data out of the stream as fast as -possible, this is the best way to do so. - -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); -}); -``` - -#### Event: 'end' - -This event fires when there will be no more data to read. - -Note that the `'end'` event **will not fire** unless the data is -completely consumed. This can be done by switching into flowing mode, -or by calling [`stream.read()`][stream-read] repeatedly until you get to the -end. - -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); -}); -readable.on('end', () => { - console.log('there will be no more data.'); -}); -``` - -#### Event: 'error' - -* {Error Object} - -Emitted if there was an error receiving data. - -#### Event: 'readable' - -When a chunk of data can be read from the stream, it will emit a -`'readable'` event. - -In some cases, listening for a `'readable'` event will cause some data -to be read into the internal buffer from the underlying system, if it -hadn't already. - -```javascript -var readable = getReadableStreamSomehow(); -readable.on('readable', () => { - // there is some data to read now -}); -``` - -Once the internal buffer is drained, a `'readable'` event will fire -again when more data is available. - -The `'readable'` event is not emitted in the "flowing" mode with the -sole exception of the last one, on end-of-stream. - -The `'readable'` event indicates that the stream has new information: -either new data is available or the end of the stream has been reached. -In the former case, [`stream.read()`][stream-read] will return that data. In the -latter case, [`stream.read()`][stream-read] will return null. For instance, in -the following example, `foo.txt` is an empty file: - -```js -const fs = require('fs'); -var rr = fs.createReadStream('foo.txt'); -rr.on('readable', () => { - console.log('readable:', rr.read()); -}); -rr.on('end', () => { - console.log('end'); -}); -``` - -The output of running this script is: - -``` -$ node test.js -readable: null -end -``` - -#### readable.isPaused() - -* Return: {Boolean} - -This method returns whether or not the `readable` has been **explicitly** -paused by client code (using [`stream.pause()`][stream-pause] without a -corresponding [`stream.resume()`][stream-resume]). - -```js -var readable = new stream.Readable - -readable.isPaused() // === false -readable.pause() -readable.isPaused() // === true -readable.resume() -readable.isPaused() // === false -``` - -#### readable.pause() - -* Return: `this` - -This method will cause a stream in flowing mode to stop emitting -[`'data'`][] events, switching out of flowing mode. Any data that becomes -available will remain in the internal buffer. 
- -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); - readable.pause(); - console.log('there will be no more data for 1 second'); - setTimeout(() => { - console.log('now data will start flowing again'); - readable.resume(); - }, 1000); -}); -``` - -#### readable.pipe(destination[, options]) - -* `destination` {stream.Writable} The destination for writing data -* `options` {Object} Pipe options - * `end` {Boolean} End the writer when the reader ends. Default = `true` - -This method pulls all the data out of a readable stream, and writes it -to the supplied destination, automatically managing the flow so that -the destination is not overwhelmed by a fast readable stream. - -Multiple destinations can be piped to safely. - -```js -var readable = getReadableStreamSomehow(); -var writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt' -readable.pipe(writable); -``` - -This function returns the destination stream, so you can set up pipe -chains like so: - -```js -var r = fs.createReadStream('file.txt'); -var z = zlib.createGzip(); -var w = fs.createWriteStream('file.txt.gz'); -r.pipe(z).pipe(w); -``` - -For example, emulating the Unix `cat` command: - -```js -process.stdin.pipe(process.stdout); -``` - -By default [`stream.end()`][stream-end] is called on the destination when the -source stream emits [`'end'`][], so that `destination` is no longer writable. -Pass `{ end: false }` as `options` to keep the destination stream open. - -This keeps `writer` open so that "Goodbye" can be written at the -end. - -```js -reader.pipe(writer, { end: false }); -reader.on('end', () => { - writer.end('Goodbye\n'); -}); -``` - -Note that [`process.stderr`][] and [`process.stdout`][] are never closed until -the process exits, regardless of the specified options. - -#### readable.read([size]) - -* `size` {Number} Optional argument to specify how much data to read. -* Return {String|Buffer|Null} - -The `read()` method pulls some data out of the internal buffer and -returns it. If there is no data available, then it will return -`null`. - -If you pass in a `size` argument, then it will return that many -bytes. If `size` bytes are not available, then it will return `null`, -unless we've ended, in which case it will return the data remaining -in the buffer. - -If you do not specify a `size` argument, then it will return all the -data in the internal buffer. - -This method should only be called in paused mode. In flowing mode, -this method is called automatically until the internal buffer is -drained. - -```js -var readable = getReadableStreamSomehow(); -readable.on('readable', () => { - var chunk; - while (null !== (chunk = readable.read())) { - console.log('got %d bytes of data', chunk.length); - } -}); -``` - -If this method returns a data chunk, then it will also trigger the -emission of a [`'data'`][] event. - -Note that calling [`stream.read([size])`][stream-read] after the [`'end'`][] -event has been triggered will return `null`. No runtime error will be raised. - -#### readable.resume() - -* Return: `this` - -This method will cause the readable stream to resume emitting [`'data'`][] -events. - -This method will switch the stream into flowing mode. If you do *not* -want to consume the data from a stream, but you *do* want to get to -its [`'end'`][] event, you can call [`stream.resume()`][stream-resume] to open -the flow of data. 
- -```js -var readable = getReadableStreamSomehow(); -readable.resume(); -readable.on('end', () => { - console.log('got to the end, but did not read anything'); -}); -``` - -#### readable.setEncoding(encoding) - -* `encoding` {String} The encoding to use. -* Return: `this` - -Call this function to cause the stream to return strings of the specified -encoding instead of Buffer objects. For example, if you do -`readable.setEncoding('utf8')`, then the output data will be interpreted as -UTF-8 data, and returned as strings. If you do `readable.setEncoding('hex')`, -then the data will be encoded in hexadecimal string format. - -This properly handles multi-byte characters that would otherwise be -potentially mangled if you simply pulled the Buffers directly and -called [`buf.toString(encoding)`][] on them. If you want to read the data -as strings, always use this method. - -Also you can disable any encoding at all with `readable.setEncoding(null)`. -This approach is very useful if you deal with binary data or with large -multi-byte strings spread out over multiple chunks. - -```js -var readable = getReadableStreamSomehow(); -readable.setEncoding('utf8'); -readable.on('data', (chunk) => { - assert.equal(typeof chunk, 'string'); - console.log('got %d characters of string data', chunk.length); -}); -``` - -#### readable.unpipe([destination]) - -* `destination` {stream.Writable} Optional specific stream to unpipe - -This method will remove the hooks set up for a previous [`stream.pipe()`][] -call. - -If the destination is not specified, then all pipes are removed. - -If the destination is specified, but no pipe is set up for it, then -this is a no-op. - -```js -var readable = getReadableStreamSomehow(); -var writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt', -// but only for the first second -readable.pipe(writable); -setTimeout(() => { - console.log('stop writing to file.txt'); - readable.unpipe(writable); - console.log('manually close the file stream'); - writable.end(); -}, 1000); -``` - -#### readable.unshift(chunk) - -* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue - -This is useful in certain cases where a stream is being consumed by a -parser, which needs to "un-consume" some data that it has -optimistically pulled out of the source, so that the stream can be -passed on to some other party. - -Note that `stream.unshift(chunk)` cannot be called after the [`'end'`][] event -has been triggered; a runtime error will be raised. - -If you find that you must often call `stream.unshift(chunk)` in your -programs, consider implementing a [Transform][] stream instead. (See [API -for Stream Implementors][].) 
- -```js -// Pull off a header delimited by \n\n -// use unshift() if we get too much -// Call the callback with (error, header, stream) -const StringDecoder = require('string_decoder').StringDecoder; -function parseHeader(stream, callback) { - stream.on('error', callback); - stream.on('readable', onReadable); - var decoder = new StringDecoder('utf8'); - var header = ''; - function onReadable() { - var chunk; - while (null !== (chunk = stream.read())) { - var str = decoder.write(chunk); - if (str.match(/\n\n/)) { - // found the header boundary - var split = str.split(/\n\n/); - header += split.shift(); - var remaining = split.join('\n\n'); - var buf = new Buffer(remaining, 'utf8'); - if (buf.length) - stream.unshift(buf); - stream.removeListener('error', callback); - stream.removeListener('readable', onReadable); - // now the body of the message can be read from the stream. - callback(null, header, stream); - } else { - // still reading the header. - header += str; - } - } - } -} -``` - -Note that, unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)` -will not end the reading process by resetting the internal reading state of the -stream. This can cause unexpected results if `unshift()` is called during a -read (i.e. from within a [`stream._read()`][stream-_read] implementation on a -custom stream). Following the call to `unshift()` with an immediate -[`stream.push('')`][stream-push] will reset the reading state appropriately, -however it is best to simply avoid calling `unshift()` while in the process of -performing a read. - -#### readable.wrap(stream) - -* `stream` {Stream} An "old style" readable stream - -Versions of Node.js prior to v0.10 had streams that did not implement the -entire Streams API as it is today. (See [Compatibility][] for -more information.) - -If you are using an older Node.js library that emits [`'data'`][] events and -has a [`stream.pause()`][stream-pause] method that is advisory only, then you -can use the `wrap()` method to create a [Readable][] stream that uses the old -stream as its data source. - -You will very rarely ever need to call this function, but it exists -as a convenience for interacting with old Node.js programs and libraries. - -For example: - -```js -const OldReader = require('./old-api-module.js').OldReader; -const Readable = require('stream').Readable; -const oreader = new OldReader; -const myReader = new Readable().wrap(oreader); - -myReader.on('readable', () => { - myReader.read(); // etc. -}); -``` - -### Class: stream.Transform - -Transform streams are [Duplex][] streams where the output is in some way -computed from the input. They implement both the [Readable][] and -[Writable][] interfaces. - -Examples of Transform streams include: - -* [zlib streams][zlib] -* [crypto streams][crypto] - -### Class: stream.Writable - - - -The Writable stream interface is an abstraction for a *destination* -that you are writing data *to*. - -Examples of writable streams include: - -* [HTTP requests, on the client][] -* [HTTP responses, on the server][] -* [fs write streams][] -* [zlib streams][zlib] -* [crypto streams][crypto] -* [TCP sockets][] -* [child process stdin][] -* [`process.stdout`][], [`process.stderr`][] - -#### Event: 'drain' - -If a [`stream.write(chunk)`][stream-write] call returns `false`, then the -`'drain'` event will indicate when it is appropriate to begin writing more data -to the stream. - -```js -// Write the data to the supplied writable stream one million times. -// Be attentive to back-pressure. 
-function writeOneMillionTimes(writer, data, encoding, callback) { - var i = 1000000; - write(); - function write() { - var ok = true; - do { - i -= 1; - if (i === 0) { - // last time! - writer.write(data, encoding, callback); - } else { - // see if we should continue, or wait - // don't pass the callback, because we're not done yet. - ok = writer.write(data, encoding); - } - } while (i > 0 && ok); - if (i > 0) { - // had to stop early! - // write some more once it drains - writer.once('drain', write); - } - } -} -``` - -#### Event: 'error' - -* {Error} - -Emitted if there was an error when writing or piping data. - -#### Event: 'finish' - -When the [`stream.end()`][stream-end] method has been called, and all data has -been flushed to the underlying system, this event is emitted. - -```javascript -var writer = getWritableStreamSomehow(); -for (var i = 0; i < 100; i ++) { - writer.write('hello, #${i}!\n'); -} -writer.end('this is the end\n'); -writer.on('finish', () => { - console.error('all writes are now complete.'); -}); -``` - -#### Event: 'pipe' - -* `src` {stream.Readable} source stream that is piping to this writable - -This is emitted whenever the [`stream.pipe()`][] method is called on a readable -stream, adding this writable to its set of destinations. - -```js -var writer = getWritableStreamSomehow(); -var reader = getReadableStreamSomehow(); -writer.on('pipe', (src) => { - console.error('something is piping into the writer'); - assert.equal(src, reader); -}); -reader.pipe(writer); -``` - -#### Event: 'unpipe' - -* `src` {[Readable][] Stream} The source stream that - [unpiped][`stream.unpipe()`] this writable - -This is emitted whenever the [`stream.unpipe()`][] method is called on a -readable stream, removing this writable from its set of destinations. - -```js -var writer = getWritableStreamSomehow(); -var reader = getReadableStreamSomehow(); -writer.on('unpipe', (src) => { - console.error('something has stopped piping into the writer'); - assert.equal(src, reader); -}); -reader.pipe(writer); -reader.unpipe(writer); -``` - -#### writable.cork() - -Forces buffering of all writes. - -Buffered data will be flushed either at [`stream.uncork()`][] or at -[`stream.end()`][stream-end] call. - -#### writable.end([chunk][, encoding][, callback]) - -* `chunk` {String|Buffer} Optional data to write -* `encoding` {String} The encoding, if `chunk` is a String -* `callback` {Function} Optional callback for when the stream is finished - -Call this method when no more data will be written to the stream. If supplied, -the callback is attached as a listener on the [`'finish'`][] event. - -Calling [`stream.write()`][stream-write] after calling -[`stream.end()`][stream-end] will raise an error. - -```js -// write 'hello, ' and then end with 'world!' -var file = fs.createWriteStream('example.txt'); -file.write('hello, '); -file.end('world!'); -// writing more now is not allowed! -``` - -#### writable.setDefaultEncoding(encoding) - -* `encoding` {String} The new default encoding - -Sets the default encoding for a writable stream. - -#### writable.uncork() - -Flush all data, buffered since [`stream.cork()`][] call. - -#### writable.write(chunk[, encoding][, callback]) - -* `chunk` {String|Buffer} The data to write -* `encoding` {String} The encoding, if `chunk` is a String -* `callback` {Function} Callback for when this chunk of data is flushed -* Returns: {Boolean} `true` if the data was handled completely. 
- -This method writes some data to the underlying system, and calls the -supplied callback once the data has been fully handled. - -The return value indicates if you should continue writing right now. -If the data had to be buffered internally, then it will return -`false`. Otherwise, it will return `true`. - -This return value is strictly advisory. You MAY continue to write, -even if it returns `false`. However, writes will be buffered in -memory, so it is best not to do this excessively. Instead, wait for -the [`'drain'`][] event before writing more data. - - -## API for Stream Implementors - - - -To implement any sort of stream, the pattern is the same: - -1. Extend the appropriate parent class in your own subclass. (The - [`util.inherits()`][] method is particularly helpful for this.) -2. Call the appropriate parent class constructor in your constructor, - to be sure that the internal mechanisms are set up properly. -3. Implement one or more specific methods, as detailed below. - -The class to extend and the method(s) to implement depend on the sort -of stream class you are writing: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| Use-case | Class | Method(s) to implement |
-|----------|-------|-------------------------|
-| Reading only | [Readable](#stream_class_stream_readable_1) | [_read][stream-_read] |
-| Writing only | [Writable](#stream_class_stream_writable_1) | [_write][stream-_write], [_writev][stream-_writev] |
-| Reading and writing | [Duplex](#stream_class_stream_duplex_1) | [_read][stream-_read], [_write][stream-_write], [_writev][stream-_writev] |
-| Operate on written data, then read the result | [Transform](#stream_class_stream_transform_1) | [_transform][stream-_transform], [_flush][stream-_flush] |
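To make the table concrete, here is a minimal sketch of the "writing only" row (added for illustration; the class below does not appear in the original document): extend the class, call its constructor, and implement the required method.

```js
const Writable = require('stream').Writable;
const util = require('util');

util.inherits(NullWriter, Writable);

// 1. Extend the appropriate parent class.
function NullWriter(options) {
  // 2. Call the parent constructor so buffering state is initialized.
  Writable.call(this, options);
}

// 3. Implement the required method(s) -- here, _write().
NullWriter.prototype._write = function(chunk, encoding, callback) {
  // Discard the chunk and report success.
  callback();
};
```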
- -In your implementation code, it is very important to never call the methods -described in [API for Stream Consumers][]. Otherwise, you can potentially cause -adverse side effects in programs that consume your streaming interfaces. - -### Class: stream.Duplex - - - -A "duplex" stream is one that is both Readable and Writable, such as a TCP -socket connection. - -Note that `stream.Duplex` is an abstract class designed to be extended -with an underlying implementation of the [`stream._read(size)`][stream-_read] -and [`stream._write(chunk, encoding, callback)`][stream-_write] methods as you -would with a Readable or Writable stream class. - -Since JavaScript doesn't have multiple prototypal inheritance, this class -prototypally inherits from Readable, and then parasitically from Writable. It is -thus up to the user to implement both the low-level -[`stream._read(n)`][stream-_read] method as well as the low-level -[`stream._write(chunk, encoding, callback)`][stream-_write] method on extension -duplex classes. - -#### new stream.Duplex(options) - -* `options` {Object} Passed to both Writable and Readable - constructors. Also has the following fields: - * `allowHalfOpen` {Boolean} Default = `true`. If set to `false`, then - the stream will automatically end the readable side when the - writable side ends and vice versa. - * `readableObjectMode` {Boolean} Default = `false`. Sets `objectMode` - for readable side of the stream. Has no effect if `objectMode` - is `true`. - * `writableObjectMode` {Boolean} Default = `false`. Sets `objectMode` - for writable side of the stream. Has no effect if `objectMode` - is `true`. - -In classes that extend the Duplex class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -### Class: stream.PassThrough - -This is a trivial implementation of a [Transform][] stream that simply -passes the input bytes across to the output. Its purpose is mainly -for examples and testing, but there are occasionally use cases where -it can come in handy as a building block for novel sorts of streams. - -### Class: stream.Readable - - - -`stream.Readable` is an abstract class designed to be extended with an -underlying implementation of the [`stream._read(size)`][stream-_read] method. - -Please see [API for Stream Consumers][] for how to consume -streams in your programs. What follows is an explanation of how to -implement Readable streams in your programs. - -#### new stream.Readable([options]) - -* `options` {Object} - * `highWaterMark` {Number} The maximum number of bytes to store in - the internal buffer before ceasing to read from the underlying - resource. Default = `16384` (16kb), or `16` for `objectMode` streams - * `encoding` {String} If specified, then buffers will be decoded to - strings using the specified encoding. Default = `null` - * `objectMode` {Boolean} Whether this stream should behave - as a stream of objects. Meaning that [`stream.read(n)`][stream-read] returns - a single value instead of a Buffer of size n. Default = `false` - * `read` {Function} Implementation for the [`stream._read()`][stream-_read] - method. - -In classes that extend the Readable class, make sure to call the -Readable constructor so that the buffering settings can be properly -initialized. 
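For example (an illustrative sketch, not taken from the original text), the constructor options listed above are simply forwarded from the subclass to the Readable constructor:

```js
const Readable = require('stream').Readable;
const util = require('util');

util.inherits(StringSource, Readable);

function StringSource(text, options) {
  // Forward highWaterMark / encoding / objectMode etc. to the parent
  // constructor so the internal buffering state is set up correctly.
  Readable.call(this, options); // e.g. { encoding: 'utf8', highWaterMark: 1024 }
  this._text = text;
}

StringSource.prototype._read = function(size) {
  // Push everything at once and signal end-of-stream; `size` is advisory.
  this.push(this._text);
  this.push(null);
};

// Usage:
// var src = new StringSource('hello', { encoding: 'utf8' });
// src.on('data', (str) => console.log(str)); // logs 'hello' as a string
```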
- -#### readable.\_read(size) - -* `size` {Number} Number of bytes to read asynchronously - -Note: **Implement this method, but do NOT call it directly.** - -This method is prefixed with an underscore because it is internal to the -class that defines it and should only be called by the internal Readable -class methods. All Readable stream implementations must provide a \_read -method to fetch data from the underlying resource. - -When `_read()` is called, if data is available from the resource, the `_read()` -implementation should start pushing that data into the read queue by calling -[`this.push(dataChunk)`][stream-push]. `_read()` should continue reading from -the resource and pushing data until push returns `false`, at which point it -should stop reading from the resource. Only when `_read()` is called again after -it has stopped should it start reading more data from the resource and pushing -that data onto the queue. - -Note: once the `_read()` method is called, it will not be called again until -the [`stream.push()`][stream-push] method is called. - -The `size` argument is advisory. Implementations where a "read" is a -single call that returns data can use this to know how much data to -fetch. Implementations where that is not relevant, such as TCP or -TLS, may ignore this argument, and simply provide data whenever it -becomes available. There is no need, for example to "wait" until -`size` bytes are available before calling [`stream.push(chunk)`][stream-push]. - -#### readable.push(chunk[, encoding]) - - -* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue -* `encoding` {String} Encoding of String chunks. Must be a valid - Buffer encoding, such as `'utf8'` or `'ascii'` -* return {Boolean} Whether or not more pushes should be performed - -Note: **This method should be called by Readable implementors, NOT -by consumers of Readable streams.** - -If a value other than null is passed, The `push()` method adds a chunk of data -into the queue for subsequent stream processors to consume. If `null` is -passed, it signals the end of the stream (EOF), after which no more data -can be written. - -The data added with `push()` can be pulled out by calling the -[`stream.read()`][stream-read] method when the [`'readable'`][] event fires. - -This API is designed to be as flexible as possible. For example, -you may be wrapping a lower-level source which has some sort of -pause/resume mechanism, and a data callback. In those cases, you -could wrap the low-level source object by doing something like this: - -```js -// source is an object with readStop() and readStart() methods, -// and an `ondata` member that gets called when it has data, and -// an `onend` member that gets called when the data is over. - -util.inherits(SourceWrapper, Readable); - -function SourceWrapper(options) { - Readable.call(this, options); - - this._source = getLowlevelSourceObject(); - - // Every time there's data, we push it into the internal buffer. - this._source.ondata = (chunk) => { - // if push() returns false, then we need to stop reading from source - if (!this.push(chunk)) - this._source.readStop(); - }; - - // When the source ends, we push the EOF-signaling `null` chunk - this._source.onend = () => { - this.push(null); - }; -} - -// _read will be called when the stream wants to pull more data in -// the advisory size argument is ignored in this case. 
-SourceWrapper.prototype._read = function(size) { - this._source.readStart(); -}; -``` - -#### Example: A Counting Stream - - - -This is a basic example of a Readable stream. It emits the numerals -from 1 to 1,000,000 in ascending order, and then ends. - -```js -const Readable = require('stream').Readable; -const util = require('util'); -util.inherits(Counter, Readable); - -function Counter(opt) { - Readable.call(this, opt); - this._max = 1000000; - this._index = 1; -} - -Counter.prototype._read = function() { - var i = this._index++; - if (i > this._max) - this.push(null); - else { - var str = '' + i; - var buf = new Buffer(str, 'ascii'); - this.push(buf); - } -}; -``` - -#### Example: SimpleProtocol v1 (Sub-optimal) - -This is similar to the `parseHeader` function described -[here](#stream_readable_unshift_chunk), but implemented as a custom stream. -Also, note that this implementation does not convert the incoming data to a -string. - -However, this would be better implemented as a [Transform][] stream. See -[SimpleProtocol v2][] for a better implementation. - -```js -// A parser for a simple data protocol. -// The "header" is a JSON object, followed by 2 \n characters, and -// then a message body. -// -// NOTE: This can be done more simply as a Transform stream! -// Using Readable directly for this is sub-optimal. See the -// alternative example below under the Transform section. - -const Readable = require('stream').Readable; -const util = require('util'); - -util.inherits(SimpleProtocol, Readable); - -function SimpleProtocol(source, options) { - if (!(this instanceof SimpleProtocol)) - return new SimpleProtocol(source, options); - - Readable.call(this, options); - this._inBody = false; - this._sawFirstCr = false; - - // source is a readable stream, such as a socket or file - this._source = source; - - var self = this; - source.on('end', () => { - self.push(null); - }); - - // give it a kick whenever the source is readable - // read(0) will not consume any bytes - source.on('readable', () => { - self.read(0); - }); - - this._rawHeader = []; - this.header = null; -} - -SimpleProtocol.prototype._read = function(n) { - if (!this._inBody) { - var chunk = this._source.read(); - - // if the source doesn't have data, we don't have data yet. - if (chunk === null) - return this.push(''); - - // check if the chunk has a \n\n - var split = -1; - for (var i = 0; i < chunk.length; i++) { - if (chunk[i] === 10) { // '\n' - if (this._sawFirstCr) { - split = i; - break; - } else { - this._sawFirstCr = true; - } - } else { - this._sawFirstCr = false; - } - } - - if (split === -1) { - // still waiting for the \n\n - // stash the chunk, and try again. - this._rawHeader.push(chunk); - this.push(''); - } else { - this._inBody = true; - var h = chunk.slice(0, split); - this._rawHeader.push(h); - var header = Buffer.concat(this._rawHeader).toString(); - try { - this.header = JSON.parse(header); - } catch (er) { - this.emit('error', new Error('invalid simple protocol data')); - return; - } - // now, because we got some extra data, unshift the rest - // back into the read queue so that our consumer will see it. - var b = chunk.slice(split); - this.unshift(b); - // calling unshift by itself does not reset the reading state - // of the stream; since we're inside _read, doing an additional - // push('') will reset the state appropriately. - this.push(''); - - // and let them know that we are done parsing the header. 
- this.emit('header', this.header); - } - } else { - // from there on, just provide the data to our consumer. - // careful not to push(null), since that would indicate EOF. - var chunk = this._source.read(); - if (chunk) this.push(chunk); - } -}; - -// Usage: -// var parser = new SimpleProtocol(source); -// Now parser is a readable stream that will emit 'header' -// with the parsed header data. -``` - -### Class: stream.Transform - -A "transform" stream is a duplex stream where the output is causally -connected in some way to the input, such as a [zlib][] stream or a -[crypto][] stream. - -There is no requirement that the output be the same size as the input, -the same number of chunks, or arrive at the same time. For example, a -Hash stream will only ever have a single chunk of output which is -provided when the input is ended. A zlib stream will produce output -that is either much smaller or much larger than its input. - -Rather than implement the [`stream._read()`][stream-_read] and -[`stream._write()`][stream-_write] methods, Transform classes must implement the -[`stream._transform()`][stream-_transform] method, and may optionally -also implement the [`stream._flush()`][stream-_flush] method. (See below.) - -#### new stream.Transform([options]) - -* `options` {Object} Passed to both Writable and Readable - constructors. Also has the following fields: - * `transform` {Function} Implementation for the - [`stream._transform()`][stream-_transform] method. - * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush] - method. - -In classes that extend the Transform class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -#### Events: 'finish' and 'end' - -The [`'finish'`][] and [`'end'`][] events are from the parent Writable -and Readable classes respectively. The `'finish'` event is fired after -[`stream.end()`][stream-end] is called and all chunks have been processed by -[`stream._transform()`][stream-_transform], `'end'` is fired after all data has -been output which is after the callback in [`stream._flush()`][stream-_flush] -has been called. - -#### transform.\_flush(callback) - -* `callback` {Function} Call this function (optionally with an error - argument) when you are done flushing any remaining data. - -Note: **This function MUST NOT be called directly.** It MAY be implemented -by child classes, and if so, will be called by the internal Transform -class methods only. - -In some cases, your transform operation may need to emit a bit more -data at the end of the stream. For example, a `Zlib` compression -stream will store up some internal state so that it can optimally -compress the output. At the end, however, it needs to do the best it -can with what is left, so that the data will be complete. - -In those cases, you can implement a `_flush()` method, which will be -called at the very end, after all the written data is consumed, but -before emitting [`'end'`][] to signal the end of the readable side. Just -like with [`stream._transform()`][stream-_transform], call -`transform.push(chunk)` zero or more times, as appropriate, and call `callback` -when the flush operation is complete. - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. 
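A minimal sketch of that pattern (illustrative only; the class below is not part of the original text): buffer data in `_transform()` and emit the remainder from `_flush()`.

```js
const Transform = require('stream').Transform;
const util = require('util');

util.inherits(JoinStream, Transform);

// Collects every written chunk and emits them as one joined Buffer at the end.
function JoinStream(options) {
  Transform.call(this, options);
  this._chunks = [];
}

JoinStream.prototype._transform = function(chunk, encoding, callback) {
  this._chunks.push(chunk); // nothing is pushed until the stream ends
  callback();
};

JoinStream.prototype._flush = function(callback) {
  // Called after all written data has been consumed, before 'end' is emitted.
  this.push(Buffer.concat(this._chunks));
  callback();
};
```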
- -#### transform.\_transform(chunk, encoding, callback) - -* `chunk` {Buffer|String} The chunk to be transformed. Will **always** - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then this is the - encoding type. If chunk is a buffer, then this is the special - value - 'buffer', ignore it in this case. -* `callback` {Function} Call this function (optionally with an error - argument and data) when you are done processing the supplied chunk. - -Note: **This function MUST NOT be called directly.** It should be -implemented by child classes, and called by the internal Transform -class methods only. - -All Transform stream implementations must provide a `_transform()` -method to accept input and produce output. - -`_transform()` should do whatever has to be done in this specific -Transform class, to handle the bytes being written, and pass them off -to the readable portion of the interface. Do asynchronous I/O, -process things, and so on. - -Call `transform.push(outputChunk)` 0 or more times to generate output -from this input chunk, depending on how much data you want to output -as a result of this chunk. - -Call the callback function only when the current chunk is completely -consumed. Note that there may or may not be output as a result of any -particular input chunk. If you supply a second argument to the callback -it will be passed to the push method. In other words the following are -equivalent: - -```js -transform.prototype._transform = function (data, encoding, callback) { - this.push(data); - callback(); -}; - -transform.prototype._transform = function (data, encoding, callback) { - callback(null, data); -}; -``` - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - -#### Example: `SimpleProtocol` parser v2 - -The example [here](#stream_example_simpleprotocol_v1_sub_optimal) of a simple -protocol parser can be implemented simply by using the higher level -[Transform][] stream class, similar to the `parseHeader` and `SimpleProtocol -v1` examples. - -In this example, rather than providing the input as an argument, it -would be piped into the parser, which is a more idiomatic Node.js stream -approach. - -```javascript -const util = require('util'); -const Transform = require('stream').Transform; -util.inherits(SimpleProtocol, Transform); - -function SimpleProtocol(options) { - if (!(this instanceof SimpleProtocol)) - return new SimpleProtocol(options); - - Transform.call(this, options); - this._inBody = false; - this._sawFirstCr = false; - this._rawHeader = []; - this.header = null; -} - -SimpleProtocol.prototype._transform = function(chunk, encoding, done) { - if (!this._inBody) { - // check if the chunk has a \n\n - var split = -1; - for (var i = 0; i < chunk.length; i++) { - if (chunk[i] === 10) { // '\n' - if (this._sawFirstCr) { - split = i; - break; - } else { - this._sawFirstCr = true; - } - } else { - this._sawFirstCr = false; - } - } - - if (split === -1) { - // still waiting for the \n\n - // stash the chunk, and try again. 
- this._rawHeader.push(chunk); - } else { - this._inBody = true; - var h = chunk.slice(0, split); - this._rawHeader.push(h); - var header = Buffer.concat(this._rawHeader).toString(); - try { - this.header = JSON.parse(header); - } catch (er) { - this.emit('error', new Error('invalid simple protocol data')); - return; - } - // and let them know that we are done parsing the header. - this.emit('header', this.header); - - // now, because we got some extra data, emit this first. - this.push(chunk.slice(split)); - } - } else { - // from there on, just provide the data to our consumer as-is. - this.push(chunk); - } - done(); -}; - -// Usage: -// var parser = new SimpleProtocol(); -// source.pipe(parser) -// Now parser is a readable stream that will emit 'header' -// with the parsed header data. -``` - -### Class: stream.Writable - - - -`stream.Writable` is an abstract class designed to be extended with an -underlying implementation of the -[`stream._write(chunk, encoding, callback)`][stream-_write] method. - -Please see [API for Stream Consumers][] for how to consume -writable streams in your programs. What follows is an explanation of -how to implement Writable streams in your programs. - -#### new stream.Writable([options]) - -* `options` {Object} - * `highWaterMark` {Number} Buffer level when - [`stream.write()`][stream-write] starts returning `false`. Default = `16384` - (16kb), or `16` for `objectMode` streams. - * `decodeStrings` {Boolean} Whether or not to decode strings into - Buffers before passing them to [`stream._write()`][stream-_write]. - Default = `true` - * `objectMode` {Boolean} Whether or not the - [`stream.write(anyObj)`][stream-write] is a valid operation. If set you can - write arbitrary data instead of only `Buffer` / `String` data. - Default = `false` - * `write` {Function} Implementation for the - [`stream._write()`][stream-_write] method. - * `writev` {Function} Implementation for the - [`stream._writev()`][stream-_writev] method. - -In classes that extend the Writable class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -#### writable.\_write(chunk, encoding, callback) - -* `chunk` {Buffer|String} The chunk to be written. Will **always** - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then this is the - encoding type. If chunk is a buffer, then this is the special - value - 'buffer', ignore it in this case. -* `callback` {Function} Call this function (optionally with an error - argument) when you are done processing the supplied chunk. - -All Writable stream implementations must provide a -[`stream._write()`][stream-_write] method to send data to the underlying -resource. - -Note: **This function MUST NOT be called directly.** It should be -implemented by child classes, and called by the internal Writable -class methods only. - -Call the callback using the standard `callback(error)` pattern to -signal that the write completed successfully or with an error. - -If the `decodeStrings` flag is set in the constructor options, then -`chunk` may be a string rather than a Buffer, and `encoding` will -indicate the sort of string that it is. This is to support -implementations that have an optimized handling for certain string -data encodings. If you do not explicitly set the `decodeStrings` -option to `false`, then you can safely ignore the `encoding` argument, -and assume that `chunk` will always be a Buffer. 
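As a rough sketch of that distinction (added for illustration; not part of the original text), a `_write()` implementation that opts out of string decoding has to inspect `encoding` itself:

```js
const Writable = require('stream').Writable;
const util = require('util');

util.inherits(RawWriter, Writable);

function RawWriter() {
  // With decodeStrings: false, string chunks reach _write() un-decoded.
  Writable.call(this, { decodeStrings: false });
}

RawWriter.prototype._write = function(chunk, encoding, callback) {
  if (typeof chunk === 'string') {
    // encoding tells us how to interpret the string ('utf8', 'ascii', ...).
    chunk = new Buffer(chunk, encoding);
  }
  // chunk is now always a Buffer; hand it to the underlying resource.
  process.stdout.write(chunk);
  callback();
};
```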
- -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - -#### writable.\_writev(chunks, callback) - -* `chunks` {Array} The chunks to be written. Each chunk has following - format: `{ chunk: ..., encoding: ... }`. -* `callback` {Function} Call this function (optionally with an error - argument) when you are done processing the supplied chunks. - -Note: **This function MUST NOT be called directly.** It may be -implemented by child classes, and called by the internal Writable -class methods only. - -This function is completely optional to implement. In most cases it is -unnecessary. If implemented, it will be called with all the chunks -that are buffered in the write queue. - - -## Simplified Constructor API - - - -In simple cases there is now the added benefit of being able to construct a -stream without inheritance. - -This can be done by passing the appropriate methods as constructor options: - -Examples: - -### Duplex - -```js -var duplex = new stream.Duplex({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - }, - write: function(chunk, encoding, next) { - // sets this._write under the hood - - // An optional error can be passed as the first argument - next() - } -}); - -// or - -var duplex = new stream.Duplex({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - }, - writev: function(chunks, next) { - // sets this._writev under the hood - - // An optional error can be passed as the first argument - next() - } -}); -``` - -### Readable - -```js -var readable = new stream.Readable({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - } -}); -``` - -### Transform - -```js -var transform = new stream.Transform({ - transform: function(chunk, encoding, next) { - // sets this._transform under the hood - - // generate output as many times as needed - // this.push(chunk); - - // call when the current chunk is consumed - next(); - }, - flush: function(done) { - // sets this._flush under the hood - - // generate output as many times as needed - // this.push(chunk); - - done(); - } -}); -``` - -### Writable - -```js -var writable = new stream.Writable({ - write: function(chunk, encoding, next) { - // sets this._write under the hood - - // An optional error can be passed as the first argument - next() - } -}); - -// or - -var writable = new stream.Writable({ - writev: function(chunks, next) { - // sets this._writev under the hood - - // An optional error can be passed as the first argument - next() - } -}); -``` - -## Streams: Under the Hood - - - -### Buffering - - - -Both Writable and Readable streams will buffer data on an internal -object which can be retrieved from `_writableState.getBuffer()` or -`_readableState.buffer`, respectively. - -The amount of data that will potentially be buffered depends on the -`highWaterMark` option which is passed into the constructor. - -Buffering in Readable streams happens when the implementation calls -[`stream.push(chunk)`][stream-push]. 
If the consumer of the Stream does not -call [`stream.read()`][stream-read], then the data will sit in the internal -queue until it is consumed. - -Buffering in Writable streams happens when the user calls -[`stream.write(chunk)`][stream-write] repeatedly, even when it returns `false`. - -The purpose of streams, especially with the [`stream.pipe()`][] method, is to -limit the buffering of data to acceptable levels, so that sources and -destinations of varying speed will not overwhelm the available memory. - -### Compatibility with Older Node.js Versions - - - -In versions of Node.js prior to v0.10, the Readable stream interface was -simpler, but also less powerful and less useful. - -* Rather than waiting for you to call the [`stream.read()`][stream-read] method, - [`'data'`][] events would start emitting immediately. If you needed to do - some I/O to decide how to handle data, then you had to store the chunks - in some kind of buffer so that they would not be lost. -* The [`stream.pause()`][stream-pause] method was advisory, rather than - guaranteed. This meant that you still had to be prepared to receive - [`'data'`][] events even when the stream was in a paused state. - -In Node.js v0.10, the [Readable][] class was added. -For backwards compatibility with older Node.js programs, Readable streams -switch into "flowing mode" when a [`'data'`][] event handler is added, or -when the [`stream.resume()`][stream-resume] method is called. The effect is -that, even if you are not using the new [`stream.read()`][stream-read] method -and [`'readable'`][] event, you no longer have to worry about losing -[`'data'`][] chunks. - -Most programs will continue to function normally. However, this -introduces an edge case in the following conditions: - -* No [`'data'`][] event handler is added. -* The [`stream.resume()`][stream-resume] method is never called. -* The stream is not piped to any writable destination. - -For example, consider the following code: - -```js -// WARNING! BROKEN! -net.createServer((socket) => { - - // we add an 'end' method, but never consume the data - socket.on('end', () => { - // It will never get here. - socket.end('I got your message (but didnt read it)\n'); - }); - -}).listen(1337); -``` - -In versions of Node.js prior to v0.10, the incoming message data would be -simply discarded. However, in Node.js v0.10 and beyond, -the socket will remain paused forever. - -The workaround in this situation is to call the -[`stream.resume()`][stream-resume] method to start the flow of data: - -```js -// Workaround -net.createServer((socket) => { - - socket.on('end', () => { - socket.end('I got your message (but didnt read it)\n'); - }); - - // start the flow of data, discarding it. - socket.resume(); - -}).listen(1337); -``` - -In addition to new Readable streams switching into flowing mode, -pre-v0.10 style streams can be wrapped in a Readable class using the -[`stream.wrap()`][] method. - - -### Object Mode - - - -Normally, Streams operate on Strings and Buffers exclusively. - -Streams that are in **object mode** can emit generic JavaScript values -other than Buffers and Strings. - -A Readable stream in object mode will always return a single item from -a call to [`stream.read(size)`][stream-read], regardless of what the size -argument is. - -A Writable stream in object mode will always ignore the `encoding` -argument to [`stream.write(data, encoding)`][stream-write]. - -The special value `null` still retains its special value for object -mode streams. 
That is, for object mode readable streams, `null` as a -return value from [`stream.read()`][stream-read] indicates that there is no more -data, and [`stream.push(null)`][stream-push] will signal the end of stream data -(`EOF`). - -No streams in Node.js core are object mode streams. This pattern is only -used by userland streaming libraries. - -You should set `objectMode` in your stream child class constructor on -the options object. Setting `objectMode` mid-stream is not safe. - -For Duplex streams `objectMode` can be set exclusively for readable or -writable side with `readableObjectMode` and `writableObjectMode` -respectively. These options can be used to implement parsers and -serializers with Transform streams. - -```js -const util = require('util'); -const StringDecoder = require('string_decoder').StringDecoder; -const Transform = require('stream').Transform; -util.inherits(JSONParseStream, Transform); - -// Gets \n-delimited JSON string data, and emits the parsed objects -function JSONParseStream() { - if (!(this instanceof JSONParseStream)) - return new JSONParseStream(); - - Transform.call(this, { readableObjectMode : true }); - - this._buffer = ''; - this._decoder = new StringDecoder('utf8'); -} - -JSONParseStream.prototype._transform = function(chunk, encoding, cb) { - this._buffer += this._decoder.write(chunk); - // split on newlines - var lines = this._buffer.split(/\r?\n/); - // keep the last partial line buffered - this._buffer = lines.pop(); - for (var l = 0; l < lines.length; l++) { - var line = lines[l]; - try { - var obj = JSON.parse(line); - } catch (er) { - this.emit('error', er); - return; - } - // push the parsed object out to the readable consumer - this.push(obj); - } - cb(); -}; - -JSONParseStream.prototype._flush = function(cb) { - // Just handle any leftover - var rem = this._buffer.trim(); - if (rem) { - try { - var obj = JSON.parse(rem); - } catch (er) { - this.emit('error', er); - return; - } - // push the parsed object out to the readable consumer - this.push(obj); - } - cb(); -}; -``` - -### `stream.read(0)` - -There are some cases where you want to trigger a refresh of the -underlying readable stream mechanisms, without actually consuming any -data. In that case, you can call `stream.read(0)`, which will always -return null. - -If the internal read buffer is below the `highWaterMark`, and the -stream is not currently reading, then calling `stream.read(0)` will trigger -a low-level [`stream._read()`][stream-_read] call. - -There is almost never a need to do this. However, you will see some -cases in Node.js's internals where this is done, particularly in the -Readable stream class internals. - -### `stream.push('')` - -Pushing a zero-byte string or Buffer (when not in [Object mode][]) has an -interesting side effect. Because it *is* a call to -[`stream.push()`][stream-push], it will end the `reading` process. However, it -does *not* add any data to the readable buffer, so there's nothing for -a user to consume. - -Very rarely, there are cases where you have no data to provide now, -but the consumer of your stream (or, perhaps, another bit of your own -code) will know when to check again, by calling [`stream.read(0)`][stream-read]. -In those cases, you *may* call `stream.push('')`. - -So far, the only use case for this functionality is in the -[`tls.CryptoStream`][] class, which is deprecated in Node.js/io.js v1.0. 
If you -find that you have to use `stream.push('')`, please consider another -approach, because it almost certainly indicates that something is -horribly wrong. - -[`'data'`]: #stream_event_data -[`'drain'`]: #stream_event_drain -[`'end'`]: #stream_event_end -[`'finish'`]: #stream_event_finish -[`'readable'`]: #stream_event_readable -[`buf.toString(encoding)`]: https://nodejs.org/docs/v5.8.0/api/buffer.html#buffer_buf_tostring_encoding_start_end -[`EventEmitter`]: https://nodejs.org/docs/v5.8.0/api/events.html#events_class_eventemitter -[`process.stderr`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stderr -[`process.stdin`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdin -[`process.stdout`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdout -[`stream.cork()`]: #stream_writable_cork -[`stream.pipe()`]: #stream_readable_pipe_destination_options -[`stream.uncork()`]: #stream_writable_uncork -[`stream.unpipe()`]: #stream_readable_unpipe_destination -[`stream.wrap()`]: #stream_readable_wrap_stream -[`tls.CryptoStream`]: https://nodejs.org/docs/v5.8.0/api/tls.html#tls_class_cryptostream -[`util.inherits()`]: https://nodejs.org/docs/v5.8.0/api/util.html#util_util_inherits_constructor_superconstructor -[API for Stream Consumers]: #stream_api_for_stream_consumers -[API for Stream Implementors]: #stream_api_for_stream_implementors -[child process stdin]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdin -[child process stdout and stderr]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdout -[Compatibility]: #stream_compatibility_with_older_node_js_versions -[crypto]: crypto.html -[Duplex]: #stream_class_stream_duplex -[fs read streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_readstream -[fs write streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_writestream -[HTTP requests, on the client]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_clientrequest -[HTTP responses, on the server]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_serverresponse -[http-incoming-message]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_incomingmessage -[Object mode]: #stream_object_mode -[Readable]: #stream_class_stream_readable -[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2 -[stream-_flush]: #stream_transform_flush_callback -[stream-_read]: #stream_readable_read_size_1 -[stream-_transform]: #stream_transform_transform_chunk_encoding_callback -[stream-_write]: #stream_writable_write_chunk_encoding_callback_1 -[stream-_writev]: #stream_writable_writev_chunks_callback -[stream-end]: #stream_writable_end_chunk_encoding_callback -[stream-pause]: #stream_readable_pause -[stream-push]: #stream_readable_push_chunk_encoding -[stream-read]: #stream_readable_read_size -[stream-resume]: #stream_readable_resume -[stream-write]: #stream_writable_write_chunk_encoding_callback -[TCP sockets]: https://nodejs.org/docs/v5.8.0/api/net.html#net_class_net_socket -[Transform]: #stream_class_stream_transform -[Writable]: #stream_class_stream_writable -[zlib]: zlib.html diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md deleted file mode 100644 index c141a99c26c..00000000000 --- 
a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md +++ /dev/null @@ -1,58 +0,0 @@ -# streams WG Meeting 2015-01-30 - -## Links - -* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg -* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 -* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ - -## Agenda - -Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. - -* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) -* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) -* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) -* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) - -## Minutes - -### adopt a charter - -* group: +1's all around - -### What versioning scheme should be adopted? -* group: +1’s 3.0.0 -* domenic+group: pulling in patches from other sources where appropriate -* mikeal: version independently, suggesting versions for io.js -* mikeal+domenic: work with TC to notify in advance of changes -simpler stream creation - -### streamline creation of streams -* sam: streamline creation of streams -* domenic: nice simple solution posted - but, we lose the opportunity to change the model - may not be backwards incompatible (double check keys) - - **action item:** domenic will check - -### remove implicit flowing of streams on(‘data’) -* add isFlowing / isPaused -* mikeal: worrying that we’re documenting polyfill methods – confuses users -* domenic: more reflective API is probably good, with warning labels for users -* new section for mad scientists (reflective stream access) -* calvin: name the “third state” -* mikeal: maybe borrow the name from whatwg? -* domenic: we’re missing the “third state” -* consensus: kind of difficult to name the third state -* mikeal: figure out differences in states / compat -* mathias: always flow on data – eliminates third state - * explore what it breaks - -**action items:** -* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream) -* ask rod/build for infrastructure -* **chris**: explore the “flow on data” approach -* add isPaused/isFlowing -* add new docs section -* move isPaused to that section diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/duplex.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/duplex.js deleted file mode 100644 index ca807af8762..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/duplex.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_duplex.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_duplex.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_duplex.js deleted file mode 100644 index 736693b8400..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_duplex.js +++ /dev/null @@ -1,75 +0,0 @@ -// a duplex stream is just a stream that is both readable and writable. 
-// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. - -'use strict'; - -/**/ - -var objectKeys = Object.keys || function (obj) { - var keys = []; - for (var key in obj) { - keys.push(key); - }return keys; -}; -/**/ - -module.exports = Duplex; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -var Readable = require('./_stream_readable'); -var Writable = require('./_stream_writable'); - -util.inherits(Duplex, Readable); - -var keys = objectKeys(Writable.prototype); -for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; -} - -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - - Readable.call(this, options); - Writable.call(this, options); - - if (options && options.readable === false) this.readable = false; - - if (options && options.writable === false) this.writable = false; - - this.allowHalfOpen = true; - if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; - - this.once('end', onend); -} - -// the no-half-open enforcer -function onend() { - // if we allow half-open state, or if the writable side ended, - // then we're ok. - if (this.allowHalfOpen || this._writableState.ended) return; - - // no more data can be written. - // But allow more writes to happen in this tick. - processNextTick(onEndNT, this); -} - -function onEndNT(self) { - self.end(); -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_passthrough.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_passthrough.js deleted file mode 100644 index d06f71f1868..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_passthrough.js +++ /dev/null @@ -1,26 +0,0 @@ -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. 
- -'use strict'; - -module.exports = PassThrough; - -var Transform = require('./_stream_transform'); - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -util.inherits(PassThrough, Transform); - -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - - Transform.call(this, options); -} - -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_readable.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_readable.js deleted file mode 100644 index 54a9d5c553d..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_readable.js +++ /dev/null @@ -1,880 +0,0 @@ -'use strict'; - -module.exports = Readable; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var isArray = require('isarray'); -/**/ - -/**/ -var Buffer = require('buffer').Buffer; -/**/ - -Readable.ReadableState = ReadableState; - -var EE = require('events'); - -/**/ -var EElistenerCount = function (emitter, type) { - return emitter.listeners(type).length; -}; -/**/ - -/**/ -var Stream; -(function () { - try { - Stream = require('st' + 'ream'); - } catch (_) {} finally { - if (!Stream) Stream = require('events').EventEmitter; - } -})(); -/**/ - -var Buffer = require('buffer').Buffer; - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -/**/ -var debugUtil = require('util'); -var debug = undefined; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function () {}; -} -/**/ - -var StringDecoder; - -util.inherits(Readable, Stream); - -var Duplex; -function ReadableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - - if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - var hwm = options.highWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; - - // cast to ints. - this.highWaterMark = ~ ~this.highWaterMark; - - this.buffer = []; - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. 
- // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // when piping, we only care about 'readable' events that happen - // after read()ing all the bytes and not getting any pushback. - this.ranOut = false; - - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; - - // if true, a maybeReadMore has been scheduled - this.readingMore = false; - - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } -} - -var Duplex; -function Readable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - if (!(this instanceof Readable)) return new Readable(options); - - this._readableState = new ReadableState(options, this); - - // legacy - this.readable = true; - - if (options && typeof options.read === 'function') this._read = options.read; - - Stream.call(this); -} - -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - - if (!state.objectMode && typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = new Buffer(chunk, encoding); - encoding = ''; - } - } - - return readableAddChunk(this, state, chunk, encoding, false); -}; - -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - var state = this._readableState; - return readableAddChunk(this, state, chunk, '', true); -}; - -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; - -function readableAddChunk(stream, state, chunk, encoding, addToFront) { - var er = chunkInvalid(state, chunk); - if (er) { - stream.emit('error', er); - } else if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (state.ended && !addToFront) { - var e = new Error('stream.push() after EOF'); - stream.emit('error', e); - } else if (state.endEmitted && addToFront) { - var e = new Error('stream.unshift() after end event'); - stream.emit('error', e); - } else { - var skipAdd; - if (state.decoder && !addToFront && !encoding) { - chunk = state.decoder.write(chunk); - skipAdd = !state.objectMode && chunk.length === 0; - } - - if (!addToFront) state.reading = false; - - // Don't add to the buffer if we've decoded to an empty string chunk and - // we're not in object mode - if (!skipAdd) { - // if we want the data now, just emit it. - if (state.flowing && state.length === 0 && !state.sync) { - stream.emit('data', chunk); - stream.read(0); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - - if (state.needReadable) emitReadable(stream); - } - } - - maybeReadMore(stream, state); - } - } else if (!addToFront) { - state.reading = false; - } - - return needMoreData(state); -} - -// if it's past the high water mark, we can push in some more. -// Also, if we have no data yet, we can stand some -// more bytes. This is to work around cases where hwm=0, -// such as the repl. 
Also, if the push() triggered a -// readable event, and the user called read(largeNumber) such that -// needReadable was set, then we ought to push more, so that another -// 'readable' event will be triggered. -function needMoreData(state) { - return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); -} - -// backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this._readableState.decoder = new StringDecoder(enc); - this._readableState.encoding = enc; - return this; -}; - -// Don't raise the hwm > 8MB -var MAX_HWM = 0x800000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - n = MAX_HWM; - } else { - // Get the next highest power of 2 - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - return n; -} - -function howMuchToRead(n, state) { - if (state.length === 0 && state.ended) return 0; - - if (state.objectMode) return n === 0 ? 0 : 1; - - if (n === null || isNaN(n)) { - // only flow one buffer at a time - if (state.flowing && state.buffer.length) return state.buffer[0].length;else return state.length; - } - - if (n <= 0) return 0; - - // If we're asking for more than the target buffer level, - // then raise the water mark. Bump up to the next highest - // power of 2, to prevent increasing it excessively in tiny - // amounts. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - - // don't have that much. return null, unless we've ended. - if (n > state.length) { - if (!state.ended) { - state.needReadable = true; - return 0; - } else { - return state.length; - } - } - - return n; -} - -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - var state = this._readableState; - var nOrig = n; - - if (typeof n !== 'number' || n > 0) state.emittedReadable = false; - - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } - - n = howMuchToRead(n, state); - - // if we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } - - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. 
- // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. - - // if we need a readable event, then we need to do some reading. - var doRead = state.needReadable; - debug('need readable', doRead); - - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } - - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } - - if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - } - - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (doRead && !state.reading) n = howMuchToRead(nOrig, state); - - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - - if (ret === null) { - state.needReadable = true; - n = 0; - } - - state.length -= n; - - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (state.length === 0 && !state.ended) state.needReadable = true; - - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended && state.length === 0) endReadable(this); - - if (ret !== null) this.emit('data', ret); - - return ret; -}; - -function chunkInvalid(state, chunk) { - var er = null; - if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); - } - return er; -} - -function onEofChunk(stream, state) { - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } - } - state.ended = true; - - // emit 'readable' now to make sure it gets picked up. - emitReadable(stream); -} - -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. -function emitReadable(stream) { - var state = stream._readableState; - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream); - } -} - -function emitReadable_(stream) { - debug('emit readable'); - stream.emit('readable'); - flow(stream); -} - -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. 
-function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - processNextTick(maybeReadMore_, stream, state); - } -} - -function maybeReadMore_(stream, state) { - var len = state.length; - while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break;else len = state.length; - } - state.readingMore = false; -} - -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - this.emit('error', new Error('not implemented')); -}; - -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; - } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - - var endFn = doEnd ? onend : cleanup; - if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn); - - dest.on('unpipe', onunpipe); - function onunpipe(readable) { - debug('onunpipe'); - if (readable === src) { - cleanup(); - } - } - - function onend() { - debug('onend'); - dest.end(); - } - - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', cleanup); - src.removeListener('data', ondata); - - cleanedUp = true; - - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } - - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - var ret = dest.write(chunk); - if (false === ret) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - if (state.pipesCount === 1 && state.pipes[0] === dest && src.listenerCount('data') === 1 && !cleanedUp) { - debug('false write response, pause', src._readableState.awaitDrain); - src._readableState.awaitDrain++; - } - src.pause(); - } - } - - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. 
- function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); - } - // This is a brutally ugly hack to make sure that our error handler - // is attached before any userland ones. NEVER DO THIS. - if (!dest._events || !dest._events.error) dest.on('error', onerror);else if (isArray(dest._events.error)) dest._events.error.unshift(onerror);else dest._events.error = [onerror, dest._events.error]; - - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - dest.once('finish', onfinish); - - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } - - // tell the dest that it's being piped to - dest.emit('pipe', src); - - // start the flow if it hasn't been started already. - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } - - return dest; -}; - -function pipeOnDrain(src) { - return function () { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; -} - -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; - - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - - if (!dest) dest = state.pipes; - - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this); - return this; - } - - // slow case. multiple pipe destinations. - - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - - for (var _i = 0; _i < len; _i++) { - dests[_i].emit('unpipe', this); - }return this; - } - - // try to find the right one. - var i = indexOf(state.pipes, dest); - if (i === -1) return this; - - state.pipes.splice(i, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - - dest.emit('unpipe', this); - - return this; -}; - -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - - // If listening to data, and it has not explicitly been paused, - // then call resume to start the flow of data on the next tick. 
- if (ev === 'data' && false !== this._readableState.flowing) { - this.resume(); - } - - if (ev === 'readable' && !this._readableState.endEmitted) { - var state = this._readableState; - if (!state.readableListening) { - state.readableListening = true; - state.emittedReadable = false; - state.needReadable = true; - if (!state.reading) { - processNextTick(nReadingNextTick, this); - } else if (state.length) { - emitReadable(this, state); - } - } - } - - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; - -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} - -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. -Readable.prototype.resume = function () { - var state = this._readableState; - if (!state.flowing) { - debug('resume'); - state.flowing = true; - resume(this, state); - } - return this; -}; - -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - processNextTick(resume_, stream, state); - } -} - -function resume_(stream, state) { - if (!state.reading) { - debug('resume read 0'); - stream.read(0); - } - - state.resumeScheduled = false; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} - -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (false !== this._readableState.flowing) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - return this; -}; - -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - if (state.flowing) { - do { - var chunk = stream.read(); - } while (null !== chunk && state.flowing); - } -} - -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - var state = this._readableState; - var paused = false; - - var self = this; - stream.on('end', function () { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) self.push(chunk); - } - - self.push(null); - }); - - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); - - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - - var ret = self.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } - }); - - // proxy all the other methods. - // important when wrapping filters and duplexes. - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function (method) { - return function () { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } - - // proxy certain important events. - var events = ['error', 'close', 'destroy', 'pause', 'resume']; - forEach(events, function (ev) { - stream.on(ev, self.emit.bind(self, ev)); - }); - - // when we try to consume some more bytes, simply unpause the - // underlying stream. - self._read = function (n) { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); - } - }; - - return self; -}; - -// exposed for testing purposes only. -Readable._fromList = fromList; - -// Pluck off n bytes from an array of buffers. 
-// Length is the combined lengths of all the buffers in the list. -function fromList(n, state) { - var list = state.buffer; - var length = state.length; - var stringMode = !!state.decoder; - var objectMode = !!state.objectMode; - var ret; - - // nothing in the list, definitely empty. - if (list.length === 0) return null; - - if (length === 0) ret = null;else if (objectMode) ret = list.shift();else if (!n || n >= length) { - // read it all, truncate the array. - if (stringMode) ret = list.join('');else if (list.length === 1) ret = list[0];else ret = Buffer.concat(list, length); - list.length = 0; - } else { - // read just some of it. - if (n < list[0].length) { - // just take a part of the first list item. - // slice is the same for buffers and strings. - var buf = list[0]; - ret = buf.slice(0, n); - list[0] = buf.slice(n); - } else if (n === list[0].length) { - // first list is a perfect match - ret = list.shift(); - } else { - // complex case. - // we have enough to cover it, but it spans past the first buffer. - if (stringMode) ret = '';else ret = new Buffer(n); - - var c = 0; - for (var i = 0, l = list.length; i < l && c < n; i++) { - var buf = list[0]; - var cpy = Math.min(n - c, buf.length); - - if (stringMode) ret += buf.slice(0, cpy);else buf.copy(ret, c, 0, cpy); - - if (cpy < buf.length) list[0] = buf.slice(cpy);else list.shift(); - - c += cpy; - } - } - } - - return ret; -} - -function endReadable(stream) { - var state = stream._readableState; - - // If we get here before consuming all the bytes, then that is a - // bug in node. Should never happen. - if (state.length > 0) throw new Error('endReadable called on non-empty stream'); - - if (!state.endEmitted) { - state.ended = true; - processNextTick(endReadableNT, state, stream); - } -} - -function endReadableNT(state, stream) { - // Check that we didn't get one last unshift. - if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - } -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} - -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - return -1; -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js deleted file mode 100644 index 625cdc17698..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js +++ /dev/null @@ -1,180 +0,0 @@ -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. 
When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. - -'use strict'; - -module.exports = Transform; - -var Duplex = require('./_stream_duplex'); - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -util.inherits(Transform, Duplex); - -function TransformState(stream) { - this.afterTransform = function (er, data) { - return afterTransform(stream, er, data); - }; - - this.needTransform = false; - this.transforming = false; - this.writecb = null; - this.writechunk = null; - this.writeencoding = null; -} - -function afterTransform(stream, er, data) { - var ts = stream._transformState; - ts.transforming = false; - - var cb = ts.writecb; - - if (!cb) return stream.emit('error', new Error('no writecb in Transform class')); - - ts.writechunk = null; - ts.writecb = null; - - if (data !== null && data !== undefined) stream.push(data); - - cb(er); - - var rs = stream._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - stream._read(rs.highWaterMark); - } -} - -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - - Duplex.call(this, options); - - this._transformState = new TransformState(this); - - // when the writable side finishes, then flush out anything remaining. - var stream = this; - - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; - - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. 
- this._readableState.sync = false; - - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - - if (typeof options.flush === 'function') this._flush = options.flush; - } - - this.once('prefinish', function () { - if (typeof this._flush === 'function') this._flush(function (er) { - done(stream, er); - });else done(stream); - }); -} - -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; - -// This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. -Transform.prototype._transform = function (chunk, encoding, cb) { - throw new Error('not implemented'); -}; - -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; - -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; - - if (ts.writechunk !== null && ts.writecb && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. - ts.needTransform = true; - } -}; - -function done(stream, er) { - if (er) return stream.emit('error', er); - - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - var ws = stream._writableState; - var ts = stream._transformState; - - if (ws.length) throw new Error('calling transform done when ws.length != 0'); - - if (ts.transforming) throw new Error('calling transform done when still transforming'); - - return stream.push(null); -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_writable.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_writable.js deleted file mode 100644 index 95916c992a9..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_writable.js +++ /dev/null @@ -1,516 +0,0 @@ -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. - -'use strict'; - -module.exports = Writable; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? 
setImmediate : processNextTick; -/**/ - -/**/ -var Buffer = require('buffer').Buffer; -/**/ - -Writable.WritableState = WritableState; - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -/**/ -var internalUtil = { - deprecate: require('util-deprecate') -}; -/**/ - -/**/ -var Stream; -(function () { - try { - Stream = require('st' + 'ream'); - } catch (_) {} finally { - if (!Stream) Stream = require('events').EventEmitter; - } -})(); -/**/ - -var Buffer = require('buffer').Buffer; - -util.inherits(Writable, Stream); - -function nop() {} - -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} - -var Duplex; -function WritableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!options.objectMode; - - if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - var hwm = options.highWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; - - // cast to ints. - this.highWaterMark = ~ ~this.highWaterMark; - - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; - - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; - - // a flag to see when we're in the middle of a write. - this.writing = false; - - // when true all writes will be buffered until .uncork() call - this.corked = 0; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false; - - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; - - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; - - // the amount that is being written when _write is called. 
- this.writelen = 0; - - this.bufferedRequest = null; - this.lastBufferedRequest = null; - - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; - - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; - - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; - - // count buffered requests - this.bufferedRequestCount = 0; - - // create the two objects needed to store the corked requests - // they are not a linked list, as no new elements are inserted in there - this.corkedRequestsFree = new CorkedRequest(this); - this.corkedRequestsFree.next = new CorkedRequest(this); -} - -WritableState.prototype.getBuffer = function writableStateGetBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; - } - return out; -}; - -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function () { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.') - }); - } catch (_) {} -})(); - -var Duplex; -function Writable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - // Writable ctor is applied to Duplexes, though they're not - // instanceof Writable, they're instanceof Readable. - if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); - - this._writableState = new WritableState(options, this); - - // legacy. - this.writable = true; - - if (options) { - if (typeof options.write === 'function') this._write = options.write; - - if (typeof options.writev === 'function') this._writev = options.writev; - } - - Stream.call(this); -} - -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - this.emit('error', new Error('Cannot pipe. Not readable.')); -}; - -function writeAfterEnd(stream, cb) { - var er = new Error('write after end'); - // TODO: defer error events consistently everywhere, not just the cb - stream.emit('error', er); - processNextTick(cb, er); -} - -// If we get something that is not a buffer, string, null, or undefined, -// and we're not in objectMode, then that's an error. -// Otherwise stream chunks are all considered to be of length=1, and the -// watermarks determine how many objects to keep in the buffer, rather than -// how many bytes or characters. 
-function validChunk(stream, state, chunk, cb) { - var valid = true; - - if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { - var er = new TypeError('Invalid non-string/buffer chunk'); - stream.emit('error', er); - processNextTick(cb, er); - valid = false; - } - return valid; -} - -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - - if (typeof cb !== 'function') cb = nop; - - if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, chunk, encoding, cb); - } - - return ret; -}; - -Writable.prototype.cork = function () { - var state = this._writableState; - - state.corked++; -}; - -Writable.prototype.uncork = function () { - var state = this._writableState; - - if (state.corked) { - state.corked--; - - if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); - } -}; - -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. - if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); - this._writableState.defaultEncoding = encoding; -}; - -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = new Buffer(chunk, encoding); - } - return chunk; -} - -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, chunk, encoding, cb) { - chunk = decodeChunk(state, chunk, encoding); - - if (Buffer.isBuffer(chunk)) encoding = 'buffer'; - var len = state.objectMode ? 1 : chunk.length; - - state.length += len; - - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. 
- if (!ret) state.needDrain = true; - - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = new WriteReq(chunk, encoding, cb); - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); - } - - return ret; -} - -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; -} - -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; - if (sync) processNextTick(cb, er);else cb(er); - - stream._writableState.errorEmitted = true; - stream.emit('error', er); -} - -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; -} - -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - - onwriteStateUpdate(state); - - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state); - - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } - - if (sync) { - /**/ - asyncWrite(afterWrite, stream, state, finished, cb); - /**/ - } else { - afterWrite(stream, state, finished, cb); - } - } -} - -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} - -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); - } -} - -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - - var count = 0; - while (entry) { - buffer[count] = entry; - entry = entry.next; - count += 1; - } - - doWrite(stream, state, true, state.length, buffer, '', holder.finish); - - // doWrite is always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. 
- if (state.writing) { - break; - } - } - - if (entry === null) state.lastBufferedRequest = null; - } - - state.bufferedRequestCount = 0; - state.bufferedRequest = entry; - state.bufferProcessing = false; -} - -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new Error('not implemented')); -}; - -Writable.prototype._writev = null; - -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; - - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); - } - - // ignore unnecessary end() calls. - if (!state.ending && !state.finished) endWritable(this, state, cb); -}; - -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} - -function prefinish(stream, state) { - if (!state.prefinished) { - state.prefinished = true; - stream.emit('prefinish'); - } -} - -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - if (state.pendingcb === 0) { - prefinish(stream, state); - state.finished = true; - stream.emit('finish'); - } else { - prefinish(stream, state); - } - } - return need; -} - -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) processNextTick(cb);else stream.once('finish', cb); - } - state.ended = true; - stream.writable = false; -} - -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - var _this = this; - - this.next = null; - this.entry = null; - - this.finish = function (err) { - var entry = _this.entry; - _this.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } - if (state.corkedRequestsFree) { - state.corkedRequestsFree.next = _this; - } else { - state.corkedRequestsFree = _this; - } - }; -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/LICENSE b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/LICENSE deleted file mode 100644 index d8d7f9437db..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright Node.js contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/README.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/README.md deleted file mode 100644 index 5a76b4149c5..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# core-util-is - -The `util.is*` functions introduced in Node v0.12. diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/float.patch b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/float.patch deleted file mode 100644 index a06d5c05f75..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/float.patch +++ /dev/null @@ -1,604 +0,0 @@ -diff --git a/lib/util.js b/lib/util.js -index a03e874..9074e8e 100644 ---- a/lib/util.js -+++ b/lib/util.js -@@ -19,430 +19,6 @@ - // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE - // USE OR OTHER DEALINGS IN THE SOFTWARE. - --var formatRegExp = /%[sdj%]/g; --exports.format = function(f) { -- if (!isString(f)) { -- var objects = []; -- for (var i = 0; i < arguments.length; i++) { -- objects.push(inspect(arguments[i])); -- } -- return objects.join(' '); -- } -- -- var i = 1; -- var args = arguments; -- var len = args.length; -- var str = String(f).replace(formatRegExp, function(x) { -- if (x === '%%') return '%'; -- if (i >= len) return x; -- switch (x) { -- case '%s': return String(args[i++]); -- case '%d': return Number(args[i++]); -- case '%j': -- try { -- return JSON.stringify(args[i++]); -- } catch (_) { -- return '[Circular]'; -- } -- default: -- return x; -- } -- }); -- for (var x = args[i]; i < len; x = args[++i]) { -- if (isNull(x) || !isObject(x)) { -- str += ' ' + x; -- } else { -- str += ' ' + inspect(x); -- } -- } -- return str; --}; -- -- --// Mark that a method should not be used. --// Returns a modified function which warns once by default. --// If --no-deprecation is set, then it is a no-op. --exports.deprecate = function(fn, msg) { -- // Allow for deprecating things in the process of starting up. 
-- if (isUndefined(global.process)) { -- return function() { -- return exports.deprecate(fn, msg).apply(this, arguments); -- }; -- } -- -- if (process.noDeprecation === true) { -- return fn; -- } -- -- var warned = false; -- function deprecated() { -- if (!warned) { -- if (process.throwDeprecation) { -- throw new Error(msg); -- } else if (process.traceDeprecation) { -- console.trace(msg); -- } else { -- console.error(msg); -- } -- warned = true; -- } -- return fn.apply(this, arguments); -- } -- -- return deprecated; --}; -- -- --var debugs = {}; --var debugEnviron; --exports.debuglog = function(set) { -- if (isUndefined(debugEnviron)) -- debugEnviron = process.env.NODE_DEBUG || ''; -- set = set.toUpperCase(); -- if (!debugs[set]) { -- if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) { -- var pid = process.pid; -- debugs[set] = function() { -- var msg = exports.format.apply(exports, arguments); -- console.error('%s %d: %s', set, pid, msg); -- }; -- } else { -- debugs[set] = function() {}; -- } -- } -- return debugs[set]; --}; -- -- --/** -- * Echos the value of a value. Trys to print the value out -- * in the best way possible given the different types. -- * -- * @param {Object} obj The object to print out. -- * @param {Object} opts Optional options object that alters the output. -- */ --/* legacy: obj, showHidden, depth, colors*/ --function inspect(obj, opts) { -- // default options -- var ctx = { -- seen: [], -- stylize: stylizeNoColor -- }; -- // legacy... -- if (arguments.length >= 3) ctx.depth = arguments[2]; -- if (arguments.length >= 4) ctx.colors = arguments[3]; -- if (isBoolean(opts)) { -- // legacy... -- ctx.showHidden = opts; -- } else if (opts) { -- // got an "options" object -- exports._extend(ctx, opts); -- } -- // set default options -- if (isUndefined(ctx.showHidden)) ctx.showHidden = false; -- if (isUndefined(ctx.depth)) ctx.depth = 2; -- if (isUndefined(ctx.colors)) ctx.colors = false; -- if (isUndefined(ctx.customInspect)) ctx.customInspect = true; -- if (ctx.colors) ctx.stylize = stylizeWithColor; -- return formatValue(ctx, obj, ctx.depth); --} --exports.inspect = inspect; -- -- --// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics --inspect.colors = { -- 'bold' : [1, 22], -- 'italic' : [3, 23], -- 'underline' : [4, 24], -- 'inverse' : [7, 27], -- 'white' : [37, 39], -- 'grey' : [90, 39], -- 'black' : [30, 39], -- 'blue' : [34, 39], -- 'cyan' : [36, 39], -- 'green' : [32, 39], -- 'magenta' : [35, 39], -- 'red' : [31, 39], -- 'yellow' : [33, 39] --}; -- --// Don't use 'blue' not visible on cmd.exe --inspect.styles = { -- 'special': 'cyan', -- 'number': 'yellow', -- 'boolean': 'yellow', -- 'undefined': 'grey', -- 'null': 'bold', -- 'string': 'green', -- 'date': 'magenta', -- // "name": intentionally not styling -- 'regexp': 'red' --}; -- -- --function stylizeWithColor(str, styleType) { -- var style = inspect.styles[styleType]; -- -- if (style) { -- return '\u001b[' + inspect.colors[style][0] + 'm' + str + -- '\u001b[' + inspect.colors[style][1] + 'm'; -- } else { -- return str; -- } --} -- -- --function stylizeNoColor(str, styleType) { -- return str; --} -- -- --function arrayToHash(array) { -- var hash = {}; -- -- array.forEach(function(val, idx) { -- hash[val] = true; -- }); -- -- return hash; --} -- -- --function formatValue(ctx, value, recurseTimes) { -- // Provide a hook for user-specified inspect functions. 
-- // Check that value is an object with an inspect function on it -- if (ctx.customInspect && -- value && -- isFunction(value.inspect) && -- // Filter out the util module, it's inspect function is special -- value.inspect !== exports.inspect && -- // Also filter out any prototype objects using the circular check. -- !(value.constructor && value.constructor.prototype === value)) { -- var ret = value.inspect(recurseTimes, ctx); -- if (!isString(ret)) { -- ret = formatValue(ctx, ret, recurseTimes); -- } -- return ret; -- } -- -- // Primitive types cannot have properties -- var primitive = formatPrimitive(ctx, value); -- if (primitive) { -- return primitive; -- } -- -- // Look up the keys of the object. -- var keys = Object.keys(value); -- var visibleKeys = arrayToHash(keys); -- -- if (ctx.showHidden) { -- keys = Object.getOwnPropertyNames(value); -- } -- -- // Some type of object without properties can be shortcutted. -- if (keys.length === 0) { -- if (isFunction(value)) { -- var name = value.name ? ': ' + value.name : ''; -- return ctx.stylize('[Function' + name + ']', 'special'); -- } -- if (isRegExp(value)) { -- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); -- } -- if (isDate(value)) { -- return ctx.stylize(Date.prototype.toString.call(value), 'date'); -- } -- if (isError(value)) { -- return formatError(value); -- } -- } -- -- var base = '', array = false, braces = ['{', '}']; -- -- // Make Array say that they are Array -- if (isArray(value)) { -- array = true; -- braces = ['[', ']']; -- } -- -- // Make functions say that they are functions -- if (isFunction(value)) { -- var n = value.name ? ': ' + value.name : ''; -- base = ' [Function' + n + ']'; -- } -- -- // Make RegExps say that they are RegExps -- if (isRegExp(value)) { -- base = ' ' + RegExp.prototype.toString.call(value); -- } -- -- // Make dates with properties first say the date -- if (isDate(value)) { -- base = ' ' + Date.prototype.toUTCString.call(value); -- } -- -- // Make error with message first say the error -- if (isError(value)) { -- base = ' ' + formatError(value); -- } -- -- if (keys.length === 0 && (!array || value.length == 0)) { -- return braces[0] + base + braces[1]; -- } -- -- if (recurseTimes < 0) { -- if (isRegExp(value)) { -- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); -- } else { -- return ctx.stylize('[Object]', 'special'); -- } -- } -- -- ctx.seen.push(value); -- -- var output; -- if (array) { -- output = formatArray(ctx, value, recurseTimes, visibleKeys, keys); -- } else { -- output = keys.map(function(key) { -- return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array); -- }); -- } -- -- ctx.seen.pop(); -- -- return reduceToSingleString(output, base, braces); --} -- -- --function formatPrimitive(ctx, value) { -- if (isUndefined(value)) -- return ctx.stylize('undefined', 'undefined'); -- if (isString(value)) { -- var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '') -- .replace(/'/g, "\\'") -- .replace(/\\"/g, '"') + '\''; -- return ctx.stylize(simple, 'string'); -- } -- if (isNumber(value)) { -- // Format -0 as '-0'. Strict equality won't distinguish 0 from -0, -- // so instead we use the fact that 1 / -0 < 0 whereas 1 / 0 > 0 . -- if (value === 0 && 1 / value < 0) -- return ctx.stylize('-0', 'number'); -- return ctx.stylize('' + value, 'number'); -- } -- if (isBoolean(value)) -- return ctx.stylize('' + value, 'boolean'); -- // For some reason typeof null is "object", so special case here. 
-- if (isNull(value)) -- return ctx.stylize('null', 'null'); --} -- -- --function formatError(value) { -- return '[' + Error.prototype.toString.call(value) + ']'; --} -- -- --function formatArray(ctx, value, recurseTimes, visibleKeys, keys) { -- var output = []; -- for (var i = 0, l = value.length; i < l; ++i) { -- if (hasOwnProperty(value, String(i))) { -- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, -- String(i), true)); -- } else { -- output.push(''); -- } -- } -- keys.forEach(function(key) { -- if (!key.match(/^\d+$/)) { -- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, -- key, true)); -- } -- }); -- return output; --} -- -- --function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) { -- var name, str, desc; -- desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] }; -- if (desc.get) { -- if (desc.set) { -- str = ctx.stylize('[Getter/Setter]', 'special'); -- } else { -- str = ctx.stylize('[Getter]', 'special'); -- } -- } else { -- if (desc.set) { -- str = ctx.stylize('[Setter]', 'special'); -- } -- } -- if (!hasOwnProperty(visibleKeys, key)) { -- name = '[' + key + ']'; -- } -- if (!str) { -- if (ctx.seen.indexOf(desc.value) < 0) { -- if (isNull(recurseTimes)) { -- str = formatValue(ctx, desc.value, null); -- } else { -- str = formatValue(ctx, desc.value, recurseTimes - 1); -- } -- if (str.indexOf('\n') > -1) { -- if (array) { -- str = str.split('\n').map(function(line) { -- return ' ' + line; -- }).join('\n').substr(2); -- } else { -- str = '\n' + str.split('\n').map(function(line) { -- return ' ' + line; -- }).join('\n'); -- } -- } -- } else { -- str = ctx.stylize('[Circular]', 'special'); -- } -- } -- if (isUndefined(name)) { -- if (array && key.match(/^\d+$/)) { -- return str; -- } -- name = JSON.stringify('' + key); -- if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) { -- name = name.substr(1, name.length - 2); -- name = ctx.stylize(name, 'name'); -- } else { -- name = name.replace(/'/g, "\\'") -- .replace(/\\"/g, '"') -- .replace(/(^"|"$)/g, "'"); -- name = ctx.stylize(name, 'string'); -- } -- } -- -- return name + ': ' + str; --} -- -- --function reduceToSingleString(output, base, braces) { -- var numLinesEst = 0; -- var length = output.reduce(function(prev, cur) { -- numLinesEst++; -- if (cur.indexOf('\n') >= 0) numLinesEst++; -- return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1; -- }, 0); -- -- if (length > 60) { -- return braces[0] + -- (base === '' ? '' : base + '\n ') + -- ' ' + -- output.join(',\n ') + -- ' ' + -- braces[1]; -- } -- -- return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1]; --} -- -- - // NOTE: These type checking functions intentionally don't use `instanceof` - // because it is fragile and can be easily faked with `Object.create()`. - function isArray(ar) { -@@ -522,166 +98,10 @@ function isPrimitive(arg) { - exports.isPrimitive = isPrimitive; - - function isBuffer(arg) { -- return arg instanceof Buffer; -+ return Buffer.isBuffer(arg); - } - exports.isBuffer = isBuffer; - - function objectToString(o) { - return Object.prototype.toString.call(o); --} -- -- --function pad(n) { -- return n < 10 ? 
'0' + n.toString(10) : n.toString(10); --} -- -- --var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', -- 'Oct', 'Nov', 'Dec']; -- --// 26 Feb 16:19:34 --function timestamp() { -- var d = new Date(); -- var time = [pad(d.getHours()), -- pad(d.getMinutes()), -- pad(d.getSeconds())].join(':'); -- return [d.getDate(), months[d.getMonth()], time].join(' '); --} -- -- --// log is just a thin wrapper to console.log that prepends a timestamp --exports.log = function() { -- console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments)); --}; -- -- --/** -- * Inherit the prototype methods from one constructor into another. -- * -- * The Function.prototype.inherits from lang.js rewritten as a standalone -- * function (not on Function.prototype). NOTE: If this file is to be loaded -- * during bootstrapping this function needs to be rewritten using some native -- * functions as prototype setup using normal JavaScript does not work as -- * expected during bootstrapping (see mirror.js in r114903). -- * -- * @param {function} ctor Constructor function which needs to inherit the -- * prototype. -- * @param {function} superCtor Constructor function to inherit prototype from. -- */ --exports.inherits = function(ctor, superCtor) { -- ctor.super_ = superCtor; -- ctor.prototype = Object.create(superCtor.prototype, { -- constructor: { -- value: ctor, -- enumerable: false, -- writable: true, -- configurable: true -- } -- }); --}; -- --exports._extend = function(origin, add) { -- // Don't do anything if add isn't an object -- if (!add || !isObject(add)) return origin; -- -- var keys = Object.keys(add); -- var i = keys.length; -- while (i--) { -- origin[keys[i]] = add[keys[i]]; -- } -- return origin; --}; -- --function hasOwnProperty(obj, prop) { -- return Object.prototype.hasOwnProperty.call(obj, prop); --} -- -- --// Deprecated old stuff. 
-- --exports.p = exports.deprecate(function() { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- console.error(exports.inspect(arguments[i])); -- } --}, 'util.p: Use console.error() instead'); -- -- --exports.exec = exports.deprecate(function() { -- return require('child_process').exec.apply(this, arguments); --}, 'util.exec is now called `child_process.exec`.'); -- -- --exports.print = exports.deprecate(function() { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- process.stdout.write(String(arguments[i])); -- } --}, 'util.print: Use console.log instead'); -- -- --exports.puts = exports.deprecate(function() { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- process.stdout.write(arguments[i] + '\n'); -- } --}, 'util.puts: Use console.log instead'); -- -- --exports.debug = exports.deprecate(function(x) { -- process.stderr.write('DEBUG: ' + x + '\n'); --}, 'util.debug: Use console.error instead'); -- -- --exports.error = exports.deprecate(function(x) { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- process.stderr.write(arguments[i] + '\n'); -- } --}, 'util.error: Use console.error instead'); -- -- --exports.pump = exports.deprecate(function(readStream, writeStream, callback) { -- var callbackCalled = false; -- -- function call(a, b, c) { -- if (callback && !callbackCalled) { -- callback(a, b, c); -- callbackCalled = true; -- } -- } -- -- readStream.addListener('data', function(chunk) { -- if (writeStream.write(chunk) === false) readStream.pause(); -- }); -- -- writeStream.addListener('drain', function() { -- readStream.resume(); -- }); -- -- readStream.addListener('end', function() { -- writeStream.end(); -- }); -- -- readStream.addListener('close', function() { -- call(); -- }); -- -- readStream.addListener('error', function(err) { -- writeStream.end(); -- call(err); -- }); -- -- writeStream.addListener('error', function(err) { -- readStream.destroy(); -- call(err); -- }); --}, 'util.pump(): Use readableStream.pipe() instead'); -- -- --var uv; --exports._errnoException = function(err, syscall) { -- if (isUndefined(uv)) uv = process.binding('uv'); -- var errname = uv.errname(err); -- var e = new Error(syscall + ' ' + errname); -- e.code = errname; -- e.errno = errname; -- e.syscall = syscall; -- return e; --}; -+} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/lib/util.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/lib/util.js deleted file mode 100644 index ff4c851c075..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/lib/util.js +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// NOTE: These type checking functions intentionally don't use `instanceof` -// because it is fragile and can be easily faked with `Object.create()`. - -function isArray(arg) { - if (Array.isArray) { - return Array.isArray(arg); - } - return objectToString(arg) === '[object Array]'; -} -exports.isArray = isArray; - -function isBoolean(arg) { - return typeof arg === 'boolean'; -} -exports.isBoolean = isBoolean; - -function isNull(arg) { - return arg === null; -} -exports.isNull = isNull; - -function isNullOrUndefined(arg) { - return arg == null; -} -exports.isNullOrUndefined = isNullOrUndefined; - -function isNumber(arg) { - return typeof arg === 'number'; -} -exports.isNumber = isNumber; - -function isString(arg) { - return typeof arg === 'string'; -} -exports.isString = isString; - -function isSymbol(arg) { - return typeof arg === 'symbol'; -} -exports.isSymbol = isSymbol; - -function isUndefined(arg) { - return arg === void 0; -} -exports.isUndefined = isUndefined; - -function isRegExp(re) { - return objectToString(re) === '[object RegExp]'; -} -exports.isRegExp = isRegExp; - -function isObject(arg) { - return typeof arg === 'object' && arg !== null; -} -exports.isObject = isObject; - -function isDate(d) { - return objectToString(d) === '[object Date]'; -} -exports.isDate = isDate; - -function isError(e) { - return (objectToString(e) === '[object Error]' || e instanceof Error); -} -exports.isError = isError; - -function isFunction(arg) { - return typeof arg === 'function'; -} -exports.isFunction = isFunction; - -function isPrimitive(arg) { - return arg === null || - typeof arg === 'boolean' || - typeof arg === 'number' || - typeof arg === 'string' || - typeof arg === 'symbol' || // ES6 symbol - typeof arg === 'undefined'; -} -exports.isPrimitive = isPrimitive; - -exports.isBuffer = Buffer.isBuffer; - -function objectToString(o) { - return Object.prototype.toString.call(o); -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json deleted file mode 100644 index f2794488b08..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/package.json +++ /dev/null @@ -1,94 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "core-util-is@~1.0.0", - "scope": null, - "escapedName": "core-util-is", - "name": "core-util-is", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ] - ], - "_from": "core-util-is@>=1.0.0 <1.1.0", - "_id": "core-util-is@1.0.2", - "_inCache": true, - "_location": "/mississippi/concat-stream/readable-stream/core-util-is", - "_nodeVersion": "4.0.0", - "_npmUser": { - "name": "isaacs", - "email": "i@izs.me" - }, - "_npmVersion": "3.3.2", - "_phantomChildren": {}, - 
"_requested": { - "raw": "core-util-is@~1.0.0", - "scope": null, - "escapedName": "core-util-is", - "name": "core-util-is", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/concat-stream/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "_shasum": "b5fd54220aa2bc5ab57aab7140c940754503c1a7", - "_shrinkwrap": null, - "_spec": "core-util-is@~1.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "bugs": { - "url": "https://github.com/isaacs/core-util-is/issues" - }, - "dependencies": {}, - "description": "The `util.is*` functions introduced in Node v0.12.", - "devDependencies": { - "tap": "^2.3.0" - }, - "directories": {}, - "dist": { - "shasum": "b5fd54220aa2bc5ab57aab7140c940754503c1a7", - "tarball": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" - }, - "gitHead": "a177da234df5638b363ddc15fa324619a38577c8", - "homepage": "https://github.com/isaacs/core-util-is#readme", - "keywords": [ - "util", - "isBuffer", - "isArray", - "isNumber", - "isString", - "isRegExp", - "isThis", - "isThat", - "polyfill" - ], - "license": "MIT", - "main": "lib/util.js", - "maintainers": [ - { - "name": "isaacs", - "email": "i@izs.me" - } - ], - "name": "core-util-is", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/core-util-is.git" - }, - "scripts": { - "test": "tap test.js" - }, - "version": "1.0.2" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/test.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/test.js deleted file mode 100644 index 1a490c65ac8..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/core-util-is/test.js +++ /dev/null @@ -1,68 +0,0 @@ -var assert = require('tap'); - -var t = require('./lib/util'); - -assert.equal(t.isArray([]), true); -assert.equal(t.isArray({}), false); - -assert.equal(t.isBoolean(null), false); -assert.equal(t.isBoolean(true), true); -assert.equal(t.isBoolean(false), true); - -assert.equal(t.isNull(null), true); -assert.equal(t.isNull(undefined), false); -assert.equal(t.isNull(false), false); -assert.equal(t.isNull(), false); - -assert.equal(t.isNullOrUndefined(null), true); -assert.equal(t.isNullOrUndefined(undefined), true); -assert.equal(t.isNullOrUndefined(false), false); -assert.equal(t.isNullOrUndefined(), true); - -assert.equal(t.isNumber(null), false); -assert.equal(t.isNumber('1'), false); -assert.equal(t.isNumber(1), true); - -assert.equal(t.isString(null), false); -assert.equal(t.isString('1'), true); -assert.equal(t.isString(1), false); - -assert.equal(t.isSymbol(null), false); -assert.equal(t.isSymbol('1'), false); -assert.equal(t.isSymbol(1), false); -assert.equal(t.isSymbol(Symbol()), true); - -assert.equal(t.isUndefined(null), false); -assert.equal(t.isUndefined(undefined), true); -assert.equal(t.isUndefined(false), false); -assert.equal(t.isUndefined(), true); - -assert.equal(t.isRegExp(null), false); -assert.equal(t.isRegExp('1'), false); -assert.equal(t.isRegExp(new RegExp()), true); - -assert.equal(t.isObject({}), true); 
-assert.equal(t.isObject([]), true); -assert.equal(t.isObject(new RegExp()), true); -assert.equal(t.isObject(new Date()), true); - -assert.equal(t.isDate(null), false); -assert.equal(t.isDate('1'), false); -assert.equal(t.isDate(new Date()), true); - -assert.equal(t.isError(null), false); -assert.equal(t.isError({ err: true }), false); -assert.equal(t.isError(new Error()), true); - -assert.equal(t.isFunction(null), false); -assert.equal(t.isFunction({ }), false); -assert.equal(t.isFunction(function() {}), true); - -assert.equal(t.isPrimitive(null), true); -assert.equal(t.isPrimitive(''), true); -assert.equal(t.isPrimitive(0), true); -assert.equal(t.isPrimitive(new Date()), false); - -assert.equal(t.isBuffer(null), false); -assert.equal(t.isBuffer({}), false); -assert.equal(t.isBuffer(new Buffer(0)), true); diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/Makefile b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/Makefile deleted file mode 100644 index 0ecc29c402c..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/Makefile +++ /dev/null @@ -1,5 +0,0 @@ - -test: - @node_modules/.bin/tape test.js - -.PHONY: test diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/README.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/README.md deleted file mode 100644 index 16d2c59c619..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/README.md +++ /dev/null @@ -1,60 +0,0 @@ - -# isarray - -`Array#isArray` for older browsers. - -[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray) -[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray) - -[![browser support](https://ci.testling.com/juliangruber/isarray.png) -](https://ci.testling.com/juliangruber/isarray) - -## Usage - -```js -var isArray = require('isarray'); - -console.log(isArray([])); // => true -console.log(isArray({})); // => false -``` - -## Installation - -With [npm](http://npmjs.org) do - -```bash -$ npm install isarray -``` - -Then bundle for the browser with -[browserify](https://github.com/substack/browserify). - -With [component](http://component.io) do - -```bash -$ component install juliangruber/isarray -``` - -## License - -(MIT) - -Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/component.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/component.json deleted file mode 100644 index 9e31b683889..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/component.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name" : "isarray", - "description" : "Array#isArray for older browsers", - "version" : "0.0.1", - "repository" : "juliangruber/isarray", - "homepage": "https://github.com/juliangruber/isarray", - "main" : "index.js", - "scripts" : [ - "index.js" - ], - "dependencies" : {}, - "keywords": ["browser","isarray","array"], - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "license": "MIT" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/index.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/index.js deleted file mode 100644 index a57f6349594..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/index.js +++ /dev/null @@ -1,5 +0,0 @@ -var toString = {}.toString; - -module.exports = Array.isArray || function (arr) { - return toString.call(arr) == '[object Array]'; -}; diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json deleted file mode 100644 index 529beb69b77..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/package.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "isarray@~1.0.0", - "scope": null, - "escapedName": "isarray", - "name": "isarray", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ] - ], - "_from": "isarray@>=1.0.0 <1.1.0", - "_id": "isarray@1.0.0", - "_inCache": true, - "_location": "/mississippi/concat-stream/readable-stream/isarray", - "_nodeVersion": "5.1.0", - "_npmUser": { - "name": "juliangruber", - "email": "julian@juliangruber.com" - }, - "_npmVersion": "3.3.12", - "_phantomChildren": {}, - "_requested": { - "raw": "isarray@~1.0.0", - "scope": null, - "escapedName": "isarray", - "name": "isarray", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/concat-stream/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "_shasum": "bb935d48582cba168c06834957a54a3e07124f11", - "_shrinkwrap": null, - "_spec": "isarray@~1.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream", - "author": { - "name": "Julian Gruber", - "email": 
"mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "bugs": { - "url": "https://github.com/juliangruber/isarray/issues" - }, - "dependencies": {}, - "description": "Array#isArray for older browsers", - "devDependencies": { - "tape": "~2.13.4" - }, - "directories": {}, - "dist": { - "shasum": "bb935d48582cba168c06834957a54a3e07124f11", - "tarball": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" - }, - "gitHead": "2a23a281f369e9ae06394c0fb4d2381355a6ba33", - "homepage": "https://github.com/juliangruber/isarray", - "keywords": [ - "browser", - "isarray", - "array" - ], - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "juliangruber", - "email": "julian@juliangruber.com" - } - ], - "name": "isarray", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/juliangruber/isarray.git" - }, - "scripts": { - "test": "tape test.js" - }, - "testling": { - "files": "test.js", - "browsers": [ - "ie/8..latest", - "firefox/17..latest", - "firefox/nightly", - "chrome/22..latest", - "chrome/canary", - "opera/12..latest", - "opera/next", - "safari/5.1..latest", - "ipad/6.0..latest", - "iphone/6.0..latest", - "android-browser/4.2..latest" - ] - }, - "version": "1.0.0" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/test.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/test.js deleted file mode 100644 index f7f7bcd19fe..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/test.js +++ /dev/null @@ -1,19 +0,0 @@ -var isArray = require('./'); -var test = require('tape'); - -test('is array', function(t){ - t.ok(isArray([])); - t.notOk(isArray({})); - t.notOk(isArray(null)); - t.notOk(isArray(false)); - - var obj = {}; - obj[0] = true; - t.notOk(isArray(obj)); - - var arr = []; - arr.foo = 'bar'; - t.ok(isArray(arr)); - - t.end(); -}); diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml deleted file mode 100644 index 36201b10017..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: node_js -node_js: - - "0.8" - - "0.10" - - "0.11" - - "0.12" - - "1.7.1" - - 1 - - 2 - - 3 - - 4 - - 5 diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/index.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/index.js deleted file mode 100644 index a4f40f845fa..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/index.js +++ /dev/null @@ -1,43 +0,0 @@ -'use strict'; - -if (!process.version || - process.version.indexOf('v0.') === 0 || - process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { - module.exports = nextTick; -} else { - module.exports = process.nextTick; -} - -function nextTick(fn, arg1, arg2, arg3) { - if (typeof fn !== 'function') { 
- throw new TypeError('"callback" argument must be a function'); - } - var len = arguments.length; - var args, i; - switch (len) { - case 0: - case 1: - return process.nextTick(fn); - case 2: - return process.nextTick(function afterTickOne() { - fn.call(null, arg1); - }); - case 3: - return process.nextTick(function afterTickTwo() { - fn.call(null, arg1, arg2); - }); - case 4: - return process.nextTick(function afterTickThree() { - fn.call(null, arg1, arg2, arg3); - }); - default: - args = new Array(len - 1); - i = 0; - while (i < args.length) { - args[i++] = arguments[i]; - } - return process.nextTick(function afterTick() { - fn.apply(null, args); - }); - } -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/package.json deleted file mode 100644 index c9d5f61669b..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/package.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "process-nextick-args@~1.0.6", - "scope": null, - "escapedName": "process-nextick-args", - "name": "process-nextick-args", - "rawSpec": "~1.0.6", - "spec": ">=1.0.6 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ] - ], - "_from": "process-nextick-args@>=1.0.6 <1.1.0", - "_id": "process-nextick-args@1.0.7", - "_inCache": true, - "_location": "/mississippi/concat-stream/readable-stream/process-nextick-args", - "_nodeVersion": "5.11.0", - "_npmOperationalInternal": { - "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/process-nextick-args-1.0.7.tgz_1462394251778_0.36989671061746776" - }, - "_npmUser": { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - }, - "_npmVersion": "3.8.6", - "_phantomChildren": {}, - "_requested": { - "raw": "process-nextick-args@~1.0.6", - "scope": null, - "escapedName": "process-nextick-args", - "name": "process-nextick-args", - "rawSpec": "~1.0.6", - "spec": ">=1.0.6 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/concat-stream/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", - "_shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3", - "_shrinkwrap": null, - "_spec": "process-nextick-args@~1.0.6", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream", - "author": "", - "bugs": { - "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" - }, - "dependencies": {}, - "description": "process.nextTick but always with args", - "devDependencies": { - "tap": "~0.2.6" - }, - "directories": {}, - "dist": { - "shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3", - "tarball": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" - }, - "gitHead": "5c00899ab01dd32f93ad4b5743da33da91404f39", - "homepage": "https://github.com/calvinmetcalf/process-nextick-args", - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - } - ], - "name": "process-nextick-args", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": 
"git+https://github.com/calvinmetcalf/process-nextick-args.git" - }, - "scripts": { - "test": "node test.js" - }, - "version": "1.0.7" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/readme.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/readme.md deleted file mode 100644 index 78e7cfaeb7a..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/readme.md +++ /dev/null @@ -1,18 +0,0 @@ -process-nextick-args -===== - -[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) - -```bash -npm install --save process-nextick-args -``` - -Always be able to pass arguments to process.nextTick, no matter the platform - -```js -var nextTick = require('process-nextick-args'); - -nextTick(function (a, b, c) { - console.log(a, b, c); -}, 'step', 3, 'profit'); -``` diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/test.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/test.js deleted file mode 100644 index ef15721584a..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/test.js +++ /dev/null @@ -1,24 +0,0 @@ -var test = require("tap").test; -var nextTick = require('./'); - -test('should work', function (t) { - t.plan(5); - nextTick(function (a) { - t.ok(a); - nextTick(function (thing) { - t.equals(thing, 7); - }, 7); - }, true); - nextTick(function (a, b, c) { - t.equals(a, 'step'); - t.equals(b, 3); - t.equals(c, 'profit'); - }, 'step', 3, 'profit'); -}); - -test('correct number of arguments', function (t) { - t.plan(1); - nextTick(function () { - t.equals(2, arguments.length, 'correct number'); - }, 1, 2); -}); diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/.npmignore b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/.npmignore deleted file mode 100644 index 206320cc1d2..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/.npmignore +++ /dev/null @@ -1,2 +0,0 @@ -build -test diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/LICENSE b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/LICENSE deleted file mode 100644 index 6de584a48f5..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright Joyent, Inc. and other Node contributors. 
- -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to permit -persons to whom the Software is furnished to do so, subject to the -following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/README.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/README.md deleted file mode 100644 index 4d2aa001501..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/README.md +++ /dev/null @@ -1,7 +0,0 @@ -**string_decoder.js** (`require('string_decoder')`) from Node.js core - -Copyright Joyent, Inc. and other Node contributors. See LICENCE file for details. - -Version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.** - -The *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version. \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/index.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/index.js deleted file mode 100644 index b00e54fb790..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/index.js +++ /dev/null @@ -1,221 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -var Buffer = require('buffer').Buffer; - -var isBufferEncoding = Buffer.isEncoding - || function(encoding) { - switch (encoding && encoding.toLowerCase()) { - case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true; - default: return false; - } - } - - -function assertEncoding(encoding) { - if (encoding && !isBufferEncoding(encoding)) { - throw new Error('Unknown encoding: ' + encoding); - } -} - -// StringDecoder provides an interface for efficiently splitting a series of -// buffers into a series of JS strings without breaking apart multi-byte -// characters. CESU-8 is handled as part of the UTF-8 encoding. -// -// @TODO Handling all encodings inside a single object makes it very difficult -// to reason about this code, so it should be split up in the future. -// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code -// points as used by CESU-8. -var StringDecoder = exports.StringDecoder = function(encoding) { - this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, ''); - assertEncoding(encoding); - switch (this.encoding) { - case 'utf8': - // CESU-8 represents each of Surrogate Pair by 3-bytes - this.surrogateSize = 3; - break; - case 'ucs2': - case 'utf16le': - // UTF-16 represents each of Surrogate Pair by 2-bytes - this.surrogateSize = 2; - this.detectIncompleteChar = utf16DetectIncompleteChar; - break; - case 'base64': - // Base-64 stores 3 bytes in 4 chars, and pads the remainder. - this.surrogateSize = 3; - this.detectIncompleteChar = base64DetectIncompleteChar; - break; - default: - this.write = passThroughWrite; - return; - } - - // Enough space to store all bytes of a single character. UTF-8 needs 4 - // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate). - this.charBuffer = new Buffer(6); - // Number of bytes received for the current incomplete multi-byte character. - this.charReceived = 0; - // Number of bytes expected for the current incomplete multi-byte character. - this.charLength = 0; -}; - - -// write decodes the given buffer and returns it as JS string that is -// guaranteed to not contain any partial multi-byte characters. Any partial -// character found at the end of the buffer is buffered up, and will be -// returned when calling write again with the remaining bytes. -// -// Note: Converting a Buffer containing an orphan surrogate to a String -// currently works, but converting a String to a Buffer (via `new Buffer`, or -// Buffer#write) will replace incomplete surrogates with the unicode -// replacement character. See https://codereview.chromium.org/121173009/ . -StringDecoder.prototype.write = function(buffer) { - var charStr = ''; - // if our last write ended with an incomplete multibyte character - while (this.charLength) { - // determine how many remaining bytes this buffer has to offer for this char - var available = (buffer.length >= this.charLength - this.charReceived) ? 
- this.charLength - this.charReceived : - buffer.length; - - // add the new bytes to the char buffer - buffer.copy(this.charBuffer, this.charReceived, 0, available); - this.charReceived += available; - - if (this.charReceived < this.charLength) { - // still not enough chars in this buffer? wait for more ... - return ''; - } - - // remove bytes belonging to the current character from the buffer - buffer = buffer.slice(available, buffer.length); - - // get the character that was split - charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding); - - // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character - var charCode = charStr.charCodeAt(charStr.length - 1); - if (charCode >= 0xD800 && charCode <= 0xDBFF) { - this.charLength += this.surrogateSize; - charStr = ''; - continue; - } - this.charReceived = this.charLength = 0; - - // if there are no more bytes in this buffer, just emit our char - if (buffer.length === 0) { - return charStr; - } - break; - } - - // determine and set charLength / charReceived - this.detectIncompleteChar(buffer); - - var end = buffer.length; - if (this.charLength) { - // buffer the incomplete character bytes we got - buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end); - end -= this.charReceived; - } - - charStr += buffer.toString(this.encoding, 0, end); - - var end = charStr.length - 1; - var charCode = charStr.charCodeAt(end); - // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character - if (charCode >= 0xD800 && charCode <= 0xDBFF) { - var size = this.surrogateSize; - this.charLength += size; - this.charReceived += size; - this.charBuffer.copy(this.charBuffer, size, 0, size); - buffer.copy(this.charBuffer, 0, 0, size); - return charStr.substring(0, end); - } - - // or just emit the charStr - return charStr; -}; - -// detectIncompleteChar determines if there is an incomplete UTF-8 character at -// the end of the given buffer. If so, it sets this.charLength to the byte -// length that character, and sets this.charReceived to the number of bytes -// that are available for this character. -StringDecoder.prototype.detectIncompleteChar = function(buffer) { - // determine how many bytes we have to check at the end of this buffer - var i = (buffer.length >= 3) ? 3 : buffer.length; - - // Figure out if one of the last i bytes of our buffer announces an - // incomplete char. - for (; i > 0; i--) { - var c = buffer[buffer.length - i]; - - // See http://en.wikipedia.org/wiki/UTF-8#Description - - // 110XXXXX - if (i == 1 && c >> 5 == 0x06) { - this.charLength = 2; - break; - } - - // 1110XXXX - if (i <= 2 && c >> 4 == 0x0E) { - this.charLength = 3; - break; - } - - // 11110XXX - if (i <= 3 && c >> 3 == 0x1E) { - this.charLength = 4; - break; - } - } - this.charReceived = i; -}; - -StringDecoder.prototype.end = function(buffer) { - var res = ''; - if (buffer && buffer.length) - res = this.write(buffer); - - if (this.charReceived) { - var cr = this.charReceived; - var buf = this.charBuffer; - var enc = this.encoding; - res += buf.slice(0, cr).toString(enc); - } - - return res; -}; - -function passThroughWrite(buffer) { - return buffer.toString(this.encoding); -} - -function utf16DetectIncompleteChar(buffer) { - this.charReceived = buffer.length % 2; - this.charLength = this.charReceived ? 2 : 0; -} - -function base64DetectIncompleteChar(buffer) { - this.charReceived = buffer.length % 3; - this.charLength = this.charReceived ? 
3 : 0; -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json deleted file mode 100644 index 4bc4749854b..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/string_decoder/package.json +++ /dev/null @@ -1,87 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "string_decoder@~0.10.x", - "scope": null, - "escapedName": "string_decoder", - "name": "string_decoder", - "rawSpec": "~0.10.x", - "spec": ">=0.10.0 <0.11.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ] - ], - "_from": "string_decoder@>=0.10.0 <0.11.0", - "_id": "string_decoder@0.10.31", - "_inCache": true, - "_location": "/mississippi/concat-stream/readable-stream/string_decoder", - "_npmUser": { - "name": "rvagg", - "email": "rod@vagg.org" - }, - "_npmVersion": "1.4.23", - "_phantomChildren": {}, - "_requested": { - "raw": "string_decoder@~0.10.x", - "scope": null, - "escapedName": "string_decoder", - "name": "string_decoder", - "rawSpec": "~0.10.x", - "spec": ">=0.10.0 <0.11.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/concat-stream/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "_shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94", - "_shrinkwrap": null, - "_spec": "string_decoder@~0.10.x", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream", - "bugs": { - "url": "https://github.com/rvagg/string_decoder/issues" - }, - "dependencies": {}, - "description": "The string_decoder module from Node core", - "devDependencies": { - "tap": "~0.4.8" - }, - "directories": {}, - "dist": { - "shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94", - "tarball": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" - }, - "gitHead": "d46d4fd87cf1d06e031c23f1ba170ca7d4ade9a0", - "homepage": "https://github.com/rvagg/string_decoder", - "keywords": [ - "string", - "decoder", - "browser", - "browserify" - ], - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "substack", - "email": "mail@substack.net" - }, - { - "name": "rvagg", - "email": "rod@vagg.org" - } - ], - "name": "string_decoder", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/rvagg/string_decoder.git" - }, - "scripts": { - "test": "tap test/simple/*.js" - }, - "version": "0.10.31" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/History.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/History.md deleted file mode 100644 index acc8675372e..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/History.md +++ /dev/null @@ -1,16 +0,0 @@ - -1.0.2 / 2015-10-07 -================== - - * use try/catch when checking `localStorage` (#3, @kumavis) - -1.0.1 / 2014-11-25 -================== - - * browser: use `console.warn()` for deprecation calls - * browser: more jsdocs - -1.0.0 / 2014-04-30 
-================== - - * initial commit diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/LICENSE b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/LICENSE deleted file mode 100644 index 6a60e8c225c..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/LICENSE +++ /dev/null @@ -1,24 +0,0 @@ -(The MIT License) - -Copyright (c) 2014 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/README.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/README.md deleted file mode 100644 index 75622fa7c25..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/README.md +++ /dev/null @@ -1,53 +0,0 @@ -util-deprecate -============== -### The Node.js `util.deprecate()` function with browser support - -In Node.js, this module simply re-exports the `util.deprecate()` function. - -In the web browser (i.e. via browserify), a browser-specific implementation -of the `util.deprecate()` function is used. - - -## API - -A `deprecate()` function is the only thing exposed by this module. - -``` javascript -// setup: -exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); - - -// users see: -foo(); -// foo() is deprecated, use bar() instead -foo(); -foo(); -``` - - -## License - -(The MIT License) - -Copyright (c) 2014 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/browser.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/browser.js deleted file mode 100644 index 549ae2f065e..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/browser.js +++ /dev/null @@ -1,67 +0,0 @@ - -/** - * Module exports. - */ - -module.exports = deprecate; - -/** - * Mark that a method should not be used. - * Returns a modified function which warns once by default. - * - * If `localStorage.noDeprecation = true` is set, then it is a no-op. - * - * If `localStorage.throwDeprecation = true` is set, then deprecated functions - * will throw an Error when invoked. - * - * If `localStorage.traceDeprecation = true` is set, then deprecated functions - * will invoke `console.trace()` instead of `console.error()`. - * - * @param {Function} fn - the function to deprecate - * @param {String} msg - the string to print to the console when `fn` is invoked - * @returns {Function} a new "deprecated" version of `fn` - * @api public - */ - -function deprecate (fn, msg) { - if (config('noDeprecation')) { - return fn; - } - - var warned = false; - function deprecated() { - if (!warned) { - if (config('throwDeprecation')) { - throw new Error(msg); - } else if (config('traceDeprecation')) { - console.trace(msg); - } else { - console.warn(msg); - } - warned = true; - } - return fn.apply(this, arguments); - } - - return deprecated; -} - -/** - * Checks `localStorage` for boolean values for the given `name`. - * - * @param {String} name - * @returns {Boolean} - * @api private - */ - -function config (name) { - // accessing global.localStorage can trigger a DOMException in sandboxed iframes - try { - if (!global.localStorage) return false; - } catch (_) { - return false; - } - var val = global.localStorage[name]; - if (null == val) return false; - return String(val).toLowerCase() === 'true'; -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/node.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/node.js deleted file mode 100644 index 5e6fcff5ddd..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/node.js +++ /dev/null @@ -1,6 +0,0 @@ - -/** - * For Node.js, simply re-export the core `util.deprecate` function. 
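For context on the `util-deprecate` module whose files are removed from this nested path above, here is a minimal usage sketch; `oldSum` and the message text are illustrative and not taken from the diff. The wrapper warns once (or throws/traces when the corresponding flags are set) and then keeps delegating to the original function.

``` js
var deprecate = require('util-deprecate')

// illustrative legacy function, not part of the diff
function oldSum (a, b) {
  return a + b
}

// the returned wrapper prints the message once, then stays silent
var sum = deprecate(oldSum, 'oldSum() is deprecated, use add() instead')

console.log(sum(1, 2)) // warns, then logs 3
console.log(sum(3, 4)) // no further warning, logs 7
```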
- */ - -module.exports = require('util').deprecate; diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/package.json deleted file mode 100644 index d350c79f1b7..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/util-deprecate/package.json +++ /dev/null @@ -1,89 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "util-deprecate@~1.0.1", - "scope": null, - "escapedName": "util-deprecate", - "name": "util-deprecate", - "rawSpec": "~1.0.1", - "spec": ">=1.0.1 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ] - ], - "_from": "util-deprecate@>=1.0.1 <1.1.0", - "_id": "util-deprecate@1.0.2", - "_inCache": true, - "_location": "/mississippi/concat-stream/readable-stream/util-deprecate", - "_nodeVersion": "4.1.2", - "_npmUser": { - "name": "tootallnate", - "email": "nathan@tootallnate.net" - }, - "_npmVersion": "2.14.4", - "_phantomChildren": {}, - "_requested": { - "raw": "util-deprecate@~1.0.1", - "scope": null, - "escapedName": "util-deprecate", - "name": "util-deprecate", - "rawSpec": "~1.0.1", - "spec": ">=1.0.1 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/concat-stream/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "_shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", - "_shrinkwrap": null, - "_spec": "util-deprecate@~1.0.1", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream", - "author": { - "name": "Nathan Rajlich", - "email": "nathan@tootallnate.net", - "url": "http://n8.io/" - }, - "browser": "browser.js", - "bugs": { - "url": "https://github.com/TooTallNate/util-deprecate/issues" - }, - "dependencies": {}, - "description": "The Node.js `util.deprecate()` function with browser support", - "devDependencies": {}, - "directories": {}, - "dist": { - "shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", - "tarball": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" - }, - "gitHead": "475fb6857cd23fafff20c1be846c1350abf8e6d4", - "homepage": "https://github.com/TooTallNate/util-deprecate", - "keywords": [ - "util", - "deprecate", - "browserify", - "browser", - "node" - ], - "license": "MIT", - "main": "node.js", - "maintainers": [ - { - "name": "tootallnate", - "email": "nathan@tootallnate.net" - } - ], - "name": "util-deprecate", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/TooTallNate/util-deprecate.git" - }, - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "version": "1.0.2" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/package.json deleted file mode 100644 index df563e83657..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/package.json +++ /dev/null @@ -1,113 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "readable-stream@~2.0.0", - "scope": null, - "escapedName": "readable-stream", - "name": "readable-stream", - 
"rawSpec": "~2.0.0", - "spec": ">=2.0.0 <2.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream" - ] - ], - "_from": "readable-stream@>=2.0.0 <2.1.0", - "_id": "readable-stream@2.0.6", - "_inCache": true, - "_location": "/mississippi/concat-stream/readable-stream", - "_nodeVersion": "5.7.0", - "_npmOperationalInternal": { - "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/readable-stream-2.0.6.tgz_1457893507709_0.369257491780445" - }, - "_npmUser": { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - }, - "_npmVersion": "3.6.0", - "_phantomChildren": {}, - "_requested": { - "raw": "readable-stream@~2.0.0", - "scope": null, - "escapedName": "readable-stream", - "name": "readable-stream", - "rawSpec": "~2.0.0", - "spec": ">=2.0.0 <2.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/concat-stream" - ], - "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", - "_shasum": "8f90341e68a53ccc928788dacfcd11b36eb9b78e", - "_shrinkwrap": null, - "_spec": "readable-stream@~2.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream", - "browser": { - "util": false - }, - "bugs": { - "url": "https://github.com/nodejs/readable-stream/issues" - }, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "~1.0.0", - "process-nextick-args": "~1.0.6", - "string_decoder": "~0.10.x", - "util-deprecate": "~1.0.1" - }, - "description": "Streams3, a user-land copy of the stream library from Node.js", - "devDependencies": { - "tap": "~0.2.6", - "tape": "~4.5.1", - "zuul": "~3.9.0" - }, - "directories": {}, - "dist": { - "shasum": "8f90341e68a53ccc928788dacfcd11b36eb9b78e", - "tarball": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz" - }, - "gitHead": "01fb5608a970b42c900b96746cadc13d27dd9d7e", - "homepage": "https://github.com/nodejs/readable-stream#readme", - "keywords": [ - "readable", - "stream", - "pipe" - ], - "license": "MIT", - "main": "readable.js", - "maintainers": [ - { - "name": "isaacs", - "email": "isaacs@npmjs.com" - }, - { - "name": "tootallnate", - "email": "nathan@tootallnate.net" - }, - { - "name": "rvagg", - "email": "rod@vagg.org" - }, - { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - } - ], - "name": "readable-stream", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/nodejs/readable-stream.git" - }, - "scripts": { - "browser": "npm run write-zuul && zuul -- test/browser.js", - "test": "tap test/parallel/*.js test/ours/*.js", - "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml" - }, - "version": "2.0.6" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/passthrough.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/passthrough.js deleted file mode 100644 index 27e8d8a5516..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/passthrough.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_passthrough.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/readable.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/readable.js deleted file mode 100644 
index 6222a579864..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/readable.js +++ /dev/null @@ -1,12 +0,0 @@ -var Stream = (function (){ - try { - return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify - } catch(_){} -}()); -exports = module.exports = require('./lib/_stream_readable.js'); -exports.Stream = Stream || exports; -exports.Readable = exports; -exports.Writable = require('./lib/_stream_writable.js'); -exports.Duplex = require('./lib/_stream_duplex.js'); -exports.Transform = require('./lib/_stream_transform.js'); -exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/transform.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/transform.js deleted file mode 100644 index 5d482f0780e..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/transform.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_transform.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/writable.js b/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/writable.js deleted file mode 100644 index e1e9efdf3c1..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/writable.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_writable.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/package.json b/deps/npm/node_modules/mississippi/node_modules/concat-stream/package.json index abbb3d43623..cd01702872c 100644 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/package.json +++ b/deps/npm/node_modules/mississippi/node_modules/concat-stream/package.json @@ -14,22 +14,20 @@ ] ], "_from": "concat-stream@>=1.5.0 <2.0.0", - "_id": "concat-stream@1.5.2", + "_id": "concat-stream@1.6.0", "_inCache": true, "_location": "/mississippi/concat-stream", - "_nodeVersion": "4.4.3", + "_nodeVersion": "4.6.2", "_npmOperationalInternal": { "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/concat-stream-1.5.2.tgz_1472715196934_0.010375389130786061" + "tmp": "tmp/concat-stream-1.6.0.tgz_1482162257023_0.2988202746491879" }, "_npmUser": { "name": "mafintosh", "email": "mathiasbuus@gmail.com" }, - "_npmVersion": "2.15.9", - "_phantomChildren": { - "inherits": "2.0.3" - }, + "_npmVersion": "2.15.11", + "_phantomChildren": {}, "_requested": { "raw": "concat-stream@^1.5.0", "scope": null, @@ -42,8 +40,8 @@ "_requiredBy": [ "/mississippi" ], - "_resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.5.2.tgz", - "_shasum": "708978624d856af41a5a741defdd261da752c266", + "_resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.0.tgz", + "_shasum": "0aac662fd52be78964d5532f694784e70110acf7", "_shrinkwrap": null, "_spec": "concat-stream@^1.5.0", "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", @@ -55,18 +53,18 @@ "url": "http://github.com/maxogden/concat-stream/issues" }, "dependencies": { - "inherits": "~2.0.1", - "readable-stream": "~2.0.0", - "typedarray": "~0.0.5" + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" }, "description": "writable stream that concatenates strings or binary data and calls a callback with the 
result", "devDependencies": { - "tape": "~2.3.2" + "tape": "^4.6.3" }, "directories": {}, "dist": { - "shasum": "708978624d856af41a5a741defdd261da752c266", - "tarball": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.5.2.tgz" + "shasum": "0aac662fd52be78964d5532f694784e70110acf7", + "tarball": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.0.tgz" }, "engines": [ "node >= 0.8" @@ -74,8 +72,8 @@ "files": [ "index.js" ], - "gitHead": "731fedd137eae89d066c249fdca070f8f16afbb8", - "homepage": "https://github.com/maxogden/concat-stream#readme", + "gitHead": "e482281642c1e011fc158f5749ef40a71c77a426", + "homepage": "https://github.com/maxogden/concat-stream", "license": "MIT", "main": "index.js", "maintainers": [ @@ -120,5 +118,5 @@ "android-browser/4.2..latest" ] }, - "version": "1.5.2" + "version": "1.6.0" } diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/readme.md b/deps/npm/node_modules/mississippi/node_modules/concat-stream/readme.md index a2e1d2b1517..94787219906 100644 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/readme.md +++ b/deps/npm/node_modules/mississippi/node_modules/concat-stream/readme.md @@ -16,7 +16,7 @@ There are also `objectMode` streams that emit things other than Buffers, and you ## Related -`stream-each` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. +`concat-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. ### examples diff --git a/deps/npm/node_modules/mississippi/node_modules/duplexify/README.md b/deps/npm/node_modules/mississippi/node_modules/duplexify/README.md index 6ed497b4ad1..27669f6b6b9 100644 --- a/deps/npm/node_modules/mississippi/node_modules/duplexify/README.md +++ b/deps/npm/node_modules/mississippi/node_modules/duplexify/README.md @@ -3,7 +3,7 @@ Turn a writeable and readable stream into a single streams2 duplex stream. 
Similar to [duplexer2](https://github.com/deoxxa/duplexer2) except it supports both streams2 and streams1 as input -and it allows you to set the readable and writable part asynchroniously using `setReadable(stream)` and `setWritable(stream)` +and it allows you to set the readable and writable part asynchronously using `setReadable(stream)` and `setWritable(stream)` ``` npm install duplexify @@ -27,7 +27,7 @@ dup.on('data', function(data) { }) ``` -You can also set the readable and writable parts asynchroniously +You can also set the readable and writable parts asynchronously ``` js var dup = duplexify() diff --git a/deps/npm/node_modules/mississippi/node_modules/duplexify/index.js b/deps/npm/node_modules/mississippi/node_modules/duplexify/index.js index 377eb60ad78..a04f124fa93 100644 --- a/deps/npm/node_modules/mississippi/node_modules/duplexify/index.js +++ b/deps/npm/node_modules/mississippi/node_modules/duplexify/index.js @@ -158,7 +158,8 @@ Duplexify.prototype._forward = function() { var data - while ((data = shift(this._readable2)) !== null) { + while (this._drained && (data = shift(this._readable2)) !== null) { + if (this.destroyed) continue this._drained = this.push(data) } diff --git a/deps/npm/node_modules/mississippi/node_modules/duplexify/package.json b/deps/npm/node_modules/mississippi/node_modules/duplexify/package.json index 485f0312ea8..a8de9a133f9 100644 --- a/deps/npm/node_modules/mississippi/node_modules/duplexify/package.json +++ b/deps/npm/node_modules/mississippi/node_modules/duplexify/package.json @@ -14,13 +14,13 @@ ] ], "_from": "duplexify@>=3.4.2 <4.0.0", - "_id": "duplexify@3.4.5", + "_id": "duplexify@3.5.0", "_inCache": true, "_location": "/mississippi/duplexify", - "_nodeVersion": "4.4.3", + "_nodeVersion": "4.6.1", "_npmOperationalInternal": { "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/duplexify-3.4.5.tgz_1468011872327_0.44416941492818296" + "tmp": "tmp/duplexify-3.5.0.tgz_1477317448157_0.2257942291907966" }, "_npmUser": { "name": "mafintosh", @@ -43,8 +43,8 @@ "/mississippi", "/mississippi/pumpify" ], - "_resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.4.5.tgz", - "_shasum": "0e7e287a775af753bf57e6e7b7f21f183f6c3a53", + "_resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.5.0.tgz", + "_shasum": "1aa773002e1578457e9d9d4a50b0ccaaebcbd604", "_shrinkwrap": null, "_spec": "duplexify@^3.4.2", "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", @@ -60,7 +60,7 @@ "readable-stream": "^2.0.0", "stream-shift": "^1.0.0" }, - "description": "Turn a writeable and readable stream into a streams2 duplex stream with support for async initialization and streams1/streams2 input", + "description": "Turn a writable and readable stream into a streams2 duplex stream with support for async initialization and streams1/streams2 input", "devDependencies": { "concat-stream": "^1.4.6", "tape": "^2.13.3", @@ -68,10 +68,10 @@ }, "directories": {}, "dist": { - "shasum": "0e7e287a775af753bf57e6e7b7f21f183f6c3a53", - "tarball": "https://registry.npmjs.org/duplexify/-/duplexify-3.4.5.tgz" + "shasum": "1aa773002e1578457e9d9d4a50b0ccaaebcbd604", + "tarball": "https://registry.npmjs.org/duplexify/-/duplexify-3.5.0.tgz" }, - "gitHead": "338de6776ce9b25d7ab6e91d766166245a8f070a", + "gitHead": "97f525d36ce275e52435611d70b3a77a7234eaa1", "homepage": "https://github.com/mafintosh/duplexify", "keywords": [ "duplex", @@ -100,5 +100,5 @@ "scripts": { "test": "tape test.js" }, - "version": "3.4.5" + "version": "3.5.0" } diff --git 
a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/.npmignore b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/.npmignore index 3c3629e647f..3e70011a19a 100644 --- a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/.npmignore +++ b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/.npmignore @@ -1 +1,3 @@ node_modules +bundle.js +test.html diff --git a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/example.js b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/example.js new file mode 100644 index 00000000000..fa6b5dab255 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/example.js @@ -0,0 +1,22 @@ +var writer = require('./') + +var ws = writer(write, flush) + +ws.on('finish', function () { + console.log('finished') +}) + +ws.write('hello') +ws.write('world') +ws.end() + +function write (data, enc, cb) { + // i am your normal ._write method + console.log('writing', data.toString()) + cb() +} + +function flush (cb) { + // i am called before finish is emitted + setTimeout(cb, 1000) // wait 1 sec +} diff --git a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/index.js b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/index.js index d7715734b4d..e82e126126f 100644 --- a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/index.js +++ b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/index.js @@ -1,5 +1,5 @@ var stream = require('readable-stream') -var util = require('util') +var inherits = require('inherits') var SIGNAL_FLUSH = new Buffer([0]) @@ -21,7 +21,7 @@ function WriteStream (opts, write, flush) { this._flush = flush || null } -util.inherits(WriteStream, stream.Writable) +inherits(WriteStream, stream.Writable) WriteStream.obj = function (opts, worker, flush) { if (typeof opts === 'function') return WriteStream.obj(null, opts, worker) @@ -47,6 +47,6 @@ WriteStream.prototype.end = function (data, enc, cb) { WriteStream.prototype.destroy = function (err) { if (this.destroyed) return this.destroyed = true - if (err) this.emit('error') + if (err) this.emit('error', err) this.emit('close') } diff --git a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/package.json b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/package.json index a73eaeb36ef..cd239a22b51 100644 --- a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/package.json +++ b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/package.json @@ -14,15 +14,19 @@ ] ], "_from": "flush-write-stream@>=1.0.0 <2.0.0", - "_id": "flush-write-stream@1.0.0", + "_id": "flush-write-stream@1.0.2", "_inCache": true, "_location": "/mississippi/flush-write-stream", - "_nodeVersion": "4.1.1", + "_nodeVersion": "4.2.6", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/flush-write-stream-1.0.2.tgz_1476614807882_0.22224654001183808" + }, "_npmUser": { "name": "mafintosh", "email": "mathiasbuus@gmail.com" }, - "_npmVersion": "2.14.4", + "_npmVersion": "2.14.12", "_phantomChildren": {}, "_requested": { "raw": "flush-write-stream@^1.0.0", @@ -36,8 +40,8 @@ "_requiredBy": [ "/mississippi" ], - "_resolved": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.0.0.tgz", - "_shasum": "cc4fc24f4b4c973f80027f27cc095841639965a7", + "_resolved": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.0.2.tgz", + 
"_shasum": "c81b90d8746766f1a609a46809946c45dd8ae417", "_shrinkwrap": null, "_spec": "flush-write-stream@^1.0.0", "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", @@ -49,6 +53,7 @@ "url": "https://github.com/mafintosh/flush-write-stream/issues" }, "dependencies": { + "inherits": "^2.0.1", "readable-stream": "^2.0.4" }, "description": "A write stream constructor that supports a flush function that is called before finish is emitted", @@ -57,10 +62,10 @@ }, "directories": {}, "dist": { - "shasum": "cc4fc24f4b4c973f80027f27cc095841639965a7", - "tarball": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.0.0.tgz" + "shasum": "c81b90d8746766f1a609a46809946c45dd8ae417", + "tarball": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.0.2.tgz" }, - "gitHead": "50e81d8eeee8a9666c7d5105775a6c89b7ae9dfa", + "gitHead": "d35a4071dacbcc60fc40d798fa58fc425cba3efc", "homepage": "https://github.com/mafintosh/flush-write-stream", "license": "MIT", "main": "index.js", @@ -80,5 +85,5 @@ "scripts": { "test": "tape test.js" }, - "version": "1.0.0" + "version": "1.0.2" } diff --git a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/test.js b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/test.js index 7383acede6f..6cd0c20e1f7 100644 --- a/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/test.js +++ b/deps/npm/node_modules/mississippi/node_modules/flush-write-stream/test.js @@ -70,3 +70,16 @@ tape('can pass options', function (t) { process.nextTick(cb) } }) + +tape('emits error on destroy', function (t) { + var expected = new Error() + + var ws = writer({objectMode: true}, function () {}) + + ws.on('error', function (err) { + t.equal(err, expected) + }) + ws.on('close', t.end) + + ws.destroy(expected) +}) diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/.npmignore b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/.npmignore similarity index 100% rename from deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/.npmignore rename to deps/npm/node_modules/mississippi/node_modules/parallel-transform/.npmignore diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/LICENSE b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/LICENSE new file mode 100644 index 00000000000..4b30ed5d950 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/LICENSE @@ -0,0 +1,20 @@ +Copyright 2013 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/README.md b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/README.md new file mode 100644 index 00000000000..f53e1308493 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/README.md @@ -0,0 +1,54 @@ +# parallel-transform + +[Transform stream](http://nodejs.org/api/stream.html#stream_class_stream_transform_1) for Node.js that allows you to run your transforms +in parallel without changing the order of the output. + + npm install parallel-transform + +It is easy to use + +``` js +var transform = require('parallel-transform'); + +var stream = transform(10, function(data, callback) { // 10 is the parallism level + setTimeout(function() { + callback(null, data); + }, Math.random() * 1000); +}); + +for (var i = 0; i < 10; i++) { + stream.write(''+i); +} +stream.end(); + +stream.on('data', function(data) { + console.log(data); // prints 0,1,2,... +}); +stream.on('end', function() { + console.log('stream has ended'); +}); +``` + +If you run the above example you'll notice that it runs in parallel +(does not take ~1 second between each print) and that the order is preserved + +## Stream options + +All transforms are Node 0.10 streams. Per default they are created with the options `{objectMode:true}`. +If you want to use your own stream options pass them as the second parameter + +``` js +var stream = transform(10, {objectMode:false}, function(data, callback) { + // data is now a buffer + callback(null, data); +}); + +fs.createReadStream('filename').pipe(stream).pipe(process.stdout); +``` + +### Unordered +Passing the option `{ordered:false}` will output the data as soon as it's processed by a transform, without waiting to respect the order. + +## License + +MIT \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/index.js b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/index.js new file mode 100644 index 00000000000..77329e4ccfe --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/index.js @@ -0,0 +1,105 @@ +var Transform = require('readable-stream').Transform; +var inherits = require('inherits'); +var cyclist = require('cyclist'); +var util = require('util'); + +var ParallelTransform = function(maxParallel, opts, ontransform) { + if (!(this instanceof ParallelTransform)) return new ParallelTransform(maxParallel, opts, ontransform); + + if (typeof maxParallel === 'function') { + ontransform = maxParallel; + opts = null; + maxParallel = 1; + } + if (typeof opts === 'function') { + ontransform = opts; + opts = null; + } + + if (!opts) opts = {}; + if (!opts.highWaterMark) opts.highWaterMark = Math.max(maxParallel, 16); + if (opts.objectMode !== false) opts.objectMode = true; + + Transform.call(this, opts); + + this._maxParallel = maxParallel; + this._ontransform = ontransform; + this._destroyed = false; + this._flushed = false; + this._ordered = opts.ordered !== false; + this._buffer = this._ordered ? 
cyclist(maxParallel) : []; + this._top = 0; + this._bottom = 0; + this._ondrain = null; +}; + +inherits(ParallelTransform, Transform); + +ParallelTransform.prototype.destroy = function() { + if (this._destroyed) return; + this._destroyed = true; + this.emit('close'); +}; + +ParallelTransform.prototype._transform = function(chunk, enc, callback) { + var self = this; + var pos = this._top++; + + this._ontransform(chunk, function(err, data) { + if (self._destroyed) return; + if (err) { + self.emit('error', err); + self.push(null); + self.destroy(); + return; + } + if (self._ordered) { + self._buffer.put(pos, (data === undefined || data === null) ? null : data); + } + else { + self._buffer.push(data); + } + self._drain(); + }); + + if (this._top - this._bottom < this._maxParallel) return callback(); + this._ondrain = callback; +}; + +ParallelTransform.prototype._flush = function(callback) { + this._flushed = true; + this._ondrain = callback; + this._drain(); +}; + +ParallelTransform.prototype._drain = function() { + if (this._ordered) { + while (this._buffer.get(this._bottom) !== undefined) { + var data = this._buffer.del(this._bottom++); + if (data === null) continue; + this.push(data); + } + } + else { + while (this._buffer.length > 0) { + var data = this._buffer.pop(); + this._bottom++; + if (data === null) continue; + this.push(data); + } + } + + + if (!this._drained() || !this._ondrain) return; + + var ondrain = this._ondrain; + this._ondrain = null; + ondrain(); +}; + +ParallelTransform.prototype._drained = function() { + var diff = this._top - this._bottom; + return this._flushed ? !diff : diff < this._maxParallel; +}; + +module.exports = ParallelTransform; diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/.npmignore b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/.npmignore new file mode 100644 index 00000000000..ba99195bae8 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/.npmignore @@ -0,0 +1 @@ +bench diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/README.md b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/README.md new file mode 100644 index 00000000000..50c35cc5fd4 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/README.md @@ -0,0 +1,39 @@ +# Cyclist + +Cyclist is an efficient [cyclic list](http://en.wikipedia.org/wiki/Circular_buffer) implemention for Javascript. +It is available through npm + + npm install cyclist + +## What? + +Cyclist allows you to create a list of fixed size that is cyclic. +In a cyclist list the element following the last one is the first one. +This property can be really useful when for example trying to order data +packets that can arrive out of order over a network stream. 
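The parallel-transform implementation above keeps ordered output by parking results in a cyclist buffer; its README also mentions an unordered mode. A minimal sketch of that `{ordered: false}` option, with an illustrative async transform:

``` js
var transform = require('parallel-transform')

// with ordered: false, results are pushed as soon as each callback fires,
// so output follows completion order rather than write order
var stream = transform(5, {ordered: false}, function (data, callback) {
  setTimeout(function () {
    callback(null, data + ' done')
  }, Math.random() * 100)
})

stream.on('data', function (data) {
  console.log(data)
})

for (var i = 0; i < 5; i++) {
  stream.write('job-' + i)
}
stream.end()
```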
+ +## Usage + +``` js +var cyclist = require('cyclist'); +var list = cyclist(4); // if size (4) is not a power of 2 it will be the follwing power of 2 + // this buffer can now hold 4 elements in total + +list.put(42, 'hello 42'); // store something and index 42 +list.put(43, 'hello 43'); // store something and index 43 + +console.log(list.get(42)); // prints hello 42 +console.log(list.get(46)); // prints hello 42 again since 46 - 42 == list.size +``` + +## API + +* `cyclist(size)` creates a new buffer +* `cyclist#get(index)` get an object stored in the buffer +* `cyclist#put(index,value)` insert an object into the buffer +* `cyclist#del(index)` delete an object from an index +* `cyclist#size` property containing current size of buffer + +## License + +MIT diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/index.js b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/index.js new file mode 100644 index 00000000000..baf710c3a9a --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/index.js @@ -0,0 +1,33 @@ +var ensureTwoPower = function(n) { + if (n && !(n & (n - 1))) return n; + var p = 1; + while (p < n) p <<= 1; + return p; +}; + +var Cyclist = function(size) { + if (!(this instanceof Cyclist)) return new Cyclist(size); + size = ensureTwoPower(size); + this.mask = size-1; + this.size = size; + this.values = new Array(size); +}; + +Cyclist.prototype.put = function(index, val) { + var pos = index & this.mask; + this.values[pos] = val; + return pos; +}; + +Cyclist.prototype.get = function(index) { + return this.values[index & this.mask]; +}; + +Cyclist.prototype.del = function(index) { + var pos = index & this.mask; + var val = this.values[pos]; + this.values[pos] = undefined; + return val; +}; + +module.exports = Cyclist; \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/package.json b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/package.json new file mode 100644 index 00000000000..ad045eb9714 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/node_modules/cyclist/package.json @@ -0,0 +1,80 @@ +{ + "_args": [ + [ + { + "raw": "cyclist@~0.2.2", + "scope": null, + "escapedName": "cyclist", + "name": "cyclist", + "rawSpec": "~0.2.2", + "spec": ">=0.2.2 <0.3.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/parallel-transform" + ] + ], + "_from": "cyclist@>=0.2.2 <0.3.0", + "_id": "cyclist@0.2.2", + "_inCache": true, + "_location": "/mississippi/parallel-transform/cyclist", + "_npmUser": { + "name": "mafintosh", + "email": "mathiasbuus@gmail.com" + }, + "_npmVersion": "1.3.5", + "_phantomChildren": {}, + "_requested": { + "raw": "cyclist@~0.2.2", + "scope": null, + "escapedName": "cyclist", + "name": "cyclist", + "rawSpec": "~0.2.2", + "spec": ">=0.2.2 <0.3.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi/parallel-transform" + ], + "_resolved": "https://registry.npmjs.org/cyclist/-/cyclist-0.2.2.tgz", + "_shasum": "1b33792e11e914a2fd6d6ed6447464444e5fa640", + "_shrinkwrap": null, + "_spec": "cyclist@~0.2.2", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/parallel-transform", + "author": { + "name": "Mathias Buus Madsen", + "email": "mathiasbuus@gmail.com" + }, + "bugs": { + "url": 
"https://github.com/mafintosh/cyclist/issues" + }, + "dependencies": {}, + "description": "Cyclist is an efficient cyclic list implemention.", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "1b33792e11e914a2fd6d6ed6447464444e5fa640", + "tarball": "https://registry.npmjs.org/cyclist/-/cyclist-0.2.2.tgz" + }, + "homepage": "https://github.com/mafintosh/cyclist#readme", + "keywords": [ + "circular", + "buffer", + "ring", + "cyclic", + "data" + ], + "maintainers": [ + { + "name": "mafintosh", + "email": "mathiasbuus@gmail.com" + } + ], + "name": "cyclist", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/mafintosh/cyclist.git" + }, + "version": "0.2.2" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/parallel-transform/package.json b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/package.json new file mode 100644 index 00000000000..357b4898a66 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/parallel-transform/package.json @@ -0,0 +1,92 @@ +{ + "_args": [ + [ + { + "raw": "parallel-transform@^1.1.0", + "scope": null, + "escapedName": "parallel-transform", + "name": "parallel-transform", + "rawSpec": "^1.1.0", + "spec": ">=1.1.0 <2.0.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi" + ] + ], + "_from": "parallel-transform@>=1.1.0 <2.0.0", + "_id": "parallel-transform@1.1.0", + "_inCache": true, + "_location": "/mississippi/parallel-transform", + "_nodeVersion": "4.6.1", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/parallel-transform-1.1.0.tgz_1478596056784_0.9169374129269272" + }, + "_npmUser": { + "name": "mafintosh", + "email": "mathiasbuus@gmail.com" + }, + "_npmVersion": "2.15.9", + "_phantomChildren": {}, + "_requested": { + "raw": "parallel-transform@^1.1.0", + "scope": null, + "escapedName": "parallel-transform", + "name": "parallel-transform", + "rawSpec": "^1.1.0", + "spec": ">=1.1.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi" + ], + "_resolved": "https://registry.npmjs.org/parallel-transform/-/parallel-transform-1.1.0.tgz", + "_shasum": "d410f065b05da23081fcd10f28854c29bda33b06", + "_shrinkwrap": null, + "_spec": "parallel-transform@^1.1.0", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", + "author": { + "name": "Mathias Buus Madsen", + "email": "mathiasbuus@gmail.com" + }, + "bugs": { + "url": "https://github.com/mafintosh/parallel-transform/issues" + }, + "dependencies": { + "cyclist": "~0.2.2", + "inherits": "^2.0.3", + "readable-stream": "^2.1.5" + }, + "description": "Transform stream that allows you to run your transforms in parallel without changing the order", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "d410f065b05da23081fcd10f28854c29bda33b06", + "tarball": "https://registry.npmjs.org/parallel-transform/-/parallel-transform-1.1.0.tgz" + }, + "gitHead": "1b4919bc318eb0cbd6e8ee08c4d56f405d74e643", + "homepage": "https://github.com/mafintosh/parallel-transform", + "keywords": [ + "transform", + "stream", + "parallel", + "preserve", + "order" + ], + "license": "MIT", + "maintainers": [ + { + "name": "mafintosh", + "email": "mathiasbuus@gmail.com" + } + ], + "name": "parallel-transform", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/mafintosh/parallel-transform.git" + }, + 
"scripts": {}, + "version": "1.1.0" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/pump/index.js b/deps/npm/node_modules/mississippi/node_modules/pump/index.js index 843da727ef8..060ce5f4fd3 100644 --- a/deps/npm/node_modules/mississippi/node_modules/pump/index.js +++ b/deps/npm/node_modules/mississippi/node_modules/pump/index.js @@ -9,6 +9,7 @@ var isFn = function (fn) { } var isFS = function (stream) { + if (!fs) return false // browser return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) } diff --git a/deps/npm/node_modules/mississippi/node_modules/pump/package.json b/deps/npm/node_modules/mississippi/node_modules/pump/package.json index 2457d2f00d2..2226426b52a 100644 --- a/deps/npm/node_modules/mississippi/node_modules/pump/package.json +++ b/deps/npm/node_modules/mississippi/node_modules/pump/package.json @@ -14,15 +14,19 @@ ] ], "_from": "pump@>=1.0.0 <2.0.0", - "_id": "pump@1.0.1", + "_id": "pump@1.0.2", "_inCache": true, "_location": "/mississippi/pump", - "_nodeVersion": "4.1.1", + "_nodeVersion": "4.6.2", + "_npmOperationalInternal": { + "host": "packages-18-east.internal.npmjs.com", + "tmp": "tmp/pump-1.0.2.tgz_1482243286673_0.09530888125300407" + }, "_npmUser": { "name": "mafintosh", "email": "mathiasbuus@gmail.com" }, - "_npmVersion": "2.14.4", + "_npmVersion": "2.15.11", "_phantomChildren": {}, "_requested": { "raw": "pump@^1.0.0", @@ -37,8 +41,8 @@ "/mississippi", "/mississippi/pumpify" ], - "_resolved": "https://registry.npmjs.org/pump/-/pump-1.0.1.tgz", - "_shasum": "f1f1409fb9bd1085bbdb576b43b84ec4b5eadc1a", + "_resolved": "https://registry.npmjs.org/pump/-/pump-1.0.2.tgz", + "_shasum": "3b3ee6512f94f0e575538c17995f9f16990a5d51", "_shrinkwrap": null, "_spec": "pump@^1.0.0", "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", @@ -46,6 +50,9 @@ "name": "Mathias Buus Madsen", "email": "mathiasbuus@gmail.com" }, + "browser": { + "fs": false + }, "bugs": { "url": "https://github.com/mafintosh/pump/issues" }, @@ -57,10 +64,10 @@ "devDependencies": {}, "directories": {}, "dist": { - "shasum": "f1f1409fb9bd1085bbdb576b43b84ec4b5eadc1a", - "tarball": "https://registry.npmjs.org/pump/-/pump-1.0.1.tgz" + "shasum": "3b3ee6512f94f0e575538c17995f9f16990a5d51", + "tarball": "https://registry.npmjs.org/pump/-/pump-1.0.2.tgz" }, - "gitHead": "6abb030191e1ccb12c5f735a4f39162307f93b90", + "gitHead": "90ed7ae8923ade7c7589e3db28c29fbc5c2d42ca", "homepage": "https://github.com/mafintosh/pump", "keywords": [ "streams", @@ -85,5 +92,5 @@ "scripts": { "test": "node test.js" }, - "version": "1.0.1" + "version": "1.0.2" } diff --git a/deps/npm/node_modules/mississippi/node_modules/pump/test-browser.js b/deps/npm/node_modules/mississippi/node_modules/pump/test-browser.js new file mode 100644 index 00000000000..80e852c7dcb --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/pump/test-browser.js @@ -0,0 +1,58 @@ +var stream = require('stream') +var pump = require('./index') + +var rs = new stream.Readable() +var ws = new stream.Writable() + +rs._read = function (size) { + this.push(Buffer(size).fill('abc')) +} + +ws._write = function (chunk, encoding, cb) { + setTimeout(function () { + cb() + }, 100) +} + +var toHex = function () { + var reverse = new (require('stream').Transform)() + + reverse._transform = function (chunk, enc, callback) { + reverse.push(chunk.toString('hex')) + callback() + } + + return reverse +} + +var wsClosed = false +var rsClosed = false +var callbackCalled = false 
+ +var check = function () { + if (wsClosed && rsClosed && callbackCalled) console.log('done') +} + +ws.on('finish', function () { + wsClosed = true + check() +}) + +rs.on('end', function () { + rsClosed = true + check() +}) + +pump(rs, toHex(), toHex(), toHex(), ws, function () { + callbackCalled = true + check() +}) + +setTimeout(function () { + rs.push(null) + rs.emit('close') +}, 1000) + +setTimeout(function () { + if (!check()) throw new Error('timeout') +}, 5000) diff --git a/deps/npm/node_modules/mississippi/node_modules/stream-each/index.js b/deps/npm/node_modules/mississippi/node_modules/stream-each/index.js index ea8d112f981..2c07e957a3b 100644 --- a/deps/npm/node_modules/mississippi/node_modules/stream-each/index.js +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/index.js @@ -1,4 +1,5 @@ var eos = require('end-of-stream') +var shift = require('stream-shift') module.exports = each @@ -41,7 +42,7 @@ function each (stream, fn, cb) { if (ended || running) return want = false - var data = stream.read() + var data = shift(stream) if (!data) { want = true return diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.npmignore b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/.npmignore similarity index 100% rename from deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.npmignore rename to deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/.npmignore diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/.travis.yml b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/.travis.yml similarity index 58% rename from deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/.travis.yml rename to deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/.travis.yml index cc4dba29d95..ecd4193f602 100644 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/isarray/.travis.yml +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/.travis.yml @@ -1,4 +1,6 @@ language: node_js node_js: - - "0.8" - "0.10" + - "0.12" + - "4" + - "6" diff --git a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/license.md b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/LICENSE similarity index 79% rename from deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/license.md rename to deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/LICENSE index c67e3532b54..bae9da7bfae 100644 --- a/deps/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream/node_modules/process-nextick-args/license.md +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/LICENSE @@ -1,4 +1,6 @@ -# Copyright (c) 2015 Calvin Metcalf +The MIT License (MIT) + +Copyright (c) 2016 Mathias Buus Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -7,13 +9,13 @@ to use, copy, modify, merge, 
publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. -**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.** +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/README.md b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/README.md new file mode 100644 index 00000000000..d9cc2d945fa --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/README.md @@ -0,0 +1,25 @@ +# stream-shift + +Returns the next buffer/object in a stream's readable queue + +``` +npm install stream-shift +``` + +[![build status](http://img.shields.io/travis/mafintosh/stream-shift.svg?style=flat)](http://travis-ci.org/mafintosh/stream-shift) + +## Usage + +``` js +var shift = require('stream-shift') + +console.log(shift(someStream)) // first item in its buffer +``` + +## Credit + +Thanks [@dignifiedquire](https://github.com/dignifiedquire) for making this work on node 6 + +## License + +MIT diff --git a/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/index.js b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/index.js new file mode 100644 index 00000000000..c4b18b9c2a5 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/index.js @@ -0,0 +1,20 @@ +module.exports = shift + +function shift (stream) { + var rs = stream._readableState + if (!rs) return null + return rs.objectMode ? 
stream.read() : stream.read(getStateLength(rs)) +} + +function getStateLength (state) { + if (state.buffer.length) { + // Since node 6.3.0 state.buffer is a BufferList not an array + if (state.buffer.head) { + return state.buffer.head.data.length + } + + return state.buffer[0].length + } + + return state.length +} diff --git a/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/package.json b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/package.json new file mode 100644 index 00000000000..54bd23eb8e6 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/package.json @@ -0,0 +1,100 @@ +{ + "_args": [ + [ + { + "raw": "stream-shift@^1.0.0", + "scope": null, + "escapedName": "stream-shift", + "name": "stream-shift", + "rawSpec": "^1.0.0", + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/duplexify" + ], + [ + { + "raw": "stream-shift@^1.0.0", + "scope": null, + "escapedName": "stream-shift", + "name": "stream-shift", + "rawSpec": "^1.0.0", + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/stream-each" + ] + ], + "_from": "stream-shift@^1.0.0", + "_id": "stream-shift@1.0.0", + "_inCache": true, + "_location": "/mississippi/stream-each/stream-shift", + "_nodeVersion": "4.4.3", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/stream-shift-1.0.0.tgz_1468011662152_0.6510484367609024" + }, + "_npmUser": { + "name": "mafintosh", + "email": "mathiasbuus@gmail.com" + }, + "_npmVersion": "2.15.9", + "_phantomChildren": {}, + "_requested": { + "raw": "stream-shift@^1.0.0", + "scope": null, + "escapedName": "stream-shift", + "name": "stream-shift", + "rawSpec": "^1.0.0", + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi/stream-each" + ], + "_resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", + "_shasum": "d5c752825e5367e786f78e18e445ea223a155952", + "_shrinkwrap": null, + "_spec": "stream-shift@^1.0.0", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/stream-each", + "author": { + "name": "Mathias Buus", + "url": "@mafintosh" + }, + "bugs": { + "url": "https://github.com/mafintosh/stream-shift/issues" + }, + "dependencies": {}, + "description": "Returns the next buffer/object in a stream's readable queue", + "devDependencies": { + "standard": "^7.1.2", + "tape": "^4.6.0", + "through2": "^2.0.1" + }, + "directories": {}, + "dist": { + "shasum": "d5c752825e5367e786f78e18e445ea223a155952", + "tarball": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz" + }, + "gitHead": "2ea5f7dcd8ac6babb08324e6e603a3269252a2c4", + "homepage": "https://github.com/mafintosh/stream-shift", + "license": "MIT", + "main": "index.js", + "maintainers": [ + { + "name": "mafintosh", + "email": "mathiasbuus@gmail.com" + } + ], + "name": "stream-shift", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/mafintosh/stream-shift.git" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "version": "1.0.0" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/test.js b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/test.js new file mode 
100644 index 00000000000..c0222c37d53 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/node_modules/stream-shift/test.js @@ -0,0 +1,48 @@ +var tape = require('tape') +var through = require('through2') +var stream = require('stream') +var shift = require('./') + +tape('shifts next', function (t) { + var passthrough = through() + + passthrough.write('hello') + passthrough.write('world') + + t.same(shift(passthrough), Buffer('hello')) + t.same(shift(passthrough), Buffer('world')) + t.end() +}) + +tape('shifts next with core', function (t) { + var passthrough = stream.PassThrough() + + passthrough.write('hello') + passthrough.write('world') + + t.same(shift(passthrough), Buffer('hello')) + t.same(shift(passthrough), Buffer('world')) + t.end() +}) + +tape('shifts next with object mode', function (t) { + var passthrough = through({objectMode: true}) + + passthrough.write({hello: 1}) + passthrough.write({world: 1}) + + t.same(shift(passthrough), {hello: 1}) + t.same(shift(passthrough), {world: 1}) + t.end() +}) + +tape('shifts next with object mode with core', function (t) { + var passthrough = stream.PassThrough({objectMode: true}) + + passthrough.write({hello: 1}) + passthrough.write({world: 1}) + + t.same(shift(passthrough), {hello: 1}) + t.same(shift(passthrough), {world: 1}) + t.end() +}) diff --git a/deps/npm/node_modules/mississippi/node_modules/stream-each/package.json b/deps/npm/node_modules/mississippi/node_modules/stream-each/package.json index d73cd30d829..f8594afda69 100644 --- a/deps/npm/node_modules/mississippi/node_modules/stream-each/package.json +++ b/deps/npm/node_modules/mississippi/node_modules/stream-each/package.json @@ -14,19 +14,19 @@ ] ], "_from": "stream-each@>=1.1.0 <2.0.0", - "_id": "stream-each@1.1.2", + "_id": "stream-each@1.2.0", "_inCache": true, "_location": "/mississippi/stream-each", - "_nodeVersion": "4.2.3", + "_nodeVersion": "4.6.1", "_npmOperationalInternal": { - "host": "packages-5-east.internal.npmjs.com", - "tmp": "tmp/stream-each-1.1.2.tgz_1454600601831_0.7560806761030108" + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/stream-each-1.2.0.tgz_1478427484733_0.5437065886799246" }, "_npmUser": { "name": "mafintosh", "email": "mathiasbuus@gmail.com" }, - "_npmVersion": "2.14.7", + "_npmVersion": "2.15.9", "_phantomChildren": {}, "_requested": { "raw": "stream-each@^1.1.0", @@ -40,8 +40,8 @@ "_requiredBy": [ "/mississippi" ], - "_resolved": "https://registry.npmjs.org/stream-each/-/stream-each-1.1.2.tgz", - "_shasum": "7d4f887f24c721ab0155b12a34263d8732ad8d39", + "_resolved": "https://registry.npmjs.org/stream-each/-/stream-each-1.2.0.tgz", + "_shasum": "1e95d47573f580d814dc0ff8cd0f66f1ce53c991", "_shrinkwrap": null, "_spec": "stream-each@^1.1.0", "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", @@ -53,7 +53,8 @@ "url": "https://github.com/mafintosh/stream-each/issues" }, "dependencies": { - "end-of-stream": "^1.1.0" + "end-of-stream": "^1.1.0", + "stream-shift": "^1.0.0" }, "description": "Iterate all the data in a stream", "devDependencies": { @@ -63,10 +64,10 @@ }, "directories": {}, "dist": { - "shasum": "7d4f887f24c721ab0155b12a34263d8732ad8d39", - "tarball": "https://registry.npmjs.org/stream-each/-/stream-each-1.1.2.tgz" + "shasum": "1e95d47573f580d814dc0ff8cd0f66f1ce53c991", + "tarball": "https://registry.npmjs.org/stream-each/-/stream-each-1.2.0.tgz" }, - "gitHead": "f0ef91ddbe95688e376598b9959cab463c733b57", + "gitHead": "2968bf4f195641f5eda594c781a0b590776bc702", "homepage": 
"https://github.com/mafintosh/stream-each", "license": "MIT", "main": "index.js", @@ -94,5 +95,5 @@ "scripts": { "test": "standard && tape test.js" }, - "version": "1.1.2" + "version": "1.2.0" } diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE b/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE deleted file mode 100644 index f6a0029de11..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE +++ /dev/null @@ -1,39 +0,0 @@ -Copyright 2013, Rod Vagg (the "Original Author") -All rights reserved. - -MIT +no-false-attribs License - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -Distributions of all or part of the Software intended to be used -by the recipients as they would use the unmodified Software, -containing modifications that substantially alter, remove, or -disable functionality of the Software, outside of the documented -configuration mechanisms provided by the Software, shall be -modified such that the Original Author's bug reporting email -addresses and urls are either replaced with the contact information -of the parties responsible for the changes, or removed entirely. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. - - -Except where noted, this license applies to any and all software -programs and associated documentation files created by the -Original Author, when distributed with the Software. \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE.html b/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE.html new file mode 100644 index 00000000000..ac478189ea2 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE.html @@ -0,0 +1,336 @@ + + + + + + + + + + + + + + +
+The MIT License (MIT)
+
+Copyright (c) 2016 Rod Vagg (the "Original Author") and additional contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ + \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE.md b/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE.md new file mode 100644 index 00000000000..7f0b93daaa4 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/LICENSE.md @@ -0,0 +1,9 @@ +# The MIT License (MIT) + +**Copyright (c) 2016 Rod Vagg (the "Original Author") and additional contributors** + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/README.md index c84b3464ada..a916f15ef5e 100644 --- a/deps/npm/node_modules/mississippi/node_modules/through2/README.md +++ b/deps/npm/node_modules/mississippi/node_modules/through2/README.md @@ -20,6 +20,9 @@ fs.createReadStream('ex.txt') callback() })) .pipe(fs.createWriteStream('out.txt')) + .on('finish', function () { + doSomethingSpecial() + }) ``` Or object streams: diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.npmignore b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.npmignore deleted file mode 100644 index 38344f87a62..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.npmignore +++ /dev/null @@ -1,5 +0,0 @@ -build/ -test/ -examples/ -fs.js -zlib.js \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.travis.yml b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.travis.yml deleted file mode 100644 index 1b82118460c..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.travis.yml +++ /dev/null @@ -1,52 +0,0 @@ -sudo: false -language: node_js -before_install: - - npm install -g npm@2 - - npm install -g npm -notifications: - email: false -matrix: - fast_finish: true - allow_failures: - - env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest" - - env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest" - include: - - node_js: '0.8' - env: TASK=test - - node_js: '0.10' - env: TASK=test - - node_js: '0.11' - env: TASK=test - - node_js: '0.12' - env: TASK=test - - node_js: 1 - env: TASK=test - - node_js: 2 - env: TASK=test - - node_js: 3 - env: TASK=test - - node_js: 4 - env: TASK=test - - node_js: 5 - env: TASK=test - - node_js: 5 - env: TASK=browser 
BROWSER_NAME=android BROWSER_VERSION="4.0..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="-3..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="-3..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest" -script: "npm run $TASK" -env: - global: - - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= - - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.zuul.yml b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.zuul.yml deleted file mode 100644 index 96d9cfbd386..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/.zuul.yml +++ /dev/null @@ -1 +0,0 @@ -ui: tape diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/LICENSE b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/LICENSE deleted file mode 100644 index e3d4e695a4c..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright Joyent, Inc. and other Node contributors. All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. 
diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/README.md deleted file mode 100644 index 1a67c48cd03..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/README.md +++ /dev/null @@ -1,36 +0,0 @@ -# readable-stream - -***Node-core v5.8.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) - - -[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) -[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) - - -[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) - -```bash -npm install --save readable-stream -``` - -***Node-core streams for userland*** - -This package is a mirror of the Streams2 and Streams3 implementations in -Node-core, including [documentation](doc/stream.markdown). - -If you want to guarantee a stable streams base, regardless of what version of -Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). - -As of version 2.0.0 **readable-stream** uses semantic versioning. - -# Streams WG Team Members - -* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> - - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B -* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> - - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 -* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> - - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D -* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> -* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> -* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/stream.markdown b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/stream.markdown deleted file mode 100644 index 0bc3819e63b..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/stream.markdown +++ /dev/null @@ -1,1760 +0,0 @@ -# Stream - - Stability: 2 - Stable - -A stream is an abstract interface implemented by various objects in -Node.js. For example a [request to an HTTP server][http-incoming-message] is a -stream, as is [`process.stdout`][]. Streams are readable, writable, or both. All -streams are instances of [`EventEmitter`][]. - -You can load the Stream base classes by doing `require('stream')`. -There are base classes provided for [Readable][] streams, [Writable][] -streams, [Duplex][] streams, and [Transform][] streams. - -This document is split up into 3 sections: - -1. The first section explains the parts of the API that you need to be - aware of to use streams in your programs. -2. The second section explains the parts of the API that you need to - use if you implement your own custom streams yourself. 
The API is designed to - make this easy for you to do. -3. The third section goes into more depth about how streams work, - including some of the internal mechanisms and functions that you - should probably not modify unless you definitely know what you are - doing. - - -## API for Stream Consumers - - - -Streams can be either [Readable][], [Writable][], or both ([Duplex][]). - -All streams are EventEmitters, but they also have other custom methods -and properties depending on whether they are Readable, Writable, or -Duplex. - -If a stream is both Readable and Writable, then it implements all of -the methods and events. So, a [Duplex][] or [Transform][] stream is -fully described by this API, though their implementation may be -somewhat different. - -It is not necessary to implement Stream interfaces in order to consume -streams in your programs. If you **are** implementing streaming -interfaces in your own program, please also refer to -[API for Stream Implementors][]. - -Almost all Node.js programs, no matter how simple, use Streams in some -way. Here is an example of using Streams in an Node.js program: - -```js -const http = require('http'); - -var server = http.createServer( (req, res) => { - // req is an http.IncomingMessage, which is a Readable Stream - // res is an http.ServerResponse, which is a Writable Stream - - var body = ''; - // we want to get the data as utf8 strings - // If you don't set an encoding, then you'll get Buffer objects - req.setEncoding('utf8'); - - // Readable streams emit 'data' events once a listener is added - req.on('data', (chunk) => { - body += chunk; - }); - - // the end event tells you that you have entire body - req.on('end', () => { - try { - var data = JSON.parse(body); - } catch (er) { - // uh oh! bad json! - res.statusCode = 400; - return res.end(`error: ${er.message}`); - } - - // write back something interesting to the user: - res.write(typeof data); - res.end(); - }); -}); - -server.listen(1337); - -// $ curl localhost:1337 -d '{}' -// object -// $ curl localhost:1337 -d '"foo"' -// string -// $ curl localhost:1337 -d 'not json' -// error: Unexpected token o -``` - -### Class: stream.Duplex - -Duplex streams are streams that implement both the [Readable][] and -[Writable][] interfaces. - -Examples of Duplex streams include: - -* [TCP sockets][] -* [zlib streams][zlib] -* [crypto streams][crypto] - -### Class: stream.Readable - - - -The Readable stream interface is the abstraction for a *source* of -data that you are reading from. In other words, data comes *out* of a -Readable stream. - -A Readable stream will not start emitting data until you indicate that -you are ready to receive it. - -Readable streams have two "modes": a **flowing mode** and a **paused -mode**. When in flowing mode, data is read from the underlying system -and provided to your program as fast as possible. In paused mode, you -must explicitly call [`stream.read()`][stream-read] to get chunks of data out. -Streams start out in paused mode. - -**Note**: If no data event handlers are attached, and there are no -[`stream.pipe()`][] destinations, and the stream is switched into flowing -mode, then data will be lost. - -You can switch to flowing mode by doing any of the following: - -* Adding a [`'data'`][] event handler to listen for data. -* Calling the [`stream.resume()`][stream-resume] method to explicitly open the - flow. -* Calling the [`stream.pipe()`][] method to send the data to a [Writable][]. 
- -You can switch back to paused mode by doing either of the following: - -* If there are no pipe destinations, by calling the - [`stream.pause()`][stream-pause] method. -* If there are pipe destinations, by removing any [`'data'`][] event - handlers, and removing all pipe destinations by calling the - [`stream.unpipe()`][] method. - -Note that, for backwards compatibility reasons, removing [`'data'`][] -event handlers will **not** automatically pause the stream. Also, if -there are piped destinations, then calling [`stream.pause()`][stream-pause] will -not guarantee that the stream will *remain* paused once those -destinations drain and ask for more data. - -Examples of readable streams include: - -* [HTTP responses, on the client][http-incoming-message] -* [HTTP requests, on the server][http-incoming-message] -* [fs read streams][] -* [zlib streams][zlib] -* [crypto streams][crypto] -* [TCP sockets][] -* [child process stdout and stderr][] -* [`process.stdin`][] - -#### Event: 'close' - -Emitted when the stream and any of its underlying resources (a file -descriptor, for example) have been closed. The event indicates that -no more events will be emitted, and no further computation will occur. - -Not all streams will emit the `'close'` event. - -#### Event: 'data' - -* `chunk` {Buffer|String} The chunk of data. - -Attaching a `'data'` event listener to a stream that has not been -explicitly paused will switch the stream into flowing mode. Data will -then be passed as soon as it is available. - -If you just want to get all the data out of the stream as fast as -possible, this is the best way to do so. - -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); -}); -``` - -#### Event: 'end' - -This event fires when there will be no more data to read. - -Note that the `'end'` event **will not fire** unless the data is -completely consumed. This can be done by switching into flowing mode, -or by calling [`stream.read()`][stream-read] repeatedly until you get to the -end. - -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); -}); -readable.on('end', () => { - console.log('there will be no more data.'); -}); -``` - -#### Event: 'error' - -* {Error Object} - -Emitted if there was an error receiving data. - -#### Event: 'readable' - -When a chunk of data can be read from the stream, it will emit a -`'readable'` event. - -In some cases, listening for a `'readable'` event will cause some data -to be read into the internal buffer from the underlying system, if it -hadn't already. - -```javascript -var readable = getReadableStreamSomehow(); -readable.on('readable', () => { - // there is some data to read now -}); -``` - -Once the internal buffer is drained, a `'readable'` event will fire -again when more data is available. - -The `'readable'` event is not emitted in the "flowing" mode with the -sole exception of the last one, on end-of-stream. - -The `'readable'` event indicates that the stream has new information: -either new data is available or the end of the stream has been reached. -In the former case, [`stream.read()`][stream-read] will return that data. In the -latter case, [`stream.read()`][stream-read] will return null. 
For instance, in -the following example, `foo.txt` is an empty file: - -```js -const fs = require('fs'); -var rr = fs.createReadStream('foo.txt'); -rr.on('readable', () => { - console.log('readable:', rr.read()); -}); -rr.on('end', () => { - console.log('end'); -}); -``` - -The output of running this script is: - -``` -$ node test.js -readable: null -end -``` - -#### readable.isPaused() - -* Return: {Boolean} - -This method returns whether or not the `readable` has been **explicitly** -paused by client code (using [`stream.pause()`][stream-pause] without a -corresponding [`stream.resume()`][stream-resume]). - -```js -var readable = new stream.Readable - -readable.isPaused() // === false -readable.pause() -readable.isPaused() // === true -readable.resume() -readable.isPaused() // === false -``` - -#### readable.pause() - -* Return: `this` - -This method will cause a stream in flowing mode to stop emitting -[`'data'`][] events, switching out of flowing mode. Any data that becomes -available will remain in the internal buffer. - -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); - readable.pause(); - console.log('there will be no more data for 1 second'); - setTimeout(() => { - console.log('now data will start flowing again'); - readable.resume(); - }, 1000); -}); -``` - -#### readable.pipe(destination[, options]) - -* `destination` {stream.Writable} The destination for writing data -* `options` {Object} Pipe options - * `end` {Boolean} End the writer when the reader ends. Default = `true` - -This method pulls all the data out of a readable stream, and writes it -to the supplied destination, automatically managing the flow so that -the destination is not overwhelmed by a fast readable stream. - -Multiple destinations can be piped to safely. - -```js -var readable = getReadableStreamSomehow(); -var writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt' -readable.pipe(writable); -``` - -This function returns the destination stream, so you can set up pipe -chains like so: - -```js -var r = fs.createReadStream('file.txt'); -var z = zlib.createGzip(); -var w = fs.createWriteStream('file.txt.gz'); -r.pipe(z).pipe(w); -``` - -For example, emulating the Unix `cat` command: - -```js -process.stdin.pipe(process.stdout); -``` - -By default [`stream.end()`][stream-end] is called on the destination when the -source stream emits [`'end'`][], so that `destination` is no longer writable. -Pass `{ end: false }` as `options` to keep the destination stream open. - -This keeps `writer` open so that "Goodbye" can be written at the -end. - -```js -reader.pipe(writer, { end: false }); -reader.on('end', () => { - writer.end('Goodbye\n'); -}); -``` - -Note that [`process.stderr`][] and [`process.stdout`][] are never closed until -the process exits, regardless of the specified options. - -#### readable.read([size]) - -* `size` {Number} Optional argument to specify how much data to read. -* Return {String|Buffer|Null} - -The `read()` method pulls some data out of the internal buffer and -returns it. If there is no data available, then it will return -`null`. - -If you pass in a `size` argument, then it will return that many -bytes. If `size` bytes are not available, then it will return `null`, -unless we've ended, in which case it will return the data remaining -in the buffer. - -If you do not specify a `size` argument, then it will return all the -data in the internal buffer. 
- -This method should only be called in paused mode. In flowing mode, -this method is called automatically until the internal buffer is -drained. - -```js -var readable = getReadableStreamSomehow(); -readable.on('readable', () => { - var chunk; - while (null !== (chunk = readable.read())) { - console.log('got %d bytes of data', chunk.length); - } -}); -``` - -If this method returns a data chunk, then it will also trigger the -emission of a [`'data'`][] event. - -Note that calling [`stream.read([size])`][stream-read] after the [`'end'`][] -event has been triggered will return `null`. No runtime error will be raised. - -#### readable.resume() - -* Return: `this` - -This method will cause the readable stream to resume emitting [`'data'`][] -events. - -This method will switch the stream into flowing mode. If you do *not* -want to consume the data from a stream, but you *do* want to get to -its [`'end'`][] event, you can call [`stream.resume()`][stream-resume] to open -the flow of data. - -```js -var readable = getReadableStreamSomehow(); -readable.resume(); -readable.on('end', () => { - console.log('got to the end, but did not read anything'); -}); -``` - -#### readable.setEncoding(encoding) - -* `encoding` {String} The encoding to use. -* Return: `this` - -Call this function to cause the stream to return strings of the specified -encoding instead of Buffer objects. For example, if you do -`readable.setEncoding('utf8')`, then the output data will be interpreted as -UTF-8 data, and returned as strings. If you do `readable.setEncoding('hex')`, -then the data will be encoded in hexadecimal string format. - -This properly handles multi-byte characters that would otherwise be -potentially mangled if you simply pulled the Buffers directly and -called [`buf.toString(encoding)`][] on them. If you want to read the data -as strings, always use this method. - -Also you can disable any encoding at all with `readable.setEncoding(null)`. -This approach is very useful if you deal with binary data or with large -multi-byte strings spread out over multiple chunks. - -```js -var readable = getReadableStreamSomehow(); -readable.setEncoding('utf8'); -readable.on('data', (chunk) => { - assert.equal(typeof chunk, 'string'); - console.log('got %d characters of string data', chunk.length); -}); -``` - -#### readable.unpipe([destination]) - -* `destination` {stream.Writable} Optional specific stream to unpipe - -This method will remove the hooks set up for a previous [`stream.pipe()`][] -call. - -If the destination is not specified, then all pipes are removed. - -If the destination is specified, but no pipe is set up for it, then -this is a no-op. - -```js -var readable = getReadableStreamSomehow(); -var writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt', -// but only for the first second -readable.pipe(writable); -setTimeout(() => { - console.log('stop writing to file.txt'); - readable.unpipe(writable); - console.log('manually close the file stream'); - writable.end(); -}, 1000); -``` - -#### readable.unshift(chunk) - -* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue - -This is useful in certain cases where a stream is being consumed by a -parser, which needs to "un-consume" some data that it has -optimistically pulled out of the source, so that the stream can be -passed on to some other party. - -Note that `stream.unshift(chunk)` cannot be called after the [`'end'`][] event -has been triggered; a runtime error will be raised. 
- -If you find that you must often call `stream.unshift(chunk)` in your -programs, consider implementing a [Transform][] stream instead. (See [API -for Stream Implementors][].) - -```js -// Pull off a header delimited by \n\n -// use unshift() if we get too much -// Call the callback with (error, header, stream) -const StringDecoder = require('string_decoder').StringDecoder; -function parseHeader(stream, callback) { - stream.on('error', callback); - stream.on('readable', onReadable); - var decoder = new StringDecoder('utf8'); - var header = ''; - function onReadable() { - var chunk; - while (null !== (chunk = stream.read())) { - var str = decoder.write(chunk); - if (str.match(/\n\n/)) { - // found the header boundary - var split = str.split(/\n\n/); - header += split.shift(); - var remaining = split.join('\n\n'); - var buf = new Buffer(remaining, 'utf8'); - if (buf.length) - stream.unshift(buf); - stream.removeListener('error', callback); - stream.removeListener('readable', onReadable); - // now the body of the message can be read from the stream. - callback(null, header, stream); - } else { - // still reading the header. - header += str; - } - } - } -} -``` - -Note that, unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)` -will not end the reading process by resetting the internal reading state of the -stream. This can cause unexpected results if `unshift()` is called during a -read (i.e. from within a [`stream._read()`][stream-_read] implementation on a -custom stream). Following the call to `unshift()` with an immediate -[`stream.push('')`][stream-push] will reset the reading state appropriately, -however it is best to simply avoid calling `unshift()` while in the process of -performing a read. - -#### readable.wrap(stream) - -* `stream` {Stream} An "old style" readable stream - -Versions of Node.js prior to v0.10 had streams that did not implement the -entire Streams API as it is today. (See [Compatibility][] for -more information.) - -If you are using an older Node.js library that emits [`'data'`][] events and -has a [`stream.pause()`][stream-pause] method that is advisory only, then you -can use the `wrap()` method to create a [Readable][] stream that uses the old -stream as its data source. - -You will very rarely ever need to call this function, but it exists -as a convenience for interacting with old Node.js programs and libraries. - -For example: - -```js -const OldReader = require('./old-api-module.js').OldReader; -const Readable = require('stream').Readable; -const oreader = new OldReader; -const myReader = new Readable().wrap(oreader); - -myReader.on('readable', () => { - myReader.read(); // etc. -}); -``` - -### Class: stream.Transform - -Transform streams are [Duplex][] streams where the output is in some way -computed from the input. They implement both the [Readable][] and -[Writable][] interfaces. - -Examples of Transform streams include: - -* [zlib streams][zlib] -* [crypto streams][crypto] - -### Class: stream.Writable - - - -The Writable stream interface is an abstraction for a *destination* -that you are writing data *to*. 
- -Examples of writable streams include: - -* [HTTP requests, on the client][] -* [HTTP responses, on the server][] -* [fs write streams][] -* [zlib streams][zlib] -* [crypto streams][crypto] -* [TCP sockets][] -* [child process stdin][] -* [`process.stdout`][], [`process.stderr`][] - -#### Event: 'drain' - -If a [`stream.write(chunk)`][stream-write] call returns `false`, then the -`'drain'` event will indicate when it is appropriate to begin writing more data -to the stream. - -```js -// Write the data to the supplied writable stream one million times. -// Be attentive to back-pressure. -function writeOneMillionTimes(writer, data, encoding, callback) { - var i = 1000000; - write(); - function write() { - var ok = true; - do { - i -= 1; - if (i === 0) { - // last time! - writer.write(data, encoding, callback); - } else { - // see if we should continue, or wait - // don't pass the callback, because we're not done yet. - ok = writer.write(data, encoding); - } - } while (i > 0 && ok); - if (i > 0) { - // had to stop early! - // write some more once it drains - writer.once('drain', write); - } - } -} -``` - -#### Event: 'error' - -* {Error} - -Emitted if there was an error when writing or piping data. - -#### Event: 'finish' - -When the [`stream.end()`][stream-end] method has been called, and all data has -been flushed to the underlying system, this event is emitted. - -```javascript -var writer = getWritableStreamSomehow(); -for (var i = 0; i < 100; i ++) { - writer.write('hello, #${i}!\n'); -} -writer.end('this is the end\n'); -writer.on('finish', () => { - console.error('all writes are now complete.'); -}); -``` - -#### Event: 'pipe' - -* `src` {stream.Readable} source stream that is piping to this writable - -This is emitted whenever the [`stream.pipe()`][] method is called on a readable -stream, adding this writable to its set of destinations. - -```js -var writer = getWritableStreamSomehow(); -var reader = getReadableStreamSomehow(); -writer.on('pipe', (src) => { - console.error('something is piping into the writer'); - assert.equal(src, reader); -}); -reader.pipe(writer); -``` - -#### Event: 'unpipe' - -* `src` {[Readable][] Stream} The source stream that - [unpiped][`stream.unpipe()`] this writable - -This is emitted whenever the [`stream.unpipe()`][] method is called on a -readable stream, removing this writable from its set of destinations. - -```js -var writer = getWritableStreamSomehow(); -var reader = getReadableStreamSomehow(); -writer.on('unpipe', (src) => { - console.error('something has stopped piping into the writer'); - assert.equal(src, reader); -}); -reader.pipe(writer); -reader.unpipe(writer); -``` - -#### writable.cork() - -Forces buffering of all writes. - -Buffered data will be flushed either at [`stream.uncork()`][] or at -[`stream.end()`][stream-end] call. - -#### writable.end([chunk][, encoding][, callback]) - -* `chunk` {String|Buffer} Optional data to write -* `encoding` {String} The encoding, if `chunk` is a String -* `callback` {Function} Optional callback for when the stream is finished - -Call this method when no more data will be written to the stream. If supplied, -the callback is attached as a listener on the [`'finish'`][] event. - -Calling [`stream.write()`][stream-write] after calling -[`stream.end()`][stream-end] will raise an error. - -```js -// write 'hello, ' and then end with 'world!' -var file = fs.createWriteStream('example.txt'); -file.write('hello, '); -file.end('world!'); -// writing more now is not allowed! 
-``` - -#### writable.setDefaultEncoding(encoding) - -* `encoding` {String} The new default encoding - -Sets the default encoding for a writable stream. - -#### writable.uncork() - -Flush all data, buffered since [`stream.cork()`][] call. - -#### writable.write(chunk[, encoding][, callback]) - -* `chunk` {String|Buffer} The data to write -* `encoding` {String} The encoding, if `chunk` is a String -* `callback` {Function} Callback for when this chunk of data is flushed -* Returns: {Boolean} `true` if the data was handled completely. - -This method writes some data to the underlying system, and calls the -supplied callback once the data has been fully handled. - -The return value indicates if you should continue writing right now. -If the data had to be buffered internally, then it will return -`false`. Otherwise, it will return `true`. - -This return value is strictly advisory. You MAY continue to write, -even if it returns `false`. However, writes will be buffered in -memory, so it is best not to do this excessively. Instead, wait for -the [`'drain'`][] event before writing more data. - - -## API for Stream Implementors - - - -To implement any sort of stream, the pattern is the same: - -1. Extend the appropriate parent class in your own subclass. (The - [`util.inherits()`][] method is particularly helpful for this.) -2. Call the appropriate parent class constructor in your constructor, - to be sure that the internal mechanisms are set up properly. -3. Implement one or more specific methods, as detailed below. - -The class to extend and the method(s) to implement depend on the sort -of stream class you are writing: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| Use-case                                      | Class                                         | Method(s) to implement                                                     |
-|-----------------------------------------------|-----------------------------------------------|----------------------------------------------------------------------------|
-| Reading only                                  | [Readable](#stream_class_stream_readable_1)   | [_read][stream-_read]                                                       |
-| Writing only                                  | [Writable](#stream_class_stream_writable_1)   | [_write][stream-_write], [_writev][stream-_writev]                          |
-| Reading and writing                           | [Duplex](#stream_class_stream_duplex_1)       | [_read][stream-_read], [_write][stream-_write], [_writev][stream-_writev]   |
-| Operate on written data, then read the result | [Transform](#stream_class_stream_transform_1) | [_transform][stream-_transform], [_flush][stream-_flush]                    |
- -In your implementation code, it is very important to never call the methods -described in [API for Stream Consumers][]. Otherwise, you can potentially cause -adverse side effects in programs that consume your streaming interfaces. - -### Class: stream.Duplex - - - -A "duplex" stream is one that is both Readable and Writable, such as a TCP -socket connection. - -Note that `stream.Duplex` is an abstract class designed to be extended -with an underlying implementation of the [`stream._read(size)`][stream-_read] -and [`stream._write(chunk, encoding, callback)`][stream-_write] methods as you -would with a Readable or Writable stream class. - -Since JavaScript doesn't have multiple prototypal inheritance, this class -prototypally inherits from Readable, and then parasitically from Writable. It is -thus up to the user to implement both the low-level -[`stream._read(n)`][stream-_read] method as well as the low-level -[`stream._write(chunk, encoding, callback)`][stream-_write] method on extension -duplex classes. - -#### new stream.Duplex(options) - -* `options` {Object} Passed to both Writable and Readable - constructors. Also has the following fields: - * `allowHalfOpen` {Boolean} Default = `true`. If set to `false`, then - the stream will automatically end the readable side when the - writable side ends and vice versa. - * `readableObjectMode` {Boolean} Default = `false`. Sets `objectMode` - for readable side of the stream. Has no effect if `objectMode` - is `true`. - * `writableObjectMode` {Boolean} Default = `false`. Sets `objectMode` - for writable side of the stream. Has no effect if `objectMode` - is `true`. - -In classes that extend the Duplex class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -### Class: stream.PassThrough - -This is a trivial implementation of a [Transform][] stream that simply -passes the input bytes across to the output. Its purpose is mainly -for examples and testing, but there are occasionally use cases where -it can come in handy as a building block for novel sorts of streams. - -### Class: stream.Readable - - - -`stream.Readable` is an abstract class designed to be extended with an -underlying implementation of the [`stream._read(size)`][stream-_read] method. - -Please see [API for Stream Consumers][] for how to consume -streams in your programs. What follows is an explanation of how to -implement Readable streams in your programs. - -#### new stream.Readable([options]) - -* `options` {Object} - * `highWaterMark` {Number} The maximum number of bytes to store in - the internal buffer before ceasing to read from the underlying - resource. Default = `16384` (16kb), or `16` for `objectMode` streams - * `encoding` {String} If specified, then buffers will be decoded to - strings using the specified encoding. Default = `null` - * `objectMode` {Boolean} Whether this stream should behave - as a stream of objects. Meaning that [`stream.read(n)`][stream-read] returns - a single value instead of a Buffer of size n. Default = `false` - * `read` {Function} Implementation for the [`stream._read()`][stream-_read] - method. - -In classes that extend the Readable class, make sure to call the -Readable constructor so that the buffering settings can be properly -initialized. 
- -#### readable.\_read(size) - -* `size` {Number} Number of bytes to read asynchronously - -Note: **Implement this method, but do NOT call it directly.** - -This method is prefixed with an underscore because it is internal to the -class that defines it and should only be called by the internal Readable -class methods. All Readable stream implementations must provide a \_read -method to fetch data from the underlying resource. - -When `_read()` is called, if data is available from the resource, the `_read()` -implementation should start pushing that data into the read queue by calling -[`this.push(dataChunk)`][stream-push]. `_read()` should continue reading from -the resource and pushing data until push returns `false`, at which point it -should stop reading from the resource. Only when `_read()` is called again after -it has stopped should it start reading more data from the resource and pushing -that data onto the queue. - -Note: once the `_read()` method is called, it will not be called again until -the [`stream.push()`][stream-push] method is called. - -The `size` argument is advisory. Implementations where a "read" is a -single call that returns data can use this to know how much data to -fetch. Implementations where that is not relevant, such as TCP or -TLS, may ignore this argument, and simply provide data whenever it -becomes available. There is no need, for example to "wait" until -`size` bytes are available before calling [`stream.push(chunk)`][stream-push]. - -#### readable.push(chunk[, encoding]) - - -* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue -* `encoding` {String} Encoding of String chunks. Must be a valid - Buffer encoding, such as `'utf8'` or `'ascii'` -* return {Boolean} Whether or not more pushes should be performed - -Note: **This method should be called by Readable implementors, NOT -by consumers of Readable streams.** - -If a value other than null is passed, The `push()` method adds a chunk of data -into the queue for subsequent stream processors to consume. If `null` is -passed, it signals the end of the stream (EOF), after which no more data -can be written. - -The data added with `push()` can be pulled out by calling the -[`stream.read()`][stream-read] method when the [`'readable'`][] event fires. - -This API is designed to be as flexible as possible. For example, -you may be wrapping a lower-level source which has some sort of -pause/resume mechanism, and a data callback. In those cases, you -could wrap the low-level source object by doing something like this: - -```js -// source is an object with readStop() and readStart() methods, -// and an `ondata` member that gets called when it has data, and -// an `onend` member that gets called when the data is over. - -util.inherits(SourceWrapper, Readable); - -function SourceWrapper(options) { - Readable.call(this, options); - - this._source = getLowlevelSourceObject(); - - // Every time there's data, we push it into the internal buffer. - this._source.ondata = (chunk) => { - // if push() returns false, then we need to stop reading from source - if (!this.push(chunk)) - this._source.readStop(); - }; - - // When the source ends, we push the EOF-signaling `null` chunk - this._source.onend = () => { - this.push(null); - }; -} - -// _read will be called when the stream wants to pull more data in -// the advisory size argument is ignored in this case. 
-SourceWrapper.prototype._read = function(size) { - this._source.readStart(); -}; -``` - -#### Example: A Counting Stream - - - -This is a basic example of a Readable stream. It emits the numerals -from 1 to 1,000,000 in ascending order, and then ends. - -```js -const Readable = require('stream').Readable; -const util = require('util'); -util.inherits(Counter, Readable); - -function Counter(opt) { - Readable.call(this, opt); - this._max = 1000000; - this._index = 1; -} - -Counter.prototype._read = function() { - var i = this._index++; - if (i > this._max) - this.push(null); - else { - var str = '' + i; - var buf = new Buffer(str, 'ascii'); - this.push(buf); - } -}; -``` - -#### Example: SimpleProtocol v1 (Sub-optimal) - -This is similar to the `parseHeader` function described -[here](#stream_readable_unshift_chunk), but implemented as a custom stream. -Also, note that this implementation does not convert the incoming data to a -string. - -However, this would be better implemented as a [Transform][] stream. See -[SimpleProtocol v2][] for a better implementation. - -```js -// A parser for a simple data protocol. -// The "header" is a JSON object, followed by 2 \n characters, and -// then a message body. -// -// NOTE: This can be done more simply as a Transform stream! -// Using Readable directly for this is sub-optimal. See the -// alternative example below under the Transform section. - -const Readable = require('stream').Readable; -const util = require('util'); - -util.inherits(SimpleProtocol, Readable); - -function SimpleProtocol(source, options) { - if (!(this instanceof SimpleProtocol)) - return new SimpleProtocol(source, options); - - Readable.call(this, options); - this._inBody = false; - this._sawFirstCr = false; - - // source is a readable stream, such as a socket or file - this._source = source; - - var self = this; - source.on('end', () => { - self.push(null); - }); - - // give it a kick whenever the source is readable - // read(0) will not consume any bytes - source.on('readable', () => { - self.read(0); - }); - - this._rawHeader = []; - this.header = null; -} - -SimpleProtocol.prototype._read = function(n) { - if (!this._inBody) { - var chunk = this._source.read(); - - // if the source doesn't have data, we don't have data yet. - if (chunk === null) - return this.push(''); - - // check if the chunk has a \n\n - var split = -1; - for (var i = 0; i < chunk.length; i++) { - if (chunk[i] === 10) { // '\n' - if (this._sawFirstCr) { - split = i; - break; - } else { - this._sawFirstCr = true; - } - } else { - this._sawFirstCr = false; - } - } - - if (split === -1) { - // still waiting for the \n\n - // stash the chunk, and try again. - this._rawHeader.push(chunk); - this.push(''); - } else { - this._inBody = true; - var h = chunk.slice(0, split); - this._rawHeader.push(h); - var header = Buffer.concat(this._rawHeader).toString(); - try { - this.header = JSON.parse(header); - } catch (er) { - this.emit('error', new Error('invalid simple protocol data')); - return; - } - // now, because we got some extra data, unshift the rest - // back into the read queue so that our consumer will see it. - var b = chunk.slice(split); - this.unshift(b); - // calling unshift by itself does not reset the reading state - // of the stream; since we're inside _read, doing an additional - // push('') will reset the state appropriately. - this.push(''); - - // and let them know that we are done parsing the header. 
- this.emit('header', this.header); - } - } else { - // from there on, just provide the data to our consumer. - // careful not to push(null), since that would indicate EOF. - var chunk = this._source.read(); - if (chunk) this.push(chunk); - } -}; - -// Usage: -// var parser = new SimpleProtocol(source); -// Now parser is a readable stream that will emit 'header' -// with the parsed header data. -``` - -### Class: stream.Transform - -A "transform" stream is a duplex stream where the output is causally -connected in some way to the input, such as a [zlib][] stream or a -[crypto][] stream. - -There is no requirement that the output be the same size as the input, -the same number of chunks, or arrive at the same time. For example, a -Hash stream will only ever have a single chunk of output which is -provided when the input is ended. A zlib stream will produce output -that is either much smaller or much larger than its input. - -Rather than implement the [`stream._read()`][stream-_read] and -[`stream._write()`][stream-_write] methods, Transform classes must implement the -[`stream._transform()`][stream-_transform] method, and may optionally -also implement the [`stream._flush()`][stream-_flush] method. (See below.) - -#### new stream.Transform([options]) - -* `options` {Object} Passed to both Writable and Readable - constructors. Also has the following fields: - * `transform` {Function} Implementation for the - [`stream._transform()`][stream-_transform] method. - * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush] - method. - -In classes that extend the Transform class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -#### Events: 'finish' and 'end' - -The [`'finish'`][] and [`'end'`][] events are from the parent Writable -and Readable classes respectively. The `'finish'` event is fired after -[`stream.end()`][stream-end] is called and all chunks have been processed by -[`stream._transform()`][stream-_transform], `'end'` is fired after all data has -been output which is after the callback in [`stream._flush()`][stream-_flush] -has been called. - -#### transform.\_flush(callback) - -* `callback` {Function} Call this function (optionally with an error - argument) when you are done flushing any remaining data. - -Note: **This function MUST NOT be called directly.** It MAY be implemented -by child classes, and if so, will be called by the internal Transform -class methods only. - -In some cases, your transform operation may need to emit a bit more -data at the end of the stream. For example, a `Zlib` compression -stream will store up some internal state so that it can optimally -compress the output. At the end, however, it needs to do the best it -can with what is left, so that the data will be complete. - -In those cases, you can implement a `_flush()` method, which will be -called at the very end, after all the written data is consumed, but -before emitting [`'end'`][] to signal the end of the readable side. Just -like with [`stream._transform()`][stream-_transform], call -`transform.push(chunk)` zero or more times, as appropriate, and call `callback` -when the flush operation is complete. - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. 
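
A minimal sketch of the `_flush()` hook just described, written against the public `stream.Transform` constructor options rather than the subclassing pattern; the byte-counting behaviour and the `counter`/`total` names are illustrative assumptions, not taken from the vendored document above:

```js
const stream = require('stream');

let total = 0;
const counter = new stream.Transform({
  transform: function (chunk, encoding, callback) {
    total += chunk.length;   // pass the data through untouched
    callback(null, chunk);   // the second argument is push()ed for us
  },
  flush: function (callback) {
    // Runs after end() is called and every chunk has been transformed,
    // but before 'end' fires: the last chance to push trailing data.
    this.push('\n[total bytes seen: ' + total + ']\n');
    callback();
  }
});

process.stdin.pipe(counter).pipe(process.stdout);
```

Piping through `counter` leaves the payload untouched and appends a single summary line once the writable side has finished.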
- -#### transform.\_transform(chunk, encoding, callback) - -* `chunk` {Buffer|String} The chunk to be transformed. Will **always** - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then this is the - encoding type. If chunk is a buffer, then this is the special - value - 'buffer', ignore it in this case. -* `callback` {Function} Call this function (optionally with an error - argument and data) when you are done processing the supplied chunk. - -Note: **This function MUST NOT be called directly.** It should be -implemented by child classes, and called by the internal Transform -class methods only. - -All Transform stream implementations must provide a `_transform()` -method to accept input and produce output. - -`_transform()` should do whatever has to be done in this specific -Transform class, to handle the bytes being written, and pass them off -to the readable portion of the interface. Do asynchronous I/O, -process things, and so on. - -Call `transform.push(outputChunk)` 0 or more times to generate output -from this input chunk, depending on how much data you want to output -as a result of this chunk. - -Call the callback function only when the current chunk is completely -consumed. Note that there may or may not be output as a result of any -particular input chunk. If you supply a second argument to the callback -it will be passed to the push method. In other words the following are -equivalent: - -```js -transform.prototype._transform = function (data, encoding, callback) { - this.push(data); - callback(); -}; - -transform.prototype._transform = function (data, encoding, callback) { - callback(null, data); -}; -``` - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - -#### Example: `SimpleProtocol` parser v2 - -The example [here](#stream_example_simpleprotocol_v1_sub_optimal) of a simple -protocol parser can be implemented simply by using the higher level -[Transform][] stream class, similar to the `parseHeader` and `SimpleProtocol -v1` examples. - -In this example, rather than providing the input as an argument, it -would be piped into the parser, which is a more idiomatic Node.js stream -approach. - -```javascript -const util = require('util'); -const Transform = require('stream').Transform; -util.inherits(SimpleProtocol, Transform); - -function SimpleProtocol(options) { - if (!(this instanceof SimpleProtocol)) - return new SimpleProtocol(options); - - Transform.call(this, options); - this._inBody = false; - this._sawFirstCr = false; - this._rawHeader = []; - this.header = null; -} - -SimpleProtocol.prototype._transform = function(chunk, encoding, done) { - if (!this._inBody) { - // check if the chunk has a \n\n - var split = -1; - for (var i = 0; i < chunk.length; i++) { - if (chunk[i] === 10) { // '\n' - if (this._sawFirstCr) { - split = i; - break; - } else { - this._sawFirstCr = true; - } - } else { - this._sawFirstCr = false; - } - } - - if (split === -1) { - // still waiting for the \n\n - // stash the chunk, and try again. 
- this._rawHeader.push(chunk); - } else { - this._inBody = true; - var h = chunk.slice(0, split); - this._rawHeader.push(h); - var header = Buffer.concat(this._rawHeader).toString(); - try { - this.header = JSON.parse(header); - } catch (er) { - this.emit('error', new Error('invalid simple protocol data')); - return; - } - // and let them know that we are done parsing the header. - this.emit('header', this.header); - - // now, because we got some extra data, emit this first. - this.push(chunk.slice(split)); - } - } else { - // from there on, just provide the data to our consumer as-is. - this.push(chunk); - } - done(); -}; - -// Usage: -// var parser = new SimpleProtocol(); -// source.pipe(parser) -// Now parser is a readable stream that will emit 'header' -// with the parsed header data. -``` - -### Class: stream.Writable - - - -`stream.Writable` is an abstract class designed to be extended with an -underlying implementation of the -[`stream._write(chunk, encoding, callback)`][stream-_write] method. - -Please see [API for Stream Consumers][] for how to consume -writable streams in your programs. What follows is an explanation of -how to implement Writable streams in your programs. - -#### new stream.Writable([options]) - -* `options` {Object} - * `highWaterMark` {Number} Buffer level when - [`stream.write()`][stream-write] starts returning `false`. Default = `16384` - (16kb), or `16` for `objectMode` streams. - * `decodeStrings` {Boolean} Whether or not to decode strings into - Buffers before passing them to [`stream._write()`][stream-_write]. - Default = `true` - * `objectMode` {Boolean} Whether or not the - [`stream.write(anyObj)`][stream-write] is a valid operation. If set you can - write arbitrary data instead of only `Buffer` / `String` data. - Default = `false` - * `write` {Function} Implementation for the - [`stream._write()`][stream-_write] method. - * `writev` {Function} Implementation for the - [`stream._writev()`][stream-_writev] method. - -In classes that extend the Writable class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -#### writable.\_write(chunk, encoding, callback) - -* `chunk` {Buffer|String} The chunk to be written. Will **always** - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then this is the - encoding type. If chunk is a buffer, then this is the special - value - 'buffer', ignore it in this case. -* `callback` {Function} Call this function (optionally with an error - argument) when you are done processing the supplied chunk. - -All Writable stream implementations must provide a -[`stream._write()`][stream-_write] method to send data to the underlying -resource. - -Note: **This function MUST NOT be called directly.** It should be -implemented by child classes, and called by the internal Writable -class methods only. - -Call the callback using the standard `callback(error)` pattern to -signal that the write completed successfully or with an error. - -If the `decodeStrings` flag is set in the constructor options, then -`chunk` may be a string rather than a Buffer, and `encoding` will -indicate the sort of string that it is. This is to support -implementations that have an optimized handling for certain string -data encodings. If you do not explicitly set the `decodeStrings` -option to `false`, then you can safely ignore the `encoding` argument, -and assume that `chunk` will always be a Buffer. 
- -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - -#### writable.\_writev(chunks, callback) - -* `chunks` {Array} The chunks to be written. Each chunk has following - format: `{ chunk: ..., encoding: ... }`. -* `callback` {Function} Call this function (optionally with an error - argument) when you are done processing the supplied chunks. - -Note: **This function MUST NOT be called directly.** It may be -implemented by child classes, and called by the internal Writable -class methods only. - -This function is completely optional to implement. In most cases it is -unnecessary. If implemented, it will be called with all the chunks -that are buffered in the write queue. - - -## Simplified Constructor API - - - -In simple cases there is now the added benefit of being able to construct a -stream without inheritance. - -This can be done by passing the appropriate methods as constructor options: - -Examples: - -### Duplex - -```js -var duplex = new stream.Duplex({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - }, - write: function(chunk, encoding, next) { - // sets this._write under the hood - - // An optional error can be passed as the first argument - next() - } -}); - -// or - -var duplex = new stream.Duplex({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - }, - writev: function(chunks, next) { - // sets this._writev under the hood - - // An optional error can be passed as the first argument - next() - } -}); -``` - -### Readable - -```js -var readable = new stream.Readable({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - } -}); -``` - -### Transform - -```js -var transform = new stream.Transform({ - transform: function(chunk, encoding, next) { - // sets this._transform under the hood - - // generate output as many times as needed - // this.push(chunk); - - // call when the current chunk is consumed - next(); - }, - flush: function(done) { - // sets this._flush under the hood - - // generate output as many times as needed - // this.push(chunk); - - done(); - } -}); -``` - -### Writable - -```js -var writable = new stream.Writable({ - write: function(chunk, encoding, next) { - // sets this._write under the hood - - // An optional error can be passed as the first argument - next() - } -}); - -// or - -var writable = new stream.Writable({ - writev: function(chunks, next) { - // sets this._writev under the hood - - // An optional error can be passed as the first argument - next() - } -}); -``` - -## Streams: Under the Hood - - - -### Buffering - - - -Both Writable and Readable streams will buffer data on an internal -object which can be retrieved from `_writableState.getBuffer()` or -`_readableState.buffer`, respectively. - -The amount of data that will potentially be buffered depends on the -`highWaterMark` option which is passed into the constructor. - -Buffering in Readable streams happens when the implementation calls -[`stream.push(chunk)`][stream-push]. 
If the consumer of the Stream does not -call [`stream.read()`][stream-read], then the data will sit in the internal -queue until it is consumed. - -Buffering in Writable streams happens when the user calls -[`stream.write(chunk)`][stream-write] repeatedly, even when it returns `false`. - -The purpose of streams, especially with the [`stream.pipe()`][] method, is to -limit the buffering of data to acceptable levels, so that sources and -destinations of varying speed will not overwhelm the available memory. - -### Compatibility with Older Node.js Versions - - - -In versions of Node.js prior to v0.10, the Readable stream interface was -simpler, but also less powerful and less useful. - -* Rather than waiting for you to call the [`stream.read()`][stream-read] method, - [`'data'`][] events would start emitting immediately. If you needed to do - some I/O to decide how to handle data, then you had to store the chunks - in some kind of buffer so that they would not be lost. -* The [`stream.pause()`][stream-pause] method was advisory, rather than - guaranteed. This meant that you still had to be prepared to receive - [`'data'`][] events even when the stream was in a paused state. - -In Node.js v0.10, the [Readable][] class was added. -For backwards compatibility with older Node.js programs, Readable streams -switch into "flowing mode" when a [`'data'`][] event handler is added, or -when the [`stream.resume()`][stream-resume] method is called. The effect is -that, even if you are not using the new [`stream.read()`][stream-read] method -and [`'readable'`][] event, you no longer have to worry about losing -[`'data'`][] chunks. - -Most programs will continue to function normally. However, this -introduces an edge case in the following conditions: - -* No [`'data'`][] event handler is added. -* The [`stream.resume()`][stream-resume] method is never called. -* The stream is not piped to any writable destination. - -For example, consider the following code: - -```js -// WARNING! BROKEN! -net.createServer((socket) => { - - // we add an 'end' method, but never consume the data - socket.on('end', () => { - // It will never get here. - socket.end('I got your message (but didnt read it)\n'); - }); - -}).listen(1337); -``` - -In versions of Node.js prior to v0.10, the incoming message data would be -simply discarded. However, in Node.js v0.10 and beyond, -the socket will remain paused forever. - -The workaround in this situation is to call the -[`stream.resume()`][stream-resume] method to start the flow of data: - -```js -// Workaround -net.createServer((socket) => { - - socket.on('end', () => { - socket.end('I got your message (but didnt read it)\n'); - }); - - // start the flow of data, discarding it. - socket.resume(); - -}).listen(1337); -``` - -In addition to new Readable streams switching into flowing mode, -pre-v0.10 style streams can be wrapped in a Readable class using the -[`stream.wrap()`][] method. - - -### Object Mode - - - -Normally, Streams operate on Strings and Buffers exclusively. - -Streams that are in **object mode** can emit generic JavaScript values -other than Buffers and Strings. - -A Readable stream in object mode will always return a single item from -a call to [`stream.read(size)`][stream-read], regardless of what the size -argument is. - -A Writable stream in object mode will always ignore the `encoding` -argument to [`stream.write(data, encoding)`][stream-write]. - -The special value `null` still retains its special value for object -mode streams. 
That is, for object mode readable streams, `null` as a -return value from [`stream.read()`][stream-read] indicates that there is no more -data, and [`stream.push(null)`][stream-push] will signal the end of stream data -(`EOF`). - -No streams in Node.js core are object mode streams. This pattern is only -used by userland streaming libraries. - -You should set `objectMode` in your stream child class constructor on -the options object. Setting `objectMode` mid-stream is not safe. - -For Duplex streams `objectMode` can be set exclusively for readable or -writable side with `readableObjectMode` and `writableObjectMode` -respectively. These options can be used to implement parsers and -serializers with Transform streams. - -```js -const util = require('util'); -const StringDecoder = require('string_decoder').StringDecoder; -const Transform = require('stream').Transform; -util.inherits(JSONParseStream, Transform); - -// Gets \n-delimited JSON string data, and emits the parsed objects -function JSONParseStream() { - if (!(this instanceof JSONParseStream)) - return new JSONParseStream(); - - Transform.call(this, { readableObjectMode : true }); - - this._buffer = ''; - this._decoder = new StringDecoder('utf8'); -} - -JSONParseStream.prototype._transform = function(chunk, encoding, cb) { - this._buffer += this._decoder.write(chunk); - // split on newlines - var lines = this._buffer.split(/\r?\n/); - // keep the last partial line buffered - this._buffer = lines.pop(); - for (var l = 0; l < lines.length; l++) { - var line = lines[l]; - try { - var obj = JSON.parse(line); - } catch (er) { - this.emit('error', er); - return; - } - // push the parsed object out to the readable consumer - this.push(obj); - } - cb(); -}; - -JSONParseStream.prototype._flush = function(cb) { - // Just handle any leftover - var rem = this._buffer.trim(); - if (rem) { - try { - var obj = JSON.parse(rem); - } catch (er) { - this.emit('error', er); - return; - } - // push the parsed object out to the readable consumer - this.push(obj); - } - cb(); -}; -``` - -### `stream.read(0)` - -There are some cases where you want to trigger a refresh of the -underlying readable stream mechanisms, without actually consuming any -data. In that case, you can call `stream.read(0)`, which will always -return null. - -If the internal read buffer is below the `highWaterMark`, and the -stream is not currently reading, then calling `stream.read(0)` will trigger -a low-level [`stream._read()`][stream-_read] call. - -There is almost never a need to do this. However, you will see some -cases in Node.js's internals where this is done, particularly in the -Readable stream class internals. - -### `stream.push('')` - -Pushing a zero-byte string or Buffer (when not in [Object mode][]) has an -interesting side effect. Because it *is* a call to -[`stream.push()`][stream-push], it will end the `reading` process. However, it -does *not* add any data to the readable buffer, so there's nothing for -a user to consume. - -Very rarely, there are cases where you have no data to provide now, -but the consumer of your stream (or, perhaps, another bit of your own -code) will know when to check again, by calling [`stream.read(0)`][stream-read]. -In those cases, you *may* call `stream.push('')`. - -So far, the only use case for this functionality is in the -[`tls.CryptoStream`][] class, which is deprecated in Node.js/io.js v1.0. 
If you -find that you have to use `stream.push('')`, please consider another -approach, because it almost certainly indicates that something is -horribly wrong. - -[`'data'`]: #stream_event_data -[`'drain'`]: #stream_event_drain -[`'end'`]: #stream_event_end -[`'finish'`]: #stream_event_finish -[`'readable'`]: #stream_event_readable -[`buf.toString(encoding)`]: https://nodejs.org/docs/v5.8.0/api/buffer.html#buffer_buf_tostring_encoding_start_end -[`EventEmitter`]: https://nodejs.org/docs/v5.8.0/api/events.html#events_class_eventemitter -[`process.stderr`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stderr -[`process.stdin`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdin -[`process.stdout`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdout -[`stream.cork()`]: #stream_writable_cork -[`stream.pipe()`]: #stream_readable_pipe_destination_options -[`stream.uncork()`]: #stream_writable_uncork -[`stream.unpipe()`]: #stream_readable_unpipe_destination -[`stream.wrap()`]: #stream_readable_wrap_stream -[`tls.CryptoStream`]: https://nodejs.org/docs/v5.8.0/api/tls.html#tls_class_cryptostream -[`util.inherits()`]: https://nodejs.org/docs/v5.8.0/api/util.html#util_util_inherits_constructor_superconstructor -[API for Stream Consumers]: #stream_api_for_stream_consumers -[API for Stream Implementors]: #stream_api_for_stream_implementors -[child process stdin]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdin -[child process stdout and stderr]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdout -[Compatibility]: #stream_compatibility_with_older_node_js_versions -[crypto]: crypto.html -[Duplex]: #stream_class_stream_duplex -[fs read streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_readstream -[fs write streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_writestream -[HTTP requests, on the client]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_clientrequest -[HTTP responses, on the server]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_serverresponse -[http-incoming-message]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_incomingmessage -[Object mode]: #stream_object_mode -[Readable]: #stream_class_stream_readable -[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2 -[stream-_flush]: #stream_transform_flush_callback -[stream-_read]: #stream_readable_read_size_1 -[stream-_transform]: #stream_transform_transform_chunk_encoding_callback -[stream-_write]: #stream_writable_write_chunk_encoding_callback_1 -[stream-_writev]: #stream_writable_writev_chunks_callback -[stream-end]: #stream_writable_end_chunk_encoding_callback -[stream-pause]: #stream_readable_pause -[stream-push]: #stream_readable_push_chunk_encoding -[stream-read]: #stream_readable_read_size -[stream-resume]: #stream_readable_resume -[stream-write]: #stream_writable_write_chunk_encoding_callback -[TCP sockets]: https://nodejs.org/docs/v5.8.0/api/net.html#net_class_net_socket -[Transform]: #stream_class_stream_transform -[Writable]: #stream_class_stream_writable -[zlib]: zlib.html diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md deleted file mode 100644 index c141a99c26c..00000000000 --- 
a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md +++ /dev/null @@ -1,58 +0,0 @@ -# streams WG Meeting 2015-01-30 - -## Links - -* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg -* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 -* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ - -## Agenda - -Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. - -* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) -* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) -* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) -* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) - -## Minutes - -### adopt a charter - -* group: +1's all around - -### What versioning scheme should be adopted? -* group: +1’s 3.0.0 -* domenic+group: pulling in patches from other sources where appropriate -* mikeal: version independently, suggesting versions for io.js -* mikeal+domenic: work with TC to notify in advance of changes -simpler stream creation - -### streamline creation of streams -* sam: streamline creation of streams -* domenic: nice simple solution posted - but, we lose the opportunity to change the model - may not be backwards incompatible (double check keys) - - **action item:** domenic will check - -### remove implicit flowing of streams on(‘data’) -* add isFlowing / isPaused -* mikeal: worrying that we’re documenting polyfill methods – confuses users -* domenic: more reflective API is probably good, with warning labels for users -* new section for mad scientists (reflective stream access) -* calvin: name the “third state” -* mikeal: maybe borrow the name from whatwg? -* domenic: we’re missing the “third state” -* consensus: kind of difficult to name the third state -* mikeal: figure out differences in states / compat -* mathias: always flow on data – eliminates third state - * explore what it breaks - -**action items:** -* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream) -* ask rod/build for infrastructure -* **chris**: explore the “flow on data” approach -* add isPaused/isFlowing -* add new docs section -* move isPaused to that section diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/duplex.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/duplex.js deleted file mode 100644 index ca807af8762..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/duplex.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_duplex.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_duplex.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_duplex.js deleted file mode 100644 index 736693b8400..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_duplex.js +++ /dev/null @@ -1,75 +0,0 @@ -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. 
- -'use strict'; - -/**/ - -var objectKeys = Object.keys || function (obj) { - var keys = []; - for (var key in obj) { - keys.push(key); - }return keys; -}; -/**/ - -module.exports = Duplex; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -var Readable = require('./_stream_readable'); -var Writable = require('./_stream_writable'); - -util.inherits(Duplex, Readable); - -var keys = objectKeys(Writable.prototype); -for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; -} - -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - - Readable.call(this, options); - Writable.call(this, options); - - if (options && options.readable === false) this.readable = false; - - if (options && options.writable === false) this.writable = false; - - this.allowHalfOpen = true; - if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; - - this.once('end', onend); -} - -// the no-half-open enforcer -function onend() { - // if we allow half-open state, or if the writable side ended, - // then we're ok. - if (this.allowHalfOpen || this._writableState.ended) return; - - // no more data can be written. - // But allow more writes to happen in this tick. - processNextTick(onEndNT, this); -} - -function onEndNT(self) { - self.end(); -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_passthrough.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_passthrough.js deleted file mode 100644 index d06f71f1868..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_passthrough.js +++ /dev/null @@ -1,26 +0,0 @@ -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. 
- -'use strict'; - -module.exports = PassThrough; - -var Transform = require('./_stream_transform'); - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -util.inherits(PassThrough, Transform); - -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - - Transform.call(this, options); -} - -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_readable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_readable.js deleted file mode 100644 index 54a9d5c553d..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_readable.js +++ /dev/null @@ -1,880 +0,0 @@ -'use strict'; - -module.exports = Readable; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var isArray = require('isarray'); -/**/ - -/**/ -var Buffer = require('buffer').Buffer; -/**/ - -Readable.ReadableState = ReadableState; - -var EE = require('events'); - -/**/ -var EElistenerCount = function (emitter, type) { - return emitter.listeners(type).length; -}; -/**/ - -/**/ -var Stream; -(function () { - try { - Stream = require('st' + 'ream'); - } catch (_) {} finally { - if (!Stream) Stream = require('events').EventEmitter; - } -})(); -/**/ - -var Buffer = require('buffer').Buffer; - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -/**/ -var debugUtil = require('util'); -var debug = undefined; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function () {}; -} -/**/ - -var StringDecoder; - -util.inherits(Readable, Stream); - -var Duplex; -function ReadableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - - if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - var hwm = options.highWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; - - // cast to ints. - this.highWaterMark = ~ ~this.highWaterMark; - - this.buffer = []; - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. 
- this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // when piping, we only care about 'readable' events that happen - // after read()ing all the bytes and not getting any pushback. - this.ranOut = false; - - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; - - // if true, a maybeReadMore has been scheduled - this.readingMore = false; - - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } -} - -var Duplex; -function Readable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - if (!(this instanceof Readable)) return new Readable(options); - - this._readableState = new ReadableState(options, this); - - // legacy - this.readable = true; - - if (options && typeof options.read === 'function') this._read = options.read; - - Stream.call(this); -} - -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - - if (!state.objectMode && typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = new Buffer(chunk, encoding); - encoding = ''; - } - } - - return readableAddChunk(this, state, chunk, encoding, false); -}; - -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - var state = this._readableState; - return readableAddChunk(this, state, chunk, '', true); -}; - -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; - -function readableAddChunk(stream, state, chunk, encoding, addToFront) { - var er = chunkInvalid(state, chunk); - if (er) { - stream.emit('error', er); - } else if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (state.ended && !addToFront) { - var e = new Error('stream.push() after EOF'); - stream.emit('error', e); - } else if (state.endEmitted && addToFront) { - var e = new Error('stream.unshift() after end event'); - stream.emit('error', e); - } else { - var skipAdd; - if (state.decoder && !addToFront && !encoding) { - chunk = state.decoder.write(chunk); - skipAdd = !state.objectMode && chunk.length === 0; - } - - if (!addToFront) state.reading = false; - - // Don't add to the buffer if we've decoded to an empty string chunk and - // we're not in object mode - if (!skipAdd) { - // if we want the data now, just emit it. - if (state.flowing && state.length === 0 && !state.sync) { - stream.emit('data', chunk); - stream.read(0); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - - if (state.needReadable) emitReadable(stream); - } - } - - maybeReadMore(stream, state); - } - } else if (!addToFront) { - state.reading = false; - } - - return needMoreData(state); -} - -// if it's past the high water mark, we can push in some more. -// Also, if we have no data yet, we can stand some -// more bytes. This is to work around cases where hwm=0, -// such as the repl. 
Also, if the push() triggered a -// readable event, and the user called read(largeNumber) such that -// needReadable was set, then we ought to push more, so that another -// 'readable' event will be triggered. -function needMoreData(state) { - return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); -} - -// backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this._readableState.decoder = new StringDecoder(enc); - this._readableState.encoding = enc; - return this; -}; - -// Don't raise the hwm > 8MB -var MAX_HWM = 0x800000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - n = MAX_HWM; - } else { - // Get the next highest power of 2 - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - return n; -} - -function howMuchToRead(n, state) { - if (state.length === 0 && state.ended) return 0; - - if (state.objectMode) return n === 0 ? 0 : 1; - - if (n === null || isNaN(n)) { - // only flow one buffer at a time - if (state.flowing && state.buffer.length) return state.buffer[0].length;else return state.length; - } - - if (n <= 0) return 0; - - // If we're asking for more than the target buffer level, - // then raise the water mark. Bump up to the next highest - // power of 2, to prevent increasing it excessively in tiny - // amounts. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - - // don't have that much. return null, unless we've ended. - if (n > state.length) { - if (!state.ended) { - state.needReadable = true; - return 0; - } else { - return state.length; - } - } - - return n; -} - -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - var state = this._readableState; - var nOrig = n; - - if (typeof n !== 'number' || n > 0) state.emittedReadable = false; - - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } - - n = howMuchToRead(n, state); - - // if we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } - - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. 
- // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. - - // if we need a readable event, then we need to do some reading. - var doRead = state.needReadable; - debug('need readable', doRead); - - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } - - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } - - if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - } - - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (doRead && !state.reading) n = howMuchToRead(nOrig, state); - - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - - if (ret === null) { - state.needReadable = true; - n = 0; - } - - state.length -= n; - - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (state.length === 0 && !state.ended) state.needReadable = true; - - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended && state.length === 0) endReadable(this); - - if (ret !== null) this.emit('data', ret); - - return ret; -}; - -function chunkInvalid(state, chunk) { - var er = null; - if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); - } - return er; -} - -function onEofChunk(stream, state) { - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } - } - state.ended = true; - - // emit 'readable' now to make sure it gets picked up. - emitReadable(stream); -} - -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. -function emitReadable(stream) { - var state = stream._readableState; - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream); - } -} - -function emitReadable_(stream) { - debug('emit readable'); - stream.emit('readable'); - flow(stream); -} - -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. 
-function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - processNextTick(maybeReadMore_, stream, state); - } -} - -function maybeReadMore_(stream, state) { - var len = state.length; - while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break;else len = state.length; - } - state.readingMore = false; -} - -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - this.emit('error', new Error('not implemented')); -}; - -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; - } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - - var endFn = doEnd ? onend : cleanup; - if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn); - - dest.on('unpipe', onunpipe); - function onunpipe(readable) { - debug('onunpipe'); - if (readable === src) { - cleanup(); - } - } - - function onend() { - debug('onend'); - dest.end(); - } - - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', cleanup); - src.removeListener('data', ondata); - - cleanedUp = true; - - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } - - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - var ret = dest.write(chunk); - if (false === ret) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - if (state.pipesCount === 1 && state.pipes[0] === dest && src.listenerCount('data') === 1 && !cleanedUp) { - debug('false write response, pause', src._readableState.awaitDrain); - src._readableState.awaitDrain++; - } - src.pause(); - } - } - - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. 
- function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); - } - // This is a brutally ugly hack to make sure that our error handler - // is attached before any userland ones. NEVER DO THIS. - if (!dest._events || !dest._events.error) dest.on('error', onerror);else if (isArray(dest._events.error)) dest._events.error.unshift(onerror);else dest._events.error = [onerror, dest._events.error]; - - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - dest.once('finish', onfinish); - - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } - - // tell the dest that it's being piped to - dest.emit('pipe', src); - - // start the flow if it hasn't been started already. - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } - - return dest; -}; - -function pipeOnDrain(src) { - return function () { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; -} - -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; - - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - - if (!dest) dest = state.pipes; - - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this); - return this; - } - - // slow case. multiple pipe destinations. - - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - - for (var _i = 0; _i < len; _i++) { - dests[_i].emit('unpipe', this); - }return this; - } - - // try to find the right one. - var i = indexOf(state.pipes, dest); - if (i === -1) return this; - - state.pipes.splice(i, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - - dest.emit('unpipe', this); - - return this; -}; - -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - - // If listening to data, and it has not explicitly been paused, - // then call resume to start the flow of data on the next tick. 
- if (ev === 'data' && false !== this._readableState.flowing) { - this.resume(); - } - - if (ev === 'readable' && !this._readableState.endEmitted) { - var state = this._readableState; - if (!state.readableListening) { - state.readableListening = true; - state.emittedReadable = false; - state.needReadable = true; - if (!state.reading) { - processNextTick(nReadingNextTick, this); - } else if (state.length) { - emitReadable(this, state); - } - } - } - - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; - -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} - -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. -Readable.prototype.resume = function () { - var state = this._readableState; - if (!state.flowing) { - debug('resume'); - state.flowing = true; - resume(this, state); - } - return this; -}; - -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - processNextTick(resume_, stream, state); - } -} - -function resume_(stream, state) { - if (!state.reading) { - debug('resume read 0'); - stream.read(0); - } - - state.resumeScheduled = false; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} - -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (false !== this._readableState.flowing) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - return this; -}; - -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - if (state.flowing) { - do { - var chunk = stream.read(); - } while (null !== chunk && state.flowing); - } -} - -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - var state = this._readableState; - var paused = false; - - var self = this; - stream.on('end', function () { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) self.push(chunk); - } - - self.push(null); - }); - - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); - - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - - var ret = self.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } - }); - - // proxy all the other methods. - // important when wrapping filters and duplexes. - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function (method) { - return function () { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } - - // proxy certain important events. - var events = ['error', 'close', 'destroy', 'pause', 'resume']; - forEach(events, function (ev) { - stream.on(ev, self.emit.bind(self, ev)); - }); - - // when we try to consume some more bytes, simply unpause the - // underlying stream. - self._read = function (n) { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); - } - }; - - return self; -}; - -// exposed for testing purposes only. -Readable._fromList = fromList; - -// Pluck off n bytes from an array of buffers. 
-// Length is the combined lengths of all the buffers in the list. -function fromList(n, state) { - var list = state.buffer; - var length = state.length; - var stringMode = !!state.decoder; - var objectMode = !!state.objectMode; - var ret; - - // nothing in the list, definitely empty. - if (list.length === 0) return null; - - if (length === 0) ret = null;else if (objectMode) ret = list.shift();else if (!n || n >= length) { - // read it all, truncate the array. - if (stringMode) ret = list.join('');else if (list.length === 1) ret = list[0];else ret = Buffer.concat(list, length); - list.length = 0; - } else { - // read just some of it. - if (n < list[0].length) { - // just take a part of the first list item. - // slice is the same for buffers and strings. - var buf = list[0]; - ret = buf.slice(0, n); - list[0] = buf.slice(n); - } else if (n === list[0].length) { - // first list is a perfect match - ret = list.shift(); - } else { - // complex case. - // we have enough to cover it, but it spans past the first buffer. - if (stringMode) ret = '';else ret = new Buffer(n); - - var c = 0; - for (var i = 0, l = list.length; i < l && c < n; i++) { - var buf = list[0]; - var cpy = Math.min(n - c, buf.length); - - if (stringMode) ret += buf.slice(0, cpy);else buf.copy(ret, c, 0, cpy); - - if (cpy < buf.length) list[0] = buf.slice(cpy);else list.shift(); - - c += cpy; - } - } - } - - return ret; -} - -function endReadable(stream) { - var state = stream._readableState; - - // If we get here before consuming all the bytes, then that is a - // bug in node. Should never happen. - if (state.length > 0) throw new Error('endReadable called on non-empty stream'); - - if (!state.endEmitted) { - state.ended = true; - processNextTick(endReadableNT, state, stream); - } -} - -function endReadableNT(state, stream) { - // Check that we didn't get one last unshift. - if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - } -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} - -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - return -1; -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_transform.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_transform.js deleted file mode 100644 index 625cdc17698..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_transform.js +++ /dev/null @@ -1,180 +0,0 @@ -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. 
When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. - -'use strict'; - -module.exports = Transform; - -var Duplex = require('./_stream_duplex'); - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -util.inherits(Transform, Duplex); - -function TransformState(stream) { - this.afterTransform = function (er, data) { - return afterTransform(stream, er, data); - }; - - this.needTransform = false; - this.transforming = false; - this.writecb = null; - this.writechunk = null; - this.writeencoding = null; -} - -function afterTransform(stream, er, data) { - var ts = stream._transformState; - ts.transforming = false; - - var cb = ts.writecb; - - if (!cb) return stream.emit('error', new Error('no writecb in Transform class')); - - ts.writechunk = null; - ts.writecb = null; - - if (data !== null && data !== undefined) stream.push(data); - - cb(er); - - var rs = stream._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - stream._read(rs.highWaterMark); - } -} - -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - - Duplex.call(this, options); - - this._transformState = new TransformState(this); - - // when the writable side finishes, then flush out anything remaining. - var stream = this; - - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; - - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. 
- this._readableState.sync = false; - - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - - if (typeof options.flush === 'function') this._flush = options.flush; - } - - this.once('prefinish', function () { - if (typeof this._flush === 'function') this._flush(function (er) { - done(stream, er); - });else done(stream); - }); -} - -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; - -// This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. -Transform.prototype._transform = function (chunk, encoding, cb) { - throw new Error('not implemented'); -}; - -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; - -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; - - if (ts.writechunk !== null && ts.writecb && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. - ts.needTransform = true; - } -}; - -function done(stream, er) { - if (er) return stream.emit('error', er); - - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - var ws = stream._writableState; - var ts = stream._transformState; - - if (ws.length) throw new Error('calling transform done when ws.length != 0'); - - if (ts.transforming) throw new Error('calling transform done when still transforming'); - - return stream.push(null); -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_writable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_writable.js deleted file mode 100644 index 95916c992a9..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_writable.js +++ /dev/null @@ -1,516 +0,0 @@ -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. - -'use strict'; - -module.exports = Writable; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? 
setImmediate : processNextTick; -/**/ - -/**/ -var Buffer = require('buffer').Buffer; -/**/ - -Writable.WritableState = WritableState; - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -/**/ -var internalUtil = { - deprecate: require('util-deprecate') -}; -/**/ - -/**/ -var Stream; -(function () { - try { - Stream = require('st' + 'ream'); - } catch (_) {} finally { - if (!Stream) Stream = require('events').EventEmitter; - } -})(); -/**/ - -var Buffer = require('buffer').Buffer; - -util.inherits(Writable, Stream); - -function nop() {} - -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} - -var Duplex; -function WritableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!options.objectMode; - - if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - var hwm = options.highWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; - - // cast to ints. - this.highWaterMark = ~ ~this.highWaterMark; - - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; - - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; - - // a flag to see when we're in the middle of a write. - this.writing = false; - - // when true all writes will be buffered until .uncork() call - this.corked = 0; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false; - - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; - - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; - - // the amount that is being written when _write is called. 
- this.writelen = 0; - - this.bufferedRequest = null; - this.lastBufferedRequest = null; - - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; - - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; - - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; - - // count buffered requests - this.bufferedRequestCount = 0; - - // create the two objects needed to store the corked requests - // they are not a linked list, as no new elements are inserted in there - this.corkedRequestsFree = new CorkedRequest(this); - this.corkedRequestsFree.next = new CorkedRequest(this); -} - -WritableState.prototype.getBuffer = function writableStateGetBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; - } - return out; -}; - -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function () { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.') - }); - } catch (_) {} -})(); - -var Duplex; -function Writable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - // Writable ctor is applied to Duplexes, though they're not - // instanceof Writable, they're instanceof Readable. - if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); - - this._writableState = new WritableState(options, this); - - // legacy. - this.writable = true; - - if (options) { - if (typeof options.write === 'function') this._write = options.write; - - if (typeof options.writev === 'function') this._writev = options.writev; - } - - Stream.call(this); -} - -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - this.emit('error', new Error('Cannot pipe. Not readable.')); -}; - -function writeAfterEnd(stream, cb) { - var er = new Error('write after end'); - // TODO: defer error events consistently everywhere, not just the cb - stream.emit('error', er); - processNextTick(cb, er); -} - -// If we get something that is not a buffer, string, null, or undefined, -// and we're not in objectMode, then that's an error. -// Otherwise stream chunks are all considered to be of length=1, and the -// watermarks determine how many objects to keep in the buffer, rather than -// how many bytes or characters. 
-function validChunk(stream, state, chunk, cb) { - var valid = true; - - if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { - var er = new TypeError('Invalid non-string/buffer chunk'); - stream.emit('error', er); - processNextTick(cb, er); - valid = false; - } - return valid; -} - -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - - if (typeof cb !== 'function') cb = nop; - - if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, chunk, encoding, cb); - } - - return ret; -}; - -Writable.prototype.cork = function () { - var state = this._writableState; - - state.corked++; -}; - -Writable.prototype.uncork = function () { - var state = this._writableState; - - if (state.corked) { - state.corked--; - - if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); - } -}; - -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. - if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); - this._writableState.defaultEncoding = encoding; -}; - -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = new Buffer(chunk, encoding); - } - return chunk; -} - -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, chunk, encoding, cb) { - chunk = decodeChunk(state, chunk, encoding); - - if (Buffer.isBuffer(chunk)) encoding = 'buffer'; - var len = state.objectMode ? 1 : chunk.length; - - state.length += len; - - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. 
- if (!ret) state.needDrain = true; - - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = new WriteReq(chunk, encoding, cb); - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); - } - - return ret; -} - -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; -} - -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; - if (sync) processNextTick(cb, er);else cb(er); - - stream._writableState.errorEmitted = true; - stream.emit('error', er); -} - -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; -} - -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - - onwriteStateUpdate(state); - - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state); - - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } - - if (sync) { - /**/ - asyncWrite(afterWrite, stream, state, finished, cb); - /**/ - } else { - afterWrite(stream, state, finished, cb); - } - } -} - -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} - -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); - } -} - -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - - var count = 0; - while (entry) { - buffer[count] = entry; - entry = entry.next; - count += 1; - } - - doWrite(stream, state, true, state.length, buffer, '', holder.finish); - - // doWrite is always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. 
- if (state.writing) { - break; - } - } - - if (entry === null) state.lastBufferedRequest = null; - } - - state.bufferedRequestCount = 0; - state.bufferedRequest = entry; - state.bufferProcessing = false; -} - -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new Error('not implemented')); -}; - -Writable.prototype._writev = null; - -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; - - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); - } - - // ignore unnecessary end() calls. - if (!state.ending && !state.finished) endWritable(this, state, cb); -}; - -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} - -function prefinish(stream, state) { - if (!state.prefinished) { - state.prefinished = true; - stream.emit('prefinish'); - } -} - -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - if (state.pendingcb === 0) { - prefinish(stream, state); - state.finished = true; - stream.emit('finish'); - } else { - prefinish(stream, state); - } - } - return need; -} - -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) processNextTick(cb);else stream.once('finish', cb); - } - state.ended = true; - stream.writable = false; -} - -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - var _this = this; - - this.next = null; - this.entry = null; - - this.finish = function (err) { - var entry = _this.entry; - _this.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } - if (state.corkedRequestsFree) { - state.corkedRequestsFree.next = _this; - } else { - state.corkedRequestsFree = _this; - } - }; -} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/LICENSE b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/LICENSE deleted file mode 100644 index d8d7f9437db..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright Node.js contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/README.md deleted file mode 100644 index 5a76b4149c5..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# core-util-is - -The `util.is*` functions introduced in Node v0.12. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/float.patch b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/float.patch deleted file mode 100644 index a06d5c05f75..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/float.patch +++ /dev/null @@ -1,604 +0,0 @@ -diff --git a/lib/util.js b/lib/util.js -index a03e874..9074e8e 100644 ---- a/lib/util.js -+++ b/lib/util.js -@@ -19,430 +19,6 @@ - // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE - // USE OR OTHER DEALINGS IN THE SOFTWARE. - --var formatRegExp = /%[sdj%]/g; --exports.format = function(f) { -- if (!isString(f)) { -- var objects = []; -- for (var i = 0; i < arguments.length; i++) { -- objects.push(inspect(arguments[i])); -- } -- return objects.join(' '); -- } -- -- var i = 1; -- var args = arguments; -- var len = args.length; -- var str = String(f).replace(formatRegExp, function(x) { -- if (x === '%%') return '%'; -- if (i >= len) return x; -- switch (x) { -- case '%s': return String(args[i++]); -- case '%d': return Number(args[i++]); -- case '%j': -- try { -- return JSON.stringify(args[i++]); -- } catch (_) { -- return '[Circular]'; -- } -- default: -- return x; -- } -- }); -- for (var x = args[i]; i < len; x = args[++i]) { -- if (isNull(x) || !isObject(x)) { -- str += ' ' + x; -- } else { -- str += ' ' + inspect(x); -- } -- } -- return str; --}; -- -- --// Mark that a method should not be used. --// Returns a modified function which warns once by default. --// If --no-deprecation is set, then it is a no-op. --exports.deprecate = function(fn, msg) { -- // Allow for deprecating things in the process of starting up. 
-- if (isUndefined(global.process)) { -- return function() { -- return exports.deprecate(fn, msg).apply(this, arguments); -- }; -- } -- -- if (process.noDeprecation === true) { -- return fn; -- } -- -- var warned = false; -- function deprecated() { -- if (!warned) { -- if (process.throwDeprecation) { -- throw new Error(msg); -- } else if (process.traceDeprecation) { -- console.trace(msg); -- } else { -- console.error(msg); -- } -- warned = true; -- } -- return fn.apply(this, arguments); -- } -- -- return deprecated; --}; -- -- --var debugs = {}; --var debugEnviron; --exports.debuglog = function(set) { -- if (isUndefined(debugEnviron)) -- debugEnviron = process.env.NODE_DEBUG || ''; -- set = set.toUpperCase(); -- if (!debugs[set]) { -- if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) { -- var pid = process.pid; -- debugs[set] = function() { -- var msg = exports.format.apply(exports, arguments); -- console.error('%s %d: %s', set, pid, msg); -- }; -- } else { -- debugs[set] = function() {}; -- } -- } -- return debugs[set]; --}; -- -- --/** -- * Echos the value of a value. Trys to print the value out -- * in the best way possible given the different types. -- * -- * @param {Object} obj The object to print out. -- * @param {Object} opts Optional options object that alters the output. -- */ --/* legacy: obj, showHidden, depth, colors*/ --function inspect(obj, opts) { -- // default options -- var ctx = { -- seen: [], -- stylize: stylizeNoColor -- }; -- // legacy... -- if (arguments.length >= 3) ctx.depth = arguments[2]; -- if (arguments.length >= 4) ctx.colors = arguments[3]; -- if (isBoolean(opts)) { -- // legacy... -- ctx.showHidden = opts; -- } else if (opts) { -- // got an "options" object -- exports._extend(ctx, opts); -- } -- // set default options -- if (isUndefined(ctx.showHidden)) ctx.showHidden = false; -- if (isUndefined(ctx.depth)) ctx.depth = 2; -- if (isUndefined(ctx.colors)) ctx.colors = false; -- if (isUndefined(ctx.customInspect)) ctx.customInspect = true; -- if (ctx.colors) ctx.stylize = stylizeWithColor; -- return formatValue(ctx, obj, ctx.depth); --} --exports.inspect = inspect; -- -- --// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics --inspect.colors = { -- 'bold' : [1, 22], -- 'italic' : [3, 23], -- 'underline' : [4, 24], -- 'inverse' : [7, 27], -- 'white' : [37, 39], -- 'grey' : [90, 39], -- 'black' : [30, 39], -- 'blue' : [34, 39], -- 'cyan' : [36, 39], -- 'green' : [32, 39], -- 'magenta' : [35, 39], -- 'red' : [31, 39], -- 'yellow' : [33, 39] --}; -- --// Don't use 'blue' not visible on cmd.exe --inspect.styles = { -- 'special': 'cyan', -- 'number': 'yellow', -- 'boolean': 'yellow', -- 'undefined': 'grey', -- 'null': 'bold', -- 'string': 'green', -- 'date': 'magenta', -- // "name": intentionally not styling -- 'regexp': 'red' --}; -- -- --function stylizeWithColor(str, styleType) { -- var style = inspect.styles[styleType]; -- -- if (style) { -- return '\u001b[' + inspect.colors[style][0] + 'm' + str + -- '\u001b[' + inspect.colors[style][1] + 'm'; -- } else { -- return str; -- } --} -- -- --function stylizeNoColor(str, styleType) { -- return str; --} -- -- --function arrayToHash(array) { -- var hash = {}; -- -- array.forEach(function(val, idx) { -- hash[val] = true; -- }); -- -- return hash; --} -- -- --function formatValue(ctx, value, recurseTimes) { -- // Provide a hook for user-specified inspect functions. 
-- // Check that value is an object with an inspect function on it -- if (ctx.customInspect && -- value && -- isFunction(value.inspect) && -- // Filter out the util module, it's inspect function is special -- value.inspect !== exports.inspect && -- // Also filter out any prototype objects using the circular check. -- !(value.constructor && value.constructor.prototype === value)) { -- var ret = value.inspect(recurseTimes, ctx); -- if (!isString(ret)) { -- ret = formatValue(ctx, ret, recurseTimes); -- } -- return ret; -- } -- -- // Primitive types cannot have properties -- var primitive = formatPrimitive(ctx, value); -- if (primitive) { -- return primitive; -- } -- -- // Look up the keys of the object. -- var keys = Object.keys(value); -- var visibleKeys = arrayToHash(keys); -- -- if (ctx.showHidden) { -- keys = Object.getOwnPropertyNames(value); -- } -- -- // Some type of object without properties can be shortcutted. -- if (keys.length === 0) { -- if (isFunction(value)) { -- var name = value.name ? ': ' + value.name : ''; -- return ctx.stylize('[Function' + name + ']', 'special'); -- } -- if (isRegExp(value)) { -- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); -- } -- if (isDate(value)) { -- return ctx.stylize(Date.prototype.toString.call(value), 'date'); -- } -- if (isError(value)) { -- return formatError(value); -- } -- } -- -- var base = '', array = false, braces = ['{', '}']; -- -- // Make Array say that they are Array -- if (isArray(value)) { -- array = true; -- braces = ['[', ']']; -- } -- -- // Make functions say that they are functions -- if (isFunction(value)) { -- var n = value.name ? ': ' + value.name : ''; -- base = ' [Function' + n + ']'; -- } -- -- // Make RegExps say that they are RegExps -- if (isRegExp(value)) { -- base = ' ' + RegExp.prototype.toString.call(value); -- } -- -- // Make dates with properties first say the date -- if (isDate(value)) { -- base = ' ' + Date.prototype.toUTCString.call(value); -- } -- -- // Make error with message first say the error -- if (isError(value)) { -- base = ' ' + formatError(value); -- } -- -- if (keys.length === 0 && (!array || value.length == 0)) { -- return braces[0] + base + braces[1]; -- } -- -- if (recurseTimes < 0) { -- if (isRegExp(value)) { -- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); -- } else { -- return ctx.stylize('[Object]', 'special'); -- } -- } -- -- ctx.seen.push(value); -- -- var output; -- if (array) { -- output = formatArray(ctx, value, recurseTimes, visibleKeys, keys); -- } else { -- output = keys.map(function(key) { -- return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array); -- }); -- } -- -- ctx.seen.pop(); -- -- return reduceToSingleString(output, base, braces); --} -- -- --function formatPrimitive(ctx, value) { -- if (isUndefined(value)) -- return ctx.stylize('undefined', 'undefined'); -- if (isString(value)) { -- var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '') -- .replace(/'/g, "\\'") -- .replace(/\\"/g, '"') + '\''; -- return ctx.stylize(simple, 'string'); -- } -- if (isNumber(value)) { -- // Format -0 as '-0'. Strict equality won't distinguish 0 from -0, -- // so instead we use the fact that 1 / -0 < 0 whereas 1 / 0 > 0 . -- if (value === 0 && 1 / value < 0) -- return ctx.stylize('-0', 'number'); -- return ctx.stylize('' + value, 'number'); -- } -- if (isBoolean(value)) -- return ctx.stylize('' + value, 'boolean'); -- // For some reason typeof null is "object", so special case here. 
-- if (isNull(value)) -- return ctx.stylize('null', 'null'); --} -- -- --function formatError(value) { -- return '[' + Error.prototype.toString.call(value) + ']'; --} -- -- --function formatArray(ctx, value, recurseTimes, visibleKeys, keys) { -- var output = []; -- for (var i = 0, l = value.length; i < l; ++i) { -- if (hasOwnProperty(value, String(i))) { -- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, -- String(i), true)); -- } else { -- output.push(''); -- } -- } -- keys.forEach(function(key) { -- if (!key.match(/^\d+$/)) { -- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, -- key, true)); -- } -- }); -- return output; --} -- -- --function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) { -- var name, str, desc; -- desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] }; -- if (desc.get) { -- if (desc.set) { -- str = ctx.stylize('[Getter/Setter]', 'special'); -- } else { -- str = ctx.stylize('[Getter]', 'special'); -- } -- } else { -- if (desc.set) { -- str = ctx.stylize('[Setter]', 'special'); -- } -- } -- if (!hasOwnProperty(visibleKeys, key)) { -- name = '[' + key + ']'; -- } -- if (!str) { -- if (ctx.seen.indexOf(desc.value) < 0) { -- if (isNull(recurseTimes)) { -- str = formatValue(ctx, desc.value, null); -- } else { -- str = formatValue(ctx, desc.value, recurseTimes - 1); -- } -- if (str.indexOf('\n') > -1) { -- if (array) { -- str = str.split('\n').map(function(line) { -- return ' ' + line; -- }).join('\n').substr(2); -- } else { -- str = '\n' + str.split('\n').map(function(line) { -- return ' ' + line; -- }).join('\n'); -- } -- } -- } else { -- str = ctx.stylize('[Circular]', 'special'); -- } -- } -- if (isUndefined(name)) { -- if (array && key.match(/^\d+$/)) { -- return str; -- } -- name = JSON.stringify('' + key); -- if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) { -- name = name.substr(1, name.length - 2); -- name = ctx.stylize(name, 'name'); -- } else { -- name = name.replace(/'/g, "\\'") -- .replace(/\\"/g, '"') -- .replace(/(^"|"$)/g, "'"); -- name = ctx.stylize(name, 'string'); -- } -- } -- -- return name + ': ' + str; --} -- -- --function reduceToSingleString(output, base, braces) { -- var numLinesEst = 0; -- var length = output.reduce(function(prev, cur) { -- numLinesEst++; -- if (cur.indexOf('\n') >= 0) numLinesEst++; -- return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1; -- }, 0); -- -- if (length > 60) { -- return braces[0] + -- (base === '' ? '' : base + '\n ') + -- ' ' + -- output.join(',\n ') + -- ' ' + -- braces[1]; -- } -- -- return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1]; --} -- -- - // NOTE: These type checking functions intentionally don't use `instanceof` - // because it is fragile and can be easily faked with `Object.create()`. - function isArray(ar) { -@@ -522,166 +98,10 @@ function isPrimitive(arg) { - exports.isPrimitive = isPrimitive; - - function isBuffer(arg) { -- return arg instanceof Buffer; -+ return Buffer.isBuffer(arg); - } - exports.isBuffer = isBuffer; - - function objectToString(o) { - return Object.prototype.toString.call(o); --} -- -- --function pad(n) { -- return n < 10 ? 
'0' + n.toString(10) : n.toString(10); --} -- -- --var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', -- 'Oct', 'Nov', 'Dec']; -- --// 26 Feb 16:19:34 --function timestamp() { -- var d = new Date(); -- var time = [pad(d.getHours()), -- pad(d.getMinutes()), -- pad(d.getSeconds())].join(':'); -- return [d.getDate(), months[d.getMonth()], time].join(' '); --} -- -- --// log is just a thin wrapper to console.log that prepends a timestamp --exports.log = function() { -- console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments)); --}; -- -- --/** -- * Inherit the prototype methods from one constructor into another. -- * -- * The Function.prototype.inherits from lang.js rewritten as a standalone -- * function (not on Function.prototype). NOTE: If this file is to be loaded -- * during bootstrapping this function needs to be rewritten using some native -- * functions as prototype setup using normal JavaScript does not work as -- * expected during bootstrapping (see mirror.js in r114903). -- * -- * @param {function} ctor Constructor function which needs to inherit the -- * prototype. -- * @param {function} superCtor Constructor function to inherit prototype from. -- */ --exports.inherits = function(ctor, superCtor) { -- ctor.super_ = superCtor; -- ctor.prototype = Object.create(superCtor.prototype, { -- constructor: { -- value: ctor, -- enumerable: false, -- writable: true, -- configurable: true -- } -- }); --}; -- --exports._extend = function(origin, add) { -- // Don't do anything if add isn't an object -- if (!add || !isObject(add)) return origin; -- -- var keys = Object.keys(add); -- var i = keys.length; -- while (i--) { -- origin[keys[i]] = add[keys[i]]; -- } -- return origin; --}; -- --function hasOwnProperty(obj, prop) { -- return Object.prototype.hasOwnProperty.call(obj, prop); --} -- -- --// Deprecated old stuff. 
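The `inherits` helper removed in the hunk above wires up constructor-based prototype inheritance (a `super_` back-reference plus a prototype created from the parent's). A small usage sketch under the same semantics, via Node core's `util.inherits`; the `Animal`/`Dog` names are purely illustrative:

```js
const util = require('util');

function Animal(name) {
  this.name = name;
}
Animal.prototype.speak = function() {
  return this.name + ' makes a sound';
};

function Dog(name) {
  // Run the parent constructor on the new instance.
  Animal.call(this, name);
}
// Sets Dog.super_ = Animal and chains Dog.prototype to Animal.prototype.
util.inherits(Dog, Animal);

console.log(new Dog('Rex').speak()); // 'Rex makes a sound'
console.log(Dog.super_ === Animal);  // true
```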
-- --exports.p = exports.deprecate(function() { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- console.error(exports.inspect(arguments[i])); -- } --}, 'util.p: Use console.error() instead'); -- -- --exports.exec = exports.deprecate(function() { -- return require('child_process').exec.apply(this, arguments); --}, 'util.exec is now called `child_process.exec`.'); -- -- --exports.print = exports.deprecate(function() { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- process.stdout.write(String(arguments[i])); -- } --}, 'util.print: Use console.log instead'); -- -- --exports.puts = exports.deprecate(function() { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- process.stdout.write(arguments[i] + '\n'); -- } --}, 'util.puts: Use console.log instead'); -- -- --exports.debug = exports.deprecate(function(x) { -- process.stderr.write('DEBUG: ' + x + '\n'); --}, 'util.debug: Use console.error instead'); -- -- --exports.error = exports.deprecate(function(x) { -- for (var i = 0, len = arguments.length; i < len; ++i) { -- process.stderr.write(arguments[i] + '\n'); -- } --}, 'util.error: Use console.error instead'); -- -- --exports.pump = exports.deprecate(function(readStream, writeStream, callback) { -- var callbackCalled = false; -- -- function call(a, b, c) { -- if (callback && !callbackCalled) { -- callback(a, b, c); -- callbackCalled = true; -- } -- } -- -- readStream.addListener('data', function(chunk) { -- if (writeStream.write(chunk) === false) readStream.pause(); -- }); -- -- writeStream.addListener('drain', function() { -- readStream.resume(); -- }); -- -- readStream.addListener('end', function() { -- writeStream.end(); -- }); -- -- readStream.addListener('close', function() { -- call(); -- }); -- -- readStream.addListener('error', function(err) { -- writeStream.end(); -- call(err); -- }); -- -- writeStream.addListener('error', function(err) { -- readStream.destroy(); -- call(err); -- }); --}, 'util.pump(): Use readableStream.pipe() instead'); -- -- --var uv; --exports._errnoException = function(err, syscall) { -- if (isUndefined(uv)) uv = process.binding('uv'); -- var errname = uv.errname(err); -- var e = new Error(syscall + ' ' + errname); -- e.code = errname; -- e.errno = errname; -- e.syscall = syscall; -- return e; --}; -+} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/lib/util.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/lib/util.js deleted file mode 100644 index ff4c851c075..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/lib/util.js +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// NOTE: These type checking functions intentionally don't use `instanceof` -// because it is fragile and can be easily faked with `Object.create()`. - -function isArray(arg) { - if (Array.isArray) { - return Array.isArray(arg); - } - return objectToString(arg) === '[object Array]'; -} -exports.isArray = isArray; - -function isBoolean(arg) { - return typeof arg === 'boolean'; -} -exports.isBoolean = isBoolean; - -function isNull(arg) { - return arg === null; -} -exports.isNull = isNull; - -function isNullOrUndefined(arg) { - return arg == null; -} -exports.isNullOrUndefined = isNullOrUndefined; - -function isNumber(arg) { - return typeof arg === 'number'; -} -exports.isNumber = isNumber; - -function isString(arg) { - return typeof arg === 'string'; -} -exports.isString = isString; - -function isSymbol(arg) { - return typeof arg === 'symbol'; -} -exports.isSymbol = isSymbol; - -function isUndefined(arg) { - return arg === void 0; -} -exports.isUndefined = isUndefined; - -function isRegExp(re) { - return objectToString(re) === '[object RegExp]'; -} -exports.isRegExp = isRegExp; - -function isObject(arg) { - return typeof arg === 'object' && arg !== null; -} -exports.isObject = isObject; - -function isDate(d) { - return objectToString(d) === '[object Date]'; -} -exports.isDate = isDate; - -function isError(e) { - return (objectToString(e) === '[object Error]' || e instanceof Error); -} -exports.isError = isError; - -function isFunction(arg) { - return typeof arg === 'function'; -} -exports.isFunction = isFunction; - -function isPrimitive(arg) { - return arg === null || - typeof arg === 'boolean' || - typeof arg === 'number' || - typeof arg === 'string' || - typeof arg === 'symbol' || // ES6 symbol - typeof arg === 'undefined'; -} -exports.isPrimitive = isPrimitive; - -exports.isBuffer = Buffer.isBuffer; - -function objectToString(o) { - return Object.prototype.toString.call(o); -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/package.json deleted file mode 100644 index 4f65c18e22a..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/package.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "core-util-is@~1.0.0", - "scope": null, - "escapedName": "core-util-is", - "name": "core-util-is", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ], - [ - { - "raw": "core-util-is@~1.0.0", - "scope": null, - "escapedName": "core-util-is", - "name": "core-util-is", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" - ] - ], - "_from": 
"core-util-is@~1.0.0", - "_id": "core-util-is@1.0.2", - "_inCache": true, - "_location": "/mississippi/through2/readable-stream/core-util-is", - "_nodeVersion": "4.0.0", - "_npmUser": { - "name": "isaacs", - "email": "i@izs.me" - }, - "_npmVersion": "3.3.2", - "_phantomChildren": {}, - "_requested": { - "raw": "core-util-is@~1.0.0", - "scope": null, - "escapedName": "core-util-is", - "name": "core-util-is", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/through2/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "_shasum": "b5fd54220aa2bc5ab57aab7140c940754503c1a7", - "_shrinkwrap": null, - "_spec": "core-util-is@~1.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "bugs": { - "url": "https://github.com/isaacs/core-util-is/issues" - }, - "dependencies": {}, - "description": "The `util.is*` functions introduced in Node v0.12.", - "devDependencies": { - "tap": "^2.3.0" - }, - "directories": {}, - "dist": { - "shasum": "b5fd54220aa2bc5ab57aab7140c940754503c1a7", - "tarball": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" - }, - "gitHead": "a177da234df5638b363ddc15fa324619a38577c8", - "homepage": "https://github.com/isaacs/core-util-is#readme", - "keywords": [ - "util", - "isBuffer", - "isArray", - "isNumber", - "isString", - "isRegExp", - "isThis", - "isThat", - "polyfill" - ], - "license": "MIT", - "main": "lib/util.js", - "maintainers": [ - { - "name": "isaacs", - "email": "i@izs.me" - } - ], - "name": "core-util-is", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/core-util-is.git" - }, - "scripts": { - "test": "tap test.js" - }, - "version": "1.0.2" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/test.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/test.js deleted file mode 100644 index 1a490c65ac8..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/test.js +++ /dev/null @@ -1,68 +0,0 @@ -var assert = require('tap'); - -var t = require('./lib/util'); - -assert.equal(t.isArray([]), true); -assert.equal(t.isArray({}), false); - -assert.equal(t.isBoolean(null), false); -assert.equal(t.isBoolean(true), true); -assert.equal(t.isBoolean(false), true); - -assert.equal(t.isNull(null), true); -assert.equal(t.isNull(undefined), false); -assert.equal(t.isNull(false), false); -assert.equal(t.isNull(), false); - -assert.equal(t.isNullOrUndefined(null), true); -assert.equal(t.isNullOrUndefined(undefined), true); -assert.equal(t.isNullOrUndefined(false), false); -assert.equal(t.isNullOrUndefined(), true); - -assert.equal(t.isNumber(null), false); -assert.equal(t.isNumber('1'), false); -assert.equal(t.isNumber(1), true); - -assert.equal(t.isString(null), false); -assert.equal(t.isString('1'), true); -assert.equal(t.isString(1), false); - -assert.equal(t.isSymbol(null), false); -assert.equal(t.isSymbol('1'), false); -assert.equal(t.isSymbol(1), false); -assert.equal(t.isSymbol(Symbol()), true); - -assert.equal(t.isUndefined(null), false); 
-assert.equal(t.isUndefined(undefined), true); -assert.equal(t.isUndefined(false), false); -assert.equal(t.isUndefined(), true); - -assert.equal(t.isRegExp(null), false); -assert.equal(t.isRegExp('1'), false); -assert.equal(t.isRegExp(new RegExp()), true); - -assert.equal(t.isObject({}), true); -assert.equal(t.isObject([]), true); -assert.equal(t.isObject(new RegExp()), true); -assert.equal(t.isObject(new Date()), true); - -assert.equal(t.isDate(null), false); -assert.equal(t.isDate('1'), false); -assert.equal(t.isDate(new Date()), true); - -assert.equal(t.isError(null), false); -assert.equal(t.isError({ err: true }), false); -assert.equal(t.isError(new Error()), true); - -assert.equal(t.isFunction(null), false); -assert.equal(t.isFunction({ }), false); -assert.equal(t.isFunction(function() {}), true); - -assert.equal(t.isPrimitive(null), true); -assert.equal(t.isPrimitive(''), true); -assert.equal(t.isPrimitive(0), true); -assert.equal(t.isPrimitive(new Date()), false); - -assert.equal(t.isBuffer(null), false); -assert.equal(t.isBuffer({}), false); -assert.equal(t.isBuffer(new Buffer(0)), true); diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.travis.yml b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.travis.yml deleted file mode 100644 index cc4dba29d95..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: node_js -node_js: - - "0.8" - - "0.10" diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/Makefile b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/Makefile deleted file mode 100644 index 0ecc29c402c..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/Makefile +++ /dev/null @@ -1,5 +0,0 @@ - -test: - @node_modules/.bin/tape test.js - -.PHONY: test diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/README.md deleted file mode 100644 index 16d2c59c619..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/README.md +++ /dev/null @@ -1,60 +0,0 @@ - -# isarray - -`Array#isArray` for older browsers. - -[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray) -[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray) - -[![browser support](https://ci.testling.com/juliangruber/isarray.png) -](https://ci.testling.com/juliangruber/isarray) - -## Usage - -```js -var isArray = require('isarray'); - -console.log(isArray([])); // => true -console.log(isArray({})); // => false -``` - -## Installation - -With [npm](http://npmjs.org) do - -```bash -$ npm install isarray -``` - -Then bundle for the browser with -[browserify](https://github.com/substack/browserify). 
- -With [component](http://component.io) do - -```bash -$ component install juliangruber/isarray -``` - -## License - -(MIT) - -Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/component.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/component.json deleted file mode 100644 index 9e31b683889..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/component.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name" : "isarray", - "description" : "Array#isArray for older browsers", - "version" : "0.0.1", - "repository" : "juliangruber/isarray", - "homepage": "https://github.com/juliangruber/isarray", - "main" : "index.js", - "scripts" : [ - "index.js" - ], - "dependencies" : {}, - "keywords": ["browser","isarray","array"], - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "license": "MIT" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/index.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/index.js deleted file mode 100644 index a57f6349594..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/index.js +++ /dev/null @@ -1,5 +0,0 @@ -var toString = {}.toString; - -module.exports = Array.isArray || function (arr) { - return toString.call(arr) == '[object Array]'; -}; diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/package.json deleted file mode 100644 index d3a2bda3c65..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/package.json +++ /dev/null @@ -1,116 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "isarray@~1.0.0", - "scope": null, - "escapedName": "isarray", - "name": "isarray", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ], - [ - { - "raw": 
"isarray@~1.0.0", - "scope": null, - "escapedName": "isarray", - "name": "isarray", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" - ] - ], - "_from": "isarray@~1.0.0", - "_id": "isarray@1.0.0", - "_inCache": true, - "_location": "/mississippi/through2/readable-stream/isarray", - "_nodeVersion": "5.1.0", - "_npmUser": { - "name": "juliangruber", - "email": "julian@juliangruber.com" - }, - "_npmVersion": "3.3.12", - "_phantomChildren": {}, - "_requested": { - "raw": "isarray@~1.0.0", - "scope": null, - "escapedName": "isarray", - "name": "isarray", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/through2/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "_shasum": "bb935d48582cba168c06834957a54a3e07124f11", - "_shrinkwrap": null, - "_spec": "isarray@~1.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "bugs": { - "url": "https://github.com/juliangruber/isarray/issues" - }, - "dependencies": {}, - "description": "Array#isArray for older browsers", - "devDependencies": { - "tape": "~2.13.4" - }, - "directories": {}, - "dist": { - "shasum": "bb935d48582cba168c06834957a54a3e07124f11", - "tarball": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" - }, - "gitHead": "2a23a281f369e9ae06394c0fb4d2381355a6ba33", - "homepage": "https://github.com/juliangruber/isarray", - "keywords": [ - "browser", - "isarray", - "array" - ], - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "juliangruber", - "email": "julian@juliangruber.com" - } - ], - "name": "isarray", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/juliangruber/isarray.git" - }, - "scripts": { - "test": "tape test.js" - }, - "testling": { - "files": "test.js", - "browsers": [ - "ie/8..latest", - "firefox/17..latest", - "firefox/nightly", - "chrome/22..latest", - "chrome/canary", - "opera/12..latest", - "opera/next", - "safari/5.1..latest", - "ipad/6.0..latest", - "iphone/6.0..latest", - "android-browser/4.2..latest" - ] - }, - "version": "1.0.0" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/test.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/test.js deleted file mode 100644 index f7f7bcd19fe..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/test.js +++ /dev/null @@ -1,19 +0,0 @@ -var isArray = require('./'); -var test = require('tape'); - -test('is array', function(t){ - t.ok(isArray([])); - t.notOk(isArray({})); - t.notOk(isArray(null)); - t.notOk(isArray(false)); - - var obj = {}; - obj[0] = true; - t.notOk(isArray(obj)); - - var arr = []; - arr.foo = 'bar'; - t.ok(isArray(arr)); - - t.end(); -}); diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml deleted file 
mode 100644 index 36201b10017..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: node_js -node_js: - - "0.8" - - "0.10" - - "0.11" - - "0.12" - - "1.7.1" - - 1 - - 2 - - 3 - - 4 - - 5 diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/index.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/index.js deleted file mode 100644 index a4f40f845fa..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/index.js +++ /dev/null @@ -1,43 +0,0 @@ -'use strict'; - -if (!process.version || - process.version.indexOf('v0.') === 0 || - process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { - module.exports = nextTick; -} else { - module.exports = process.nextTick; -} - -function nextTick(fn, arg1, arg2, arg3) { - if (typeof fn !== 'function') { - throw new TypeError('"callback" argument must be a function'); - } - var len = arguments.length; - var args, i; - switch (len) { - case 0: - case 1: - return process.nextTick(fn); - case 2: - return process.nextTick(function afterTickOne() { - fn.call(null, arg1); - }); - case 3: - return process.nextTick(function afterTickTwo() { - fn.call(null, arg1, arg2); - }); - case 4: - return process.nextTick(function afterTickThree() { - fn.call(null, arg1, arg2, arg3); - }); - default: - args = new Array(len - 1); - i = 0; - while (i < args.length) { - args[i++] = arguments[i]; - } - return process.nextTick(function afterTick() { - fn.apply(null, args); - }); - } -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/license.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/license.md deleted file mode 100644 index c67e3532b54..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/license.md +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2015 Calvin Metcalf - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.** diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/package.json deleted file mode 100644 index f5d106537ad..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/package.json +++ /dev/null @@ -1,95 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "process-nextick-args@~1.0.6", - "scope": null, - "escapedName": "process-nextick-args", - "name": "process-nextick-args", - "rawSpec": "~1.0.6", - "spec": ">=1.0.6 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ], - [ - { - "raw": "process-nextick-args@~1.0.6", - "scope": null, - "escapedName": "process-nextick-args", - "name": "process-nextick-args", - "rawSpec": "~1.0.6", - "spec": ">=1.0.6 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" - ] - ], - "_from": "process-nextick-args@~1.0.6", - "_id": "process-nextick-args@1.0.7", - "_inCache": true, - "_location": "/mississippi/through2/readable-stream/process-nextick-args", - "_nodeVersion": "5.11.0", - "_npmOperationalInternal": { - "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/process-nextick-args-1.0.7.tgz_1462394251778_0.36989671061746776" - }, - "_npmUser": { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - }, - "_npmVersion": "3.8.6", - "_phantomChildren": {}, - "_requested": { - "raw": "process-nextick-args@~1.0.6", - "scope": null, - "escapedName": "process-nextick-args", - "name": "process-nextick-args", - "rawSpec": "~1.0.6", - "spec": ">=1.0.6 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/through2/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", - "_shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3", - "_shrinkwrap": null, - "_spec": "process-nextick-args@~1.0.6", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", - "author": "", - "bugs": { - "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" - }, - "dependencies": {}, - "description": "process.nextTick but always with args", - "devDependencies": { - "tap": "~0.2.6" - }, - "directories": {}, - "dist": { - "shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3", - "tarball": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" - }, - "gitHead": "5c00899ab01dd32f93ad4b5743da33da91404f39", - "homepage": "https://github.com/calvinmetcalf/process-nextick-args", - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - } - ], - "name": "process-nextick-args", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git+https://github.com/calvinmetcalf/process-nextick-args.git" - }, - "scripts": { - "test": "node test.js" - }, - "version": "1.0.7" 
-} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/readme.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/readme.md deleted file mode 100644 index 78e7cfaeb7a..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/readme.md +++ /dev/null @@ -1,18 +0,0 @@ -process-nextick-args -===== - -[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) - -```bash -npm install --save process-nextick-args -``` - -Always be able to pass arguments to process.nextTick, no matter the platform - -```js -var nextTick = require('process-nextick-args'); - -nextTick(function (a, b, c) { - console.log(a, b, c); -}, 'step', 3, 'profit'); -``` diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/test.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/test.js deleted file mode 100644 index ef15721584a..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/test.js +++ /dev/null @@ -1,24 +0,0 @@ -var test = require("tap").test; -var nextTick = require('./'); - -test('should work', function (t) { - t.plan(5); - nextTick(function (a) { - t.ok(a); - nextTick(function (thing) { - t.equals(thing, 7); - }, 7); - }, true); - nextTick(function (a, b, c) { - t.equals(a, 'step'); - t.equals(b, 3); - t.equals(c, 'profit'); - }, 'step', 3, 'profit'); -}); - -test('correct number of arguments', function (t) { - t.plan(1); - nextTick(function () { - t.equals(2, arguments.length, 'correct number'); - }, 1, 2); -}); diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/.npmignore b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/.npmignore deleted file mode 100644 index 206320cc1d2..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/.npmignore +++ /dev/null @@ -1,2 +0,0 @@ -build -test diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/LICENSE b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/LICENSE deleted file mode 100644 index 6de584a48f5..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright Joyent, Inc. and other Node contributors. 
- -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to permit -persons to whom the Software is furnished to do so, subject to the -following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/README.md deleted file mode 100644 index 4d2aa001501..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/README.md +++ /dev/null @@ -1,7 +0,0 @@ -**string_decoder.js** (`require('string_decoder')`) from Node.js core - -Copyright Joyent, Inc. and other Node contributors. See LICENCE file for details. - -Version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.** - -The *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version. \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/index.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/index.js deleted file mode 100644 index b00e54fb790..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/index.js +++ /dev/null @@ -1,221 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -var Buffer = require('buffer').Buffer; - -var isBufferEncoding = Buffer.isEncoding - || function(encoding) { - switch (encoding && encoding.toLowerCase()) { - case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true; - default: return false; - } - } - - -function assertEncoding(encoding) { - if (encoding && !isBufferEncoding(encoding)) { - throw new Error('Unknown encoding: ' + encoding); - } -} - -// StringDecoder provides an interface for efficiently splitting a series of -// buffers into a series of JS strings without breaking apart multi-byte -// characters. CESU-8 is handled as part of the UTF-8 encoding. -// -// @TODO Handling all encodings inside a single object makes it very difficult -// to reason about this code, so it should be split up in the future. -// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code -// points as used by CESU-8. -var StringDecoder = exports.StringDecoder = function(encoding) { - this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, ''); - assertEncoding(encoding); - switch (this.encoding) { - case 'utf8': - // CESU-8 represents each of Surrogate Pair by 3-bytes - this.surrogateSize = 3; - break; - case 'ucs2': - case 'utf16le': - // UTF-16 represents each of Surrogate Pair by 2-bytes - this.surrogateSize = 2; - this.detectIncompleteChar = utf16DetectIncompleteChar; - break; - case 'base64': - // Base-64 stores 3 bytes in 4 chars, and pads the remainder. - this.surrogateSize = 3; - this.detectIncompleteChar = base64DetectIncompleteChar; - break; - default: - this.write = passThroughWrite; - return; - } - - // Enough space to store all bytes of a single character. UTF-8 needs 4 - // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate). - this.charBuffer = new Buffer(6); - // Number of bytes received for the current incomplete multi-byte character. - this.charReceived = 0; - // Number of bytes expected for the current incomplete multi-byte character. - this.charLength = 0; -}; - - -// write decodes the given buffer and returns it as JS string that is -// guaranteed to not contain any partial multi-byte characters. Any partial -// character found at the end of the buffer is buffered up, and will be -// returned when calling write again with the remaining bytes. -// -// Note: Converting a Buffer containing an orphan surrogate to a String -// currently works, but converting a String to a Buffer (via `new Buffer`, or -// Buffer#write) will replace incomplete surrogates with the unicode -// replacement character. See https://codereview.chromium.org/121173009/ . -StringDecoder.prototype.write = function(buffer) { - var charStr = ''; - // if our last write ended with an incomplete multibyte character - while (this.charLength) { - // determine how many remaining bytes this buffer has to offer for this char - var available = (buffer.length >= this.charLength - this.charReceived) ? 
- this.charLength - this.charReceived : - buffer.length; - - // add the new bytes to the char buffer - buffer.copy(this.charBuffer, this.charReceived, 0, available); - this.charReceived += available; - - if (this.charReceived < this.charLength) { - // still not enough chars in this buffer? wait for more ... - return ''; - } - - // remove bytes belonging to the current character from the buffer - buffer = buffer.slice(available, buffer.length); - - // get the character that was split - charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding); - - // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character - var charCode = charStr.charCodeAt(charStr.length - 1); - if (charCode >= 0xD800 && charCode <= 0xDBFF) { - this.charLength += this.surrogateSize; - charStr = ''; - continue; - } - this.charReceived = this.charLength = 0; - - // if there are no more bytes in this buffer, just emit our char - if (buffer.length === 0) { - return charStr; - } - break; - } - - // determine and set charLength / charReceived - this.detectIncompleteChar(buffer); - - var end = buffer.length; - if (this.charLength) { - // buffer the incomplete character bytes we got - buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end); - end -= this.charReceived; - } - - charStr += buffer.toString(this.encoding, 0, end); - - var end = charStr.length - 1; - var charCode = charStr.charCodeAt(end); - // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character - if (charCode >= 0xD800 && charCode <= 0xDBFF) { - var size = this.surrogateSize; - this.charLength += size; - this.charReceived += size; - this.charBuffer.copy(this.charBuffer, size, 0, size); - buffer.copy(this.charBuffer, 0, 0, size); - return charStr.substring(0, end); - } - - // or just emit the charStr - return charStr; -}; - -// detectIncompleteChar determines if there is an incomplete UTF-8 character at -// the end of the given buffer. If so, it sets this.charLength to the byte -// length that character, and sets this.charReceived to the number of bytes -// that are available for this character. -StringDecoder.prototype.detectIncompleteChar = function(buffer) { - // determine how many bytes we have to check at the end of this buffer - var i = (buffer.length >= 3) ? 3 : buffer.length; - - // Figure out if one of the last i bytes of our buffer announces an - // incomplete char. - for (; i > 0; i--) { - var c = buffer[buffer.length - i]; - - // See http://en.wikipedia.org/wiki/UTF-8#Description - - // 110XXXXX - if (i == 1 && c >> 5 == 0x06) { - this.charLength = 2; - break; - } - - // 1110XXXX - if (i <= 2 && c >> 4 == 0x0E) { - this.charLength = 3; - break; - } - - // 11110XXX - if (i <= 3 && c >> 3 == 0x1E) { - this.charLength = 4; - break; - } - } - this.charReceived = i; -}; - -StringDecoder.prototype.end = function(buffer) { - var res = ''; - if (buffer && buffer.length) - res = this.write(buffer); - - if (this.charReceived) { - var cr = this.charReceived; - var buf = this.charBuffer; - var enc = this.encoding; - res += buf.slice(0, cr).toString(enc); - } - - return res; -}; - -function passThroughWrite(buffer) { - return buffer.toString(this.encoding); -} - -function utf16DetectIncompleteChar(buffer) { - this.charReceived = buffer.length % 2; - this.charLength = this.charReceived ? 2 : 0; -} - -function base64DetectIncompleteChar(buffer) { - this.charReceived = buffer.length % 3; - this.charLength = this.charReceived ? 
3 : 0; -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/package.json deleted file mode 100644 index 36fa27f82b4..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/package.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "string_decoder@~0.10.x", - "scope": null, - "escapedName": "string_decoder", - "name": "string_decoder", - "rawSpec": "~0.10.x", - "spec": ">=0.10.0 <0.11.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ], - [ - { - "raw": "string_decoder@~0.10.x", - "scope": null, - "escapedName": "string_decoder", - "name": "string_decoder", - "rawSpec": "~0.10.x", - "spec": ">=0.10.0 <0.11.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" - ] - ], - "_from": "string_decoder@~0.10.x", - "_id": "string_decoder@0.10.31", - "_inCache": true, - "_location": "/mississippi/through2/readable-stream/string_decoder", - "_npmUser": { - "name": "rvagg", - "email": "rod@vagg.org" - }, - "_npmVersion": "1.4.23", - "_phantomChildren": {}, - "_requested": { - "raw": "string_decoder@~0.10.x", - "scope": null, - "escapedName": "string_decoder", - "name": "string_decoder", - "rawSpec": "~0.10.x", - "spec": ">=0.10.0 <0.11.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/through2/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "_shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94", - "_shrinkwrap": null, - "_spec": "string_decoder@~0.10.x", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", - "bugs": { - "url": "https://github.com/rvagg/string_decoder/issues" - }, - "dependencies": {}, - "description": "The string_decoder module from Node core", - "devDependencies": { - "tap": "~0.4.8" - }, - "directories": {}, - "dist": { - "shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94", - "tarball": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" - }, - "gitHead": "d46d4fd87cf1d06e031c23f1ba170ca7d4ade9a0", - "homepage": "https://github.com/rvagg/string_decoder", - "keywords": [ - "string", - "decoder", - "browser", - "browserify" - ], - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "substack", - "email": "mail@substack.net" - }, - { - "name": "rvagg", - "email": "rod@vagg.org" - } - ], - "name": "string_decoder", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/rvagg/string_decoder.git" - }, - "scripts": { - "test": "tap test/simple/*.js" - }, - "version": "0.10.31" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/History.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/History.md deleted file mode 100644 index acc8675372e..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/History.md +++ /dev/null @@ -1,16 +0,0 @@ - -1.0.2 
/ 2015-10-07 -================== - - * use try/catch when checking `localStorage` (#3, @kumavis) - -1.0.1 / 2014-11-25 -================== - - * browser: use `console.warn()` for deprecation calls - * browser: more jsdocs - -1.0.0 / 2014-04-30 -================== - - * initial commit diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/LICENSE b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/LICENSE deleted file mode 100644 index 6a60e8c225c..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/LICENSE +++ /dev/null @@ -1,24 +0,0 @@ -(The MIT License) - -Copyright (c) 2014 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/README.md deleted file mode 100644 index 75622fa7c25..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/README.md +++ /dev/null @@ -1,53 +0,0 @@ -util-deprecate -============== -### The Node.js `util.deprecate()` function with browser support - -In Node.js, this module simply re-exports the `util.deprecate()` function. - -In the web browser (i.e. via browserify), a browser-specific implementation -of the `util.deprecate()` function is used. - - -## API - -A `deprecate()` function is the only thing exposed by this module. 
- -``` javascript -// setup: -exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); - - -// users see: -foo(); -// foo() is deprecated, use bar() instead -foo(); -foo(); -``` - - -## License - -(The MIT License) - -Copyright (c) 2014 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/browser.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/browser.js deleted file mode 100644 index 549ae2f065e..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/browser.js +++ /dev/null @@ -1,67 +0,0 @@ - -/** - * Module exports. - */ - -module.exports = deprecate; - -/** - * Mark that a method should not be used. - * Returns a modified function which warns once by default. - * - * If `localStorage.noDeprecation = true` is set, then it is a no-op. - * - * If `localStorage.throwDeprecation = true` is set, then deprecated functions - * will throw an Error when invoked. - * - * If `localStorage.traceDeprecation = true` is set, then deprecated functions - * will invoke `console.trace()` instead of `console.error()`. - * - * @param {Function} fn - the function to deprecate - * @param {String} msg - the string to print to the console when `fn` is invoked - * @returns {Function} a new "deprecated" version of `fn` - * @api public - */ - -function deprecate (fn, msg) { - if (config('noDeprecation')) { - return fn; - } - - var warned = false; - function deprecated() { - if (!warned) { - if (config('throwDeprecation')) { - throw new Error(msg); - } else if (config('traceDeprecation')) { - console.trace(msg); - } else { - console.warn(msg); - } - warned = true; - } - return fn.apply(this, arguments); - } - - return deprecated; -} - -/** - * Checks `localStorage` for boolean values for the given `name`. 
- * - * @param {String} name - * @returns {Boolean} - * @api private - */ - -function config (name) { - // accessing global.localStorage can trigger a DOMException in sandboxed iframes - try { - if (!global.localStorage) return false; - } catch (_) { - return false; - } - var val = global.localStorage[name]; - if (null == val) return false; - return String(val).toLowerCase() === 'true'; -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/node.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/node.js deleted file mode 100644 index 5e6fcff5ddd..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/node.js +++ /dev/null @@ -1,6 +0,0 @@ - -/** - * For Node.js, simply re-export the core `util.deprecate` function. - */ - -module.exports = require('util').deprecate; diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/package.json deleted file mode 100644 index 44061da89bc..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/package.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "util-deprecate@~1.0.1", - "scope": null, - "escapedName": "util-deprecate", - "name": "util-deprecate", - "rawSpec": "~1.0.1", - "spec": ">=1.0.1 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" - ], - [ - { - "raw": "util-deprecate@~1.0.1", - "scope": null, - "escapedName": "util-deprecate", - "name": "util-deprecate", - "rawSpec": "~1.0.1", - "spec": ">=1.0.1 <1.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" - ] - ], - "_from": "util-deprecate@~1.0.1", - "_id": "util-deprecate@1.0.2", - "_inCache": true, - "_location": "/mississippi/through2/readable-stream/util-deprecate", - "_nodeVersion": "4.1.2", - "_npmUser": { - "name": "tootallnate", - "email": "nathan@tootallnate.net" - }, - "_npmVersion": "2.14.4", - "_phantomChildren": {}, - "_requested": { - "raw": "util-deprecate@~1.0.1", - "scope": null, - "escapedName": "util-deprecate", - "name": "util-deprecate", - "rawSpec": "~1.0.1", - "spec": ">=1.0.1 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/through2/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "_shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", - "_shrinkwrap": null, - "_spec": "util-deprecate@~1.0.1", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", - "author": { - "name": "Nathan Rajlich", - "email": "nathan@tootallnate.net", - "url": "http://n8.io/" - }, - "browser": "browser.js", - "bugs": { - "url": "https://github.com/TooTallNate/util-deprecate/issues" - }, - "dependencies": {}, - "description": "The Node.js `util.deprecate()` function with browser support", - "devDependencies": {}, - "directories": {}, - "dist": { - "shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", - "tarball": 
"https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" - }, - "gitHead": "475fb6857cd23fafff20c1be846c1350abf8e6d4", - "homepage": "https://github.com/TooTallNate/util-deprecate", - "keywords": [ - "util", - "deprecate", - "browserify", - "browser", - "node" - ], - "license": "MIT", - "main": "node.js", - "maintainers": [ - { - "name": "tootallnate", - "email": "nathan@tootallnate.net" - } - ], - "name": "util-deprecate", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/TooTallNate/util-deprecate.git" - }, - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "version": "1.0.2" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/package.json deleted file mode 100644 index ae39f5f97d5..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/package.json +++ /dev/null @@ -1,125 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "readable-stream@~2.0.0", - "scope": null, - "escapedName": "readable-stream", - "name": "readable-stream", - "rawSpec": "~2.0.0", - "spec": ">=2.0.0 <2.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream" - ], - [ - { - "raw": "readable-stream@~2.0.0", - "scope": null, - "escapedName": "readable-stream", - "name": "readable-stream", - "rawSpec": "~2.0.0", - "spec": ">=2.0.0 <2.1.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2" - ] - ], - "_from": "readable-stream@~2.0.0", - "_id": "readable-stream@2.0.6", - "_inCache": true, - "_location": "/mississippi/through2/readable-stream", - "_nodeVersion": "5.7.0", - "_npmOperationalInternal": { - "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/readable-stream-2.0.6.tgz_1457893507709_0.369257491780445" - }, - "_npmUser": { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - }, - "_npmVersion": "3.6.0", - "_phantomChildren": {}, - "_requested": { - "raw": "readable-stream@~2.0.0", - "scope": null, - "escapedName": "readable-stream", - "name": "readable-stream", - "rawSpec": "~2.0.0", - "spec": ">=2.0.0 <2.1.0", - "type": "range" - }, - "_requiredBy": [ - "/mississippi/through2" - ], - "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", - "_shasum": "8f90341e68a53ccc928788dacfcd11b36eb9b78e", - "_shrinkwrap": null, - "_spec": "readable-stream@~2.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2", - "browser": { - "util": false - }, - "bugs": { - "url": "https://github.com/nodejs/readable-stream/issues" - }, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "~1.0.0", - "process-nextick-args": "~1.0.6", - "string_decoder": "~0.10.x", - "util-deprecate": "~1.0.1" - }, - "description": "Streams3, a user-land copy of the stream library from Node.js", - "devDependencies": { - "tap": "~0.2.6", - "tape": "~4.5.1", - "zuul": "~3.9.0" - }, - "directories": {}, - "dist": { - "shasum": "8f90341e68a53ccc928788dacfcd11b36eb9b78e", - "tarball": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz" - }, - "gitHead": "01fb5608a970b42c900b96746cadc13d27dd9d7e", - "homepage": "https://github.com/nodejs/readable-stream#readme", - "keywords": [ - "readable", - 
"stream", - "pipe" - ], - "license": "MIT", - "main": "readable.js", - "maintainers": [ - { - "name": "isaacs", - "email": "isaacs@npmjs.com" - }, - { - "name": "tootallnate", - "email": "nathan@tootallnate.net" - }, - { - "name": "rvagg", - "email": "rod@vagg.org" - }, - { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - } - ], - "name": "readable-stream", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/nodejs/readable-stream.git" - }, - "scripts": { - "browser": "npm run write-zuul && zuul -- test/browser.js", - "test": "tap test/parallel/*.js test/ours/*.js", - "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml" - }, - "version": "2.0.6" -} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/passthrough.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/passthrough.js deleted file mode 100644 index 27e8d8a5516..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/passthrough.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_passthrough.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/readable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/readable.js deleted file mode 100644 index 6222a579864..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/readable.js +++ /dev/null @@ -1,12 +0,0 @@ -var Stream = (function (){ - try { - return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify - } catch(_){} -}()); -exports = module.exports = require('./lib/_stream_readable.js'); -exports.Stream = Stream || exports; -exports.Readable = exports; -exports.Writable = require('./lib/_stream_writable.js'); -exports.Duplex = require('./lib/_stream_duplex.js'); -exports.Transform = require('./lib/_stream_transform.js'); -exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/transform.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/transform.js deleted file mode 100644 index 5d482f0780e..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/transform.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_transform.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/writable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/writable.js deleted file mode 100644 index e1e9efdf3c1..00000000000 --- a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/writable.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_writable.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/package.json index c7413b9f91c..a60bc8b192d 100644 --- a/deps/npm/node_modules/mississippi/node_modules/through2/package.json +++ b/deps/npm/node_modules/mississippi/node_modules/through2/package.json @@ -14,22 +14,20 @@ ] ], "_from": "through2@>=2.0.0 <3.0.0", - "_id": "through2@2.0.1", + "_id": 
"through2@2.0.3", "_inCache": true, "_location": "/mississippi/through2", - "_nodeVersion": "5.5.0", + "_nodeVersion": "7.2.0", "_npmOperationalInternal": { - "host": "packages-6-west.internal.npmjs.com", - "tmp": "tmp/through2-2.0.1.tgz_1454928418348_0.7339043114334345" + "host": "packages-18-east.internal.npmjs.com", + "tmp": "tmp/through2-2.0.3.tgz_1480373529377_0.264686161885038" }, "_npmUser": { "name": "rvagg", "email": "rod@vagg.org" }, - "_npmVersion": "3.6.0", - "_phantomChildren": { - "inherits": "2.0.3" - }, + "_npmVersion": "3.10.9", + "_phantomChildren": {}, "_requested": { "raw": "through2@^2.0.0", "scope": null, @@ -42,8 +40,8 @@ "_requiredBy": [ "/mississippi" ], - "_resolved": "https://registry.npmjs.org/through2/-/through2-2.0.1.tgz", - "_shasum": "384e75314d49f32de12eebb8136b8eb6b5d59da9", + "_resolved": "https://registry.npmjs.org/through2/-/through2-2.0.3.tgz", + "_shasum": "0004569b37c7c74ba39c43f3ced78d1ad94140be", "_shrinkwrap": null, "_spec": "through2@^2.0.0", "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", @@ -56,22 +54,22 @@ "url": "https://github.com/rvagg/through2/issues" }, "dependencies": { - "readable-stream": "~2.0.0", - "xtend": "~4.0.0" + "readable-stream": "^2.1.5", + "xtend": "~4.0.1" }, "description": "A tiny wrapper around Node streams2 Transform to avoid explicit subclassing noise", "devDependencies": { - "bl": "~0.9.4", + "bl": "~1.1.2", "faucet": "0.0.1", "stream-spigot": "~3.0.5", - "tape": "~4.0.0" + "tape": "~4.6.2" }, "directories": {}, "dist": { - "shasum": "384e75314d49f32de12eebb8136b8eb6b5d59da9", - "tarball": "https://registry.npmjs.org/through2/-/through2-2.0.1.tgz" + "shasum": "0004569b37c7c74ba39c43f3ced78d1ad94140be", + "tarball": "https://registry.npmjs.org/through2/-/through2-2.0.3.tgz" }, - "gitHead": "6d52a1b77db13a741f2708cd5854a198e4ae3072", + "gitHead": "4383b10b2cb6a32ae215760715b317513abe609f", "homepage": "https://github.com/rvagg/through2#readme", "keywords": [ "stream", @@ -102,5 +100,5 @@ "test": "node test/test.js | faucet", "test-local": "brtapsauce-local test/basic-test.js" }, - "version": "2.0.1" + "version": "2.0.3" } diff --git a/deps/npm/node_modules/mississippi/package.json b/deps/npm/node_modules/mississippi/package.json index 3595472180e..503a931104a 100644 --- a/deps/npm/node_modules/mississippi/package.json +++ b/deps/npm/node_modules/mississippi/package.json @@ -2,50 +2,54 @@ "_args": [ [ { - "raw": "mississippi@~1.2.0", + "raw": "mississippi@1.3.0", "scope": null, "escapedName": "mississippi", "name": "mississippi", - "rawSpec": "~1.2.0", - "spec": ">=1.2.0 <1.3.0", - "type": "range" + "rawSpec": "1.3.0", + "spec": "1.3.0", + "type": "version" }, "/Users/zkat/Documents/code/npm" ] ], - "_from": "mississippi@>=1.2.0 <1.3.0", - "_id": "mississippi@1.2.0", + "_from": "mississippi@1.3.0", + "_id": "mississippi@1.3.0", "_inCache": true, "_location": "/mississippi", - "_nodeVersion": "4.2.3", + "_nodeVersion": "6.9.2", + "_npmOperationalInternal": { + "host": "packages-18-east.internal.npmjs.com", + "tmp": "tmp/mississippi-1.3.0.tgz_1484780252704_0.3479328122921288" + }, "_npmUser": { "name": "maxogden", "email": "max@maxogden.com" }, - "_npmVersion": "2.14.15", + "_npmVersion": "3.10.9", "_phantomChildren": { "inherits": "2.0.3", "once": "1.4.0", - "readable-stream": "2.1.5", + "readable-stream": "2.2.2", "wrappy": "1.0.2" }, "_requested": { - "raw": "mississippi@~1.2.0", + "raw": "mississippi@1.3.0", "scope": null, "escapedName": "mississippi", "name": "mississippi", - "rawSpec": "~1.2.0", - 
"spec": ">=1.2.0 <1.3.0", - "type": "range" + "rawSpec": "1.3.0", + "spec": "1.3.0", + "type": "version" }, "_requiredBy": [ "#USER", "/" ], - "_resolved": "https://registry.npmjs.org/mississippi/-/mississippi-1.2.0.tgz", - "_shasum": "cd51bb9bbad3ddb13dee3cf60f1d0929c7a7fa4c", + "_resolved": "https://registry.npmjs.org/mississippi/-/mississippi-1.3.0.tgz", + "_shasum": "d201583eb12327e3c5c1642a404a9cacf94e34f5", "_shrinkwrap": null, - "_spec": "mississippi@~1.2.0", + "_spec": "mississippi@1.3.0", "_where": "/Users/zkat/Documents/code/npm", "author": { "name": "max ogden" @@ -59,6 +63,7 @@ "end-of-stream": "^1.1.0", "flush-write-stream": "^1.0.0", "from2": "^2.1.0", + "parallel-transform": "^1.1.0", "pump": "^1.0.0", "pumpify": "^1.3.3", "stream-each": "^1.1.0", @@ -68,10 +73,10 @@ "devDependencies": {}, "directories": {}, "dist": { - "shasum": "cd51bb9bbad3ddb13dee3cf60f1d0929c7a7fa4c", - "tarball": "https://registry.npmjs.org/mississippi/-/mississippi-1.2.0.tgz" + "shasum": "d201583eb12327e3c5c1642a404a9cacf94e34f5", + "tarball": "https://registry.npmjs.org/mississippi/-/mississippi-1.3.0.tgz" }, - "gitHead": "4aab2a2d4d98fd5e300a329048eb02a12df44c60", + "gitHead": "dd64841b932d06baf7859ee80db78d139c90b4c7", "homepage": "https://github.com/maxogden/mississippi#readme", "license": "BSD-2-Clause", "main": "index.js", @@ -91,5 +96,5 @@ "scripts": { "test": "echo \"Error: no test specified\" && exit 1" }, - "version": "1.2.0" + "version": "1.3.0" } diff --git a/deps/npm/node_modules/mississippi/readme.md b/deps/npm/node_modules/mississippi/readme.md index 9013bb0dc53..569803865c5 100644 --- a/deps/npm/node_modules/mississippi/readme.md +++ b/deps/npm/node_modules/mississippi/readme.md @@ -21,6 +21,7 @@ var miss = require('mississippi') - [to](#to) - [concat](#concat) - [finished](#finished) +- [parallel](#parallel) ### pipe @@ -28,13 +29,13 @@ var miss = require('mississippi') Pipes streams together and destroys all of them if one of them closes. Calls `cb` with `(error)` if there was an error in any of the streams. -When using standard `source.pipe(destination)` the source will _not_ be destroyed if the destination emits close or error. You are also not able to provide a callback to tell when then pipe has finished. +When using standard `source.pipe(destination)` the source will _not_ be destroyed if the destination emits close or error. You are also not able to provide a callback to tell when the pipe has finished. `miss.pipe` does these two things for you, ensuring you handle stream errors 100% of the time (unhandled errors are probably the most common bug in most node streams code) #### original module -`miss.pipe` is provided by [`require('pump')`](https://npmjs.org/pump) +`miss.pipe` is provided by [`require('pump')`](https://www.npmjs.com/package/pump) #### example @@ -56,13 +57,13 @@ miss.pipe(read, write, function (err) { ##### `miss.each(stream, each, [done])` -Iterate the data in `stream` one chunk at a time. Your `each` function will be called with with `(data, next)` where data is a data chunk and next is a callback. Call `next` when you are ready to consume the next chunk. +Iterate the data in `stream` one chunk at a time. Your `each` function will be called with `(data, next)` where data is a data chunk and next is a callback. Call `next` when you are ready to consume the next chunk. Optionally you can call `next` with an error to destroy the stream. 
You can also pass the optional third argument, `done`, which is a function that will be called with `(err)` when the stream ends. The `err` argument will be populated with an error if the stream emitted an error. #### original module -`miss.each` is provided by [`require('stream-each')`](https://npmjs.org/stream-each) +`miss.each` is provided by [`require('stream-each')`](https://www.npmjs.com/package/stream-each) #### example @@ -97,7 +98,7 @@ If any of the streams in the pipeline emits an error or gets destroyed, or you d #### original module -`miss.pipeline` is provided by [`require('pumpify')`](https://npmjs.org/pumpify) +`miss.pipeline` is provided by [`require('pumpify')`](https://www.npmjs.com/package/pumpify) #### example @@ -136,7 +137,7 @@ You can either choose to supply the writable and the readable at the time you cr #### original module -`miss.duplex` is provided by [`require('duplexify')`](https://npmjs.org/duplexify) +`miss.duplex` is provided by [`require('duplexify')`](https://www.npmjs.com/package/duplexify) #### example @@ -165,7 +166,7 @@ The `flushFunction`, with signature `(cb)`, is called just before the stream is #### original module -`miss.through` is provided by [`require('through2')`](https://npmjs.org/through2) +`miss.through` is provided by [`require('through2')`](https://www.npmjs.com/package/through2) #### example @@ -178,10 +179,10 @@ var write = fs.createWriteStream('./AWESOMECASE.TXT') // Leaving out the options object var uppercaser = miss.through( function (chunk, enc, cb) { - cb(chunk.toString().toUpperCase()) + cb(null, chunk.toString().toUpperCase()) }, function (cb) { - cb('ONE LAST BIT OF UPPERCASE') + cb(null, 'ONE LAST BIT OF UPPERCASE') } ) @@ -206,7 +207,7 @@ Returns a readable stream that calls `read(size, next)` when data is requested f #### original module -`miss.from` is provided by [`require('from2')`](https://npmjs.org/from2) +`miss.from` is provided by [`require('from2')`](https://www.npmjs.com/package/from2) #### example @@ -252,7 +253,7 @@ Returns a writable stream that calls `write(data, enc, cb)` when data is written #### original module -`miss.to` is provided by [`require('flush-write-stream')`](https://npmjs.org/flush-write-stream) +`miss.to` is provided by [`require('flush-write-stream')`](https://www.npmjs.com/package/flush-write-stream) #### example @@ -294,13 +295,13 @@ finished Returns a writable stream that concatenates all data written to the stream and calls a callback with the single result. -Calling `miss.concat(cb)` returns a writable stream. `cb` is called when the writable stream is finished, e.g. when all data is done being written to it. `cb` is called with a single argument, `(data)`, which will containe the result of concatenating all the data written to the stream. +Calling `miss.concat(cb)` returns a writable stream. `cb` is called when the writable stream is finished, e.g. when all data is done being written to it. `cb` is called with a single argument, `(data)`, which will contain the result of concatenating all the data written to the stream. Note that `miss.concat` will not handle stream errors for you. To handle errors, use `miss.pipe` or handle the `error` event manually. 
#### original module -`miss.concat` is provided by [`require('concat-stream')`](https://npmjs.org/concat-stream) +`miss.concat` is provided by [`require('concat-stream')`](https://www.npmjs.com/package/concat-stream) #### example @@ -335,7 +336,7 @@ This function is useful for simplifying stream handling code as it lets you hand #### original module -`miss.finished` is provided by [`require('end-of-stream')`](https://npmjs.org/end-of-stream) +`miss.finished` is provided by [`require('end-of-stream')`](https://www.npmjs.com/package/end-of-stream) #### example @@ -350,3 +351,44 @@ miss.finished(copyDest, function(err) { console.log('write success') }) ``` + +### parallel + +#####`miss.parallel(concurrency, each)` + +This works like `through` except you can process items in parallel, while still preserving the original input order. + +This is handy if you wanna take advantage of node's async I/O and process streams of items in batches. With this module you can build your very own streaming parallel job queue. + +Note that `miss.parallel` preserves input ordering, if you don't need that then you can use [through2-concurrent](https://github.com/almost/through2-concurrent) instead, which is very similar to this otherwise. + +#### original module + +`miss.parallel` is provided by [`require('parallel-transform')`](https://npmjs.org/parallel-transform) + +#### example + +This example fetches the GET HTTP headers for a stream of input URLs 5 at a time in parallel. + +```js +function getResponse (item, cb) { + var r = request(item.url) + r.on('error', function (err) { + cb(err) + }) + r.on('response', function (re) { + cb(null, {url: item.url, date: new Date(), status: re.statusCode, headers: re.headers}) + r.abort() + }) +} + +miss.pipe( + fs.createReadStream('./urls.txt'), // one url per line + split(), + miss.parallel(5, getResponse), + miss.through(function (row, enc, next) { + console.log(JSON.stringify(row)) + next() + }) +) +``` diff --git a/deps/npm/node_modules/request/node_modules/form-data/README.md b/deps/npm/node_modules/request/node_modules/form-data/README.md new file mode 100644 index 00000000000..642a9d14a80 --- /dev/null +++ b/deps/npm/node_modules/request/node_modules/form-data/README.md @@ -0,0 +1,217 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v2.1.2.svg?label=linux:0.12-6.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v2.1.2.svg?label=macos:0.12-6.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/form-data/v2.1.2.svg?label=windows:0.12-6.x)](https://ci.appveyor.com/project/alexindigo/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v2.1.2.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) +[![bitHound Overall Score](https://www.bithound.io/github/form-data/form-data/badges/score.svg)](https://www.bithound.io/github/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', + contentType: 'image/jpg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. + +## License + +Form-Data is released under the [MIT](License) license. 
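The `knownLength` workaround mentioned in the notes of the README added above is easy to miss in practice, so here is a minimal sketch of it. It is not part of the changeset; the file name `upload.bin` is hypothetical, and the sketch only assumes the `form-data` API already shown in that README (`append()` with a `knownLength` option and `getLengthSync()`):

``` javascript
// Minimal sketch (not from this diff): pass knownLength so that
// getLengthSync() can account for a stream it cannot measure itself.
var FormData = require('form-data');
var fs = require('fs');

var file = './upload.bin'; // hypothetical local file
var form = new FormData();

form.append('my_file', fs.createReadStream(file), {
  // Without knownLength, getLengthSync() cannot account for the stream,
  // because a stream's size can only be determined asynchronously.
  knownLength: fs.statSync(file).size
});

console.log(form.getLengthSync()); // total body length, stream included
```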
diff --git a/deps/npm/package.json b/deps/npm/package.json index 4a15414a98d..daed5a47476 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "4.1.2", + "version": "4.2.0", "name": "npm", "description": "a package manager for JavaScript", "keywords": [ @@ -32,6 +32,7 @@ "dependencies": { "JSONStream": "~1.3.0", "abbrev": "~1.0.9", + "ansi-regex": "~2.1.1", "ansicolors": "~0.3.2", "ansistyles": "~0.1.3", "aproba": "~1.0.4", @@ -62,7 +63,7 @@ "lodash.union": "~4.6.0", "lodash.uniq": "~4.5.0", "lodash.without": "~4.4.0", - "mississippi": "~1.2.0", + "mississippi": "~1.3.0", "mkdirp": "~0.5.1", "node-gyp": "~3.5.0", "nopt": "~4.0.1", diff --git a/deps/npm/test/tap/debug-logs.js b/deps/npm/test/tap/debug-logs.js new file mode 100644 index 00000000000..fbd244446e1 --- /dev/null +++ b/deps/npm/test/tap/debug-logs.js @@ -0,0 +1,101 @@ +'use strict' +var path = require('path') +var test = require('tap').test +var Tacks = require('tacks') +var glob = require('glob') +var asyncMap = require('slide').asyncMap +var File = Tacks.File +var Dir = Tacks.Dir +var extend = Object.assign || require('util')._extend +var common = require('../common-tap.js') + +var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var testdir = path.join(basedir, 'testdir') +var cachedir = path.join(basedir, 'cache') +var globaldir = path.join(basedir, 'global') +var tmpdir = path.join(basedir, 'tmp') + +var conf = { + cwd: testdir, + env: extend(extend({}, process.env), { + npm_config_cache: cachedir, + npm_config_tmp: tmpdir, + npm_config_prefix: globaldir, + npm_config_registry: common.registry, + npm_config_loglevel: 'warn' + }) +} + +var fixture = new Tacks(Dir({ + cache: Dir(), + global: Dir(), + tmp: Dir(), + testdir: Dir({ + 'package.json': File({ + name: 'debug-logs', + version: '1.0.0', + scripts: { + true: 'node -e "process.exit(0)"', + false: 'node -e "process.exit(1)"' + } + }) + }) +})) + +function setup () { + cleanup() + fixture.create(basedir) +} + +function cleanup () { + fixture.remove(basedir) +} + +test('setup', function (t) { + setup() + t.done() +}) + +test('example', function (t) { + common.npm(['run', 'false'], conf, function (err, code, stdout, stderr) { + if (err) throw err + t.is(code, 1, 'command errored') + var matches = stderr.match(/Please include the following file with any support request:.*\nnpm ERR! 
{5,5}(.*)/) + t.ok(matches, 'debug log mentioned in error message') + if (matches) { + var logfile = matches[1] + t.matches(path.relative(cachedir, logfile), /^_logs/, 'debug log is inside the cache in _logs') + } + + // we run a bunch concurrently, this will actually create > than our limit as the check is done + // when the command starts + var todo = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + asyncMap(todo, function (num, next) { + common.npm(['run', '--logs-max=10', 'false'], conf, function (err, code) { + if (err) throw err + t.is(code, 1, 'run #' + num + ' errored as expected') + next() + }) + }, function () { + // now we do one more and that should clean up the list + common.npm(['run', '--logs-max=10', 'false'], conf, function (err, code) { + if (err) throw err + t.is(code, 1, 'final run errored as expected') + var files = glob.sync(path.join(cachedir, '_logs', '*')) + t.is(files.length, 10, 'there should never be more than 10 log files') + common.npm(['run', '--logs-max=5', 'true'], conf, function (err, code) { + if (err) throw err + t.is(code, 0, 'success run is ok') + var files = glob.sync(path.join(cachedir, '_logs', '*')) + t.is(files.length, 4, 'after success there should be logs-max - 1 log files') + t.done() + }) + }) + }) + }) +}) + +test('cleanup', function (t) { + cleanup() + t.done() +}) diff --git a/deps/npm/test/tap/gently-rm-overeager.js b/deps/npm/test/tap/gently-rm-overeager.js index c266f1c4dc1..08fa72bc71c 100644 --- a/deps/npm/test/tap/gently-rm-overeager.js +++ b/deps/npm/test/tap/gently-rm-overeager.js @@ -1,4 +1,4 @@ -var resolve = require('path').resolve +var path = require('path') var fs = require('graceful-fs') var test = require('tap').test var mkdirp = require('mkdirp') @@ -6,8 +6,8 @@ var rimraf = require('rimraf') var common = require('../common-tap.js') -var pkg = resolve(__dirname, 'gently-rm-overeager') -var dep = resolve(__dirname, 'test-whoops') +var pkg = path.join(__dirname, 'gently-rm-overeager') +var dep = path.join(__dirname, 'test-whoops') var EXEC_OPTS = { cwd: pkg } @@ -32,8 +32,7 @@ test('cache add', function (t) { t.ok(c, 'test-whoops install also failed') fs.readdir(pkg, function (er, files) { t.ifError(er, 'package directory is still there') - t.deepEqual(files, ['npm-debug.log'], 'only debug log remains') - + t.deepEqual(files, [], 'no files remain') t.end() }) }) @@ -53,7 +52,7 @@ function cleanup () { function setup () { mkdirp.sync(pkg) // so it doesn't try to install into npm's own node_modules - mkdirp.sync(resolve(pkg, 'node_modules')) + mkdirp.sync(path.join(pkg, 'node_modules')) mkdirp.sync(dep) - fs.writeFileSync(resolve(dep, 'package.json'), JSON.stringify(fixture)) + fs.writeFileSync(path.join(dep, 'package.json'), JSON.stringify(fixture)) } diff --git a/deps/npm/test/tap/run-script.js b/deps/npm/test/tap/run-script.js index 3892337cee9..4dea9b83604 100644 --- a/deps/npm/test/tap/run-script.js +++ b/deps/npm/test/tap/run-script.js @@ -67,6 +67,14 @@ var preversionOnly = { } } +var exitCode = { + name: 'scripted', + version: '1.2.3', + scripts: { + 'start': 'node -e "process.exit(7)"' + } +} + function testOutput (t, command, er, code, stdout, stderr) { var lines @@ -323,6 +331,17 @@ test('npm run-script no-params (direct only)', function (t) { }) }) +test('npm run-script keep non-zero exit code', function (t) { + writeMetadata(exitCode) + + common.npm(['run-script', 'start'], opts, function (err, code, stdout, stderr) { + t.ifError(err, 'ran run-script without parameters without crashing') + t.equal(code, 7, 'got expected 
exit code') + t.ok(stderr, 'should generate errors') + t.end() + }) +}) + test('cleanup', function (t) { cleanup() t.end() diff --git a/deps/npm/test/tap/search.all-package-search.js b/deps/npm/test/tap/search.all-package-search.js new file mode 100644 index 00000000000..c70f4f8e7ef --- /dev/null +++ b/deps/npm/test/tap/search.all-package-search.js @@ -0,0 +1,201 @@ +var path = require('path') +var mkdirp = require('mkdirp') +var mr = require('npm-registry-mock') +var osenv = require('osenv') +var rimraf = require('rimraf') +var cacheFile = require('npm-cache-filename') +var test = require('tap').test +var Tacks = require('tacks') +var File = Tacks.File + +var common = require('../common-tap.js') + +var PKG_DIR = path.resolve(__dirname, 'search') +var CACHE_DIR = path.resolve(PKG_DIR, 'cache') +var cacheBase = cacheFile(CACHE_DIR)(common.registry + '/-/all') +var cachePath = path.join(cacheBase, '.cache.json') + +var server + +test('setup', function (t) { + mr({port: common.port, throwOnUnmatched: true}, function (err, s) { + t.ifError(err, 'registry mocked successfully') + server = s + t.pass('all set up') + t.done() + }) +}) + +var searches = [ + { + term: 'cool', + description: 'non-regex search', + location: 103 + }, + { + term: '/cool/', + description: 'regex search', + location: 103 + }, + { + term: 'cool', + description: 'searches name field', + location: 103 + }, + { + term: 'ool', + description: 'excludes matches for --searchexclude', + location: 205, + inject: { + other: { name: 'other', description: 'this is a simple tool' } + }, + extraOpts: ['--searchexclude', 'cool'] + }, + { + term: 'neat lib', + description: 'searches description field', + location: 141, + inject: { + cool: { + name: 'cool', version: '5.0.0', description: 'this is a neat lib' + } + } + }, + { + term: 'foo', + description: 'skips description field with --no-description', + location: 80, + inject: { + cool: { + name: 'cool', version: '5.0.0', description: 'foo bar!' 
+ } + }, + extraOpts: ['--no-description'] + }, + { + term: 'zkat', + description: 'searches maintainers by name', + location: 155, + inject: { + cool: { + name: 'cool', + version: '5.0.0', + maintainers: [{ + name: 'zkat' + }] + } + } + }, + { + term: '=zkat', + description: 'searches maintainers unambiguously by =name', + location: 154, + inject: { + bar: { name: 'bar', description: 'zkat thing', version: '1.0.0' }, + cool: { + name: 'cool', + version: '5.0.0', + maintainers: [{ + name: 'zkat' + }] + } + } + }, + { + term: 'github.com', + description: 'searches projects by url', + location: 205, + inject: { + bar: { + name: 'bar', + url: 'gitlab.com/bar', + // For historical reasons, `url` is only present if `versions` is there + versions: ['1.0.0'], + version: '1.0.0' + }, + cool: { + name: 'cool', + version: '5.0.0', + versions: ['1.0.0'], + url: 'github.com/cool/cool' + } + } + }, + { + term: 'monad', + description: 'searches projects by keywords', + location: 197, + inject: { + cool: { + name: 'cool', + version: '5.0.0', + keywords: ['monads'] + } + } + } +] + +// These test classic hand-matched searches +searches.forEach(function (search) { + test(search.description, function (t) { + setup() + server.get('/-/v1/search?text=' + encodeURIComponent(search.term) + '&size=20').once().reply(404, {}) + var now = Date.now() + var cacheContents = { + '_updated': now, + bar: { name: 'bar', version: '1.0.0' }, + cool: { name: 'cool', version: '5.0.0' }, + foo: { name: 'foo', version: '2.0.0' }, + other: { name: 'other', version: '1.0.0' } + } + for (var k in search.inject) { + cacheContents[k] = search.inject[k] + } + var fixture = new Tacks(File(cacheContents)) + fixture.create(cachePath) + common.npm([ + 'search', search.term, + '--registry', common.registry, + '--cache', CACHE_DIR, + '--loglevel', 'error', + '--color', 'always' + ].concat(search.extraOpts || []), + {}, + function (err, code, stdout, stderr) { + t.equal(stderr, '', 'no error output') + t.notEqual(stdout, '', 'got output') + t.equal(code, 0, 'search finished successfully') + t.ifErr(err, 'search finished successfully') + // \033 == \u001B + var markStart = '\u001B\\[[0-9][0-9]m' + var markEnd = '\u001B\\[0m' + + var re = new RegExp(markStart + '.*?' 
+ markEnd) + + var cnt = stdout.search(re) + t.equal( + cnt, + search.location, + search.description + ' search for ' + search.term + ) + t.end() + }) + }) +}) + +test('cleanup', function (t) { + cleanup() + server.close() + t.end() +}) + +function setup () { + cleanup() + mkdirp.sync(cacheBase) +} + +function cleanup () { + server.done() + process.chdir(osenv.tmpdir()) + rimraf.sync(PKG_DIR) +} diff --git a/deps/npm/test/tap/search.esearch.js b/deps/npm/test/tap/search.esearch.js new file mode 100644 index 00000000000..d892aec9575 --- /dev/null +++ b/deps/npm/test/tap/search.esearch.js @@ -0,0 +1,192 @@ +var common = require('../common-tap.js') +var finished = require('mississippi').finished +var mkdirp = require('mkdirp') +var mr = require('npm-registry-mock') +var npm = require('../../') +var osenv = require('osenv') +var path = require('path') +var rimraf = require('rimraf') +var test = require('tap').test + +var SEARCH = '/-/v1/search' +var PKG_DIR = path.resolve(__dirname, 'create-entry-update-stream') +var CACHE_DIR = path.resolve(PKG_DIR, 'cache') + +var esearch = require('../../lib/search/esearch') + +var server + +function setup () { + cleanup() + mkdirp.sync(CACHE_DIR) + process.chdir(CACHE_DIR) +} + +function cleanup () { + process.chdir(osenv.tmpdir()) + rimraf.sync(PKG_DIR) +} + +test('setup', function (t) { + mr({port: common.port, throwOnUnmatched: true}, function (err, s) { + t.ifError(err, 'registry mocked successfully') + npm.load({ cache: CACHE_DIR, registry: common.registry }, function (err) { + t.ifError(err, 'npm loaded successfully') + server = s + t.pass('all set up') + t.done() + }) + }) +}) + +test('basic test', function (t) { + setup() + server.get(SEARCH + '?text=oo&size=1').once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'foo', version: '2.0.0' } } + ] + }) + var results = [] + var s = esearch({ + include: ['oo'], + limit: 1 + }) + t.ok(s, 'got a stream') + s.on('data', function (d) { + results.push(d) + }) + finished(s, function (err) { + if (err) { throw err } + t.ok(true, 'stream finished without error') + t.deepEquals(results, [{ + name: 'cool', + version: '1.0.0', + description: null, + maintainers: null, + keywords: null, + date: null + }, { + name: 'foo', + version: '2.0.0', + description: null, + maintainers: null, + keywords: null, + date: null + }]) + server.done() + t.done() + }) +}) + +test('only returns certain fields for each package', function (t) { + setup() + var date = new Date() + server.get(SEARCH + '?text=oo&size=1').once().reply(200, { + objects: [{ + package: { + name: 'cool', + version: '1.0.0', + description: 'desc', + maintainers: [ + {username: 'x', email: 'a@b.c'}, + {username: 'y', email: 'c@b.a'} + ], + keywords: ['a', 'b', 'c'], + date: date.toISOString(), + extra: 'lol' + } + }] + }) + var results = [] + var s = esearch({ + include: ['oo'], + limit: 1 + }) + t.ok(s, 'got a stream') + s.on('data', function (d) { + results.push(d) + }) + finished(s, function (err) { + if (err) { throw err } + t.ok(true, 'stream finished without error') + t.deepEquals(results, [{ + name: 'cool', + version: '1.0.0', + description: 'desc', + maintainers: [ + {username: 'x', email: 'a@b.c'}, + {username: 'y', email: 'c@b.a'} + ], + keywords: ['a', 'b', 'c'], + date: date + }]) + server.done() + t.done() + }) +}) + +test('accepts a limit option', function (t) { + setup() + server.get(SEARCH + '?text=oo&size=3').once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + 
{ package: { name: 'cool', version: '1.0.0' } } + ] + }) + var results = 0 + var s = esearch({ + include: ['oo'], + limit: 3 + }) + s.on('data', function () { results++ }) + finished(s, function (err) { + if (err) { throw err } + t.ok(true, 'request sent with correct size') + t.equal(results, 2, 'behaves fine with fewer results than size') + server.done() + t.done() + }) +}) + +test('passes foo:bar syntax params directly', function (t) { + setup() + server.get(SEARCH + '?text=foo%3Abar&size=1').once().reply(200, { + objects: [] + }) + var s = esearch({ + include: ['foo:bar'], + limit: 1 + }) + s.on('data', function () {}) + finished(s, function (err) { + if (err) { throw err } + t.ok(true, 'request sent with correct params') + server.done() + t.done() + }) +}) + +test('space-separates and URI-encodes multiple search params', function (t) { + setup() + server.get(SEARCH + '?text=foo%20bar%3Abaz%20quux%3F%3D&size=1').once().reply(200, { + objects: [] + }) + var s = esearch({ + include: ['foo', 'bar:baz', 'quux?='], + limit: 1 + }) + s.on('data', function () {}) + finished(s, function (err) { + if (err) { throw err } + t.ok(true, 'request sent with correct params') + server.done() + t.done() + }) +}) + +test('cleanup', function (t) { + server.close() + cleanup() + t.done() +}) diff --git a/deps/npm/test/tap/search.js b/deps/npm/test/tap/search.js index dcc46df78e3..78436e1e29a 100644 --- a/deps/npm/test/tap/search.js +++ b/deps/npm/test/tap/search.js @@ -1,5 +1,6 @@ var path = require('path') var mkdirp = require('mkdirp') +var mr = require('npm-registry-mock') var osenv = require('osenv') var rimraf = require('rimraf') var cacheFile = require('npm-cache-filename') @@ -14,28 +15,26 @@ var CACHE_DIR = path.resolve(PKG_DIR, 'cache') var cacheBase = cacheFile(CACHE_DIR)(common.registry + '/-/all') var cachePath = path.join(cacheBase, '.cache.json') +var server + test('setup', function (t) { - t.pass('all set up') - t.done() + mr({port: common.port, throwOnUnmatched: true}, function (err, s) { + t.ifError(err, 'registry mocked successfully') + server = s + t.pass('all set up') + t.done() + }) }) test('notifies when there are no results', function (t) { setup() - var now = Date.now() - var cacheContents = { - '_updated': now, - bar: { name: 'bar', version: '1.0.0' }, - cool: { name: 'cool', version: '1.0.0' }, - foo: { name: 'foo', version: '2.0.0' }, - other: { name: 'other', version: '1.0.0' } - } - var fixture = new Tacks(File(cacheContents)) - fixture.create(cachePath) + server.get('/-/v1/search?text=none&size=20').once().reply(200, { + objects: [] + }) common.npm([ - 'search', 'nomatcheswhatsoeverfromthis', + 'search', 'none', '--registry', common.registry, - '--loglevel', 'error', - '--cache', CACHE_DIR + '--loglevel', 'error' ], {}, function (err, code, stdout, stderr) { if (err) throw err t.equal(stderr, '', 'no error output') @@ -47,6 +46,8 @@ test('notifies when there are no results', function (t) { test('spits out a useful error when no cache nor network', function (t) { setup() + server.get('/-/v1/search?text=foo&size=20').once().reply(404, {}) + server.get('/-/all').many().reply(404, {}) var cacheContents = {} var fixture = new Tacks(File(cacheContents)) fixture.create(cachePath) @@ -54,6 +55,7 @@ test('spits out a useful error when no cache nor network', function (t) { 'search', 'foo', '--registry', common.registry, '--loglevel', 'silly', + '--json', '--fetch-retry-mintimeout', 0, '--fetch-retry-maxtimeout', 0, '--cache', CACHE_DIR @@ -68,16 +70,12 @@ test('spits out a useful error 
when no cache nor network', function (t) { test('can switch to JSON mode', function (t) { setup() - var now = Date.now() - var cacheContents = { - '_updated': now, - bar: { name: 'bar', version: '1.0.0' }, - cool: { name: 'cool', version: '1.0.0' }, - foo: { name: 'foo', version: '2.0.0' }, - other: { name: 'other', version: '1.0.0' } - } - var fixture = new Tacks(File(cacheContents)) - fixture.create(cachePath) + server.get('/-/v1/search?text=oo&size=20').once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'foo', version: '2.0.0' } } + ] + }) common.npm([ 'search', 'oo', '--json', @@ -86,30 +84,23 @@ test('can switch to JSON mode', function (t) { '--cache', CACHE_DIR ], {}, function (err, code, stdout, stderr) { if (err) throw err - t.deepEquals(JSON.parse(stdout), [ - { name: 'cool', version: '1.0.0' }, - { name: 'foo', version: '2.0.0' } - ], 'results returned as valid json') t.equal(stderr, '', 'no error output') t.equal(code, 0, 'search gives 0 error code even if no matches') + t.deepEquals(JSON.parse(stdout), [ + { name: 'cool', version: '1.0.0', date: null }, + { name: 'foo', version: '2.0.0', date: null } + ], 'results returned as valid json') t.done() }) }) test('JSON mode does not notify on empty', function (t) { setup() - var now = Date.now() - var cacheContents = { - '_updated': now, - bar: { name: 'bar', version: '1.0.0' }, - cool: { name: 'cool', version: '1.0.0' }, - foo: { name: 'foo', version: '2.0.0' }, - other: { name: 'other', version: '1.0.0' } - } - var fixture = new Tacks(File(cacheContents)) - fixture.create(cachePath) + server.get('/-/v1/search?text=oo&size=20').once().reply(200, { + objects: [] + }) common.npm([ - 'search', 'nomatcheswhatsoeverfromthis', + 'search', 'oo', '--json', '--registry', common.registry, '--loglevel', 'error', @@ -125,16 +116,12 @@ test('JSON mode does not notify on empty', function (t) { test('can switch to tab separated mode', function (t) { setup() - var now = Date.now() - var cacheContents = { - '_updated': now, - bar: { name: 'bar', version: '1.0.0' }, - cool: { name: 'cool', version: '1.0.0' }, - foo: { name: 'foo', description: 'this\thas\ttabs', version: '2.0.0' }, - other: { name: 'other', version: '1.0.0' } - } - var fixture = new Tacks(File(cacheContents)) - fixture.create(cachePath) + server.get('/-/v1/search?text=oo&size=20').once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'foo', description: 'this\thas\ttabs', version: '2.0.0' } } + ] + }) common.npm([ 'search', 'oo', '--parseable', @@ -152,18 +139,11 @@ test('can switch to tab separated mode', function (t) { test('tab mode does not notify on empty', function (t) { setup() - var now = Date.now() - var cacheContents = { - '_updated': now, - bar: { name: 'bar', version: '1.0.0' }, - cool: { name: 'cool', version: '1.0.0' }, - foo: { name: 'foo', version: '2.0.0' }, - other: { name: 'other', version: '1.0.0' } - } - var fixture = new Tacks(File(cacheContents)) - fixture.create(cachePath) + server.get('/-/v1/search?text=oo&size=20').once().reply(200, { + objects: [] + }) common.npm([ - 'search', 'nomatcheswhatsoeverfromthis', + 'search', 'oo', '--parseable', '--registry', common.registry, '--loglevel', 'error', @@ -192,163 +172,9 @@ test('no arguments provided should error', function (t) { }) }) -var searches = [ - { - term: 'cool', - description: 'non-regex search', - location: 103 - }, - { - term: '/cool/', - description: 'regex search', - location: 103 - }, - { - term: 
'cool', - description: 'searches name field', - location: 103 - }, - { - term: 'ool', - description: 'excludes matches for --searchexclude', - location: 205, - inject: { - other: { name: 'other', description: 'this is a simple tool' } - }, - extraOpts: ['--searchexclude', 'cool'] - }, - { - term: 'neat lib', - description: 'searches description field', - location: 141, - inject: { - cool: { - name: 'cool', version: '5.0.0', description: 'this is a neat lib' - } - } - }, - { - term: 'foo', - description: 'skips description field with --no-description', - location: 80, - inject: { - cool: { - name: 'cool', version: '5.0.0', description: 'foo bar!' - } - }, - extraOpts: ['--no-description'] - }, - { - term: 'zkat', - description: 'searches maintainers by name', - location: 155, - inject: { - cool: { - name: 'cool', - version: '5.0.0', - maintainers: [{ - name: 'zkat' - }] - } - } - }, - { - term: '=zkat', - description: 'searches maintainers unambiguously by =name', - location: 154, - inject: { - bar: { name: 'bar', description: 'zkat thing', version: '1.0.0' }, - cool: { - name: 'cool', - version: '5.0.0', - maintainers: [{ - name: 'zkat' - }] - } - } - }, - { - term: 'github.com', - description: 'searches projects by url', - location: 205, - inject: { - bar: { - name: 'bar', - url: 'gitlab.com/bar', - // For historical reasons, `url` is only present if `versions` is there - versions: ['1.0.0'], - version: '1.0.0' - }, - cool: { - name: 'cool', - version: '5.0.0', - versions: ['1.0.0'], - url: 'github.com/cool/cool' - } - } - }, - { - term: 'monad', - description: 'searches projects by keywords', - location: 197, - inject: { - cool: { - name: 'cool', - version: '5.0.0', - keywords: ['monads'] - } - } - } -] - -searches.forEach(function (search) { - test(search.description, function (t) { - setup() - var now = Date.now() - var cacheContents = { - '_updated': now, - bar: { name: 'bar', version: '1.0.0' }, - cool: { name: 'cool', version: '5.0.0' }, - foo: { name: 'foo', version: '2.0.0' }, - other: { name: 'other', version: '1.0.0' } - } - for (var k in search.inject) { - cacheContents[k] = search.inject[k] - } - var fixture = new Tacks(File(cacheContents)) - fixture.create(cachePath) - common.npm([ - 'search', search.term, - '--registry', common.registry, - '--cache', CACHE_DIR, - '--loglevel', 'error', - '--color', 'always' - ].concat(search.extraOpts || []), - {}, - function (err, code, stdout, stderr) { - t.equal(stderr, '', 'no error output') - t.notEqual(stdout, '', 'got output') - t.equal(code, 0, 'search finished successfully') - t.ifErr(err, 'search finished successfully') - // \033 == \u001B - var markStart = '\u001B\\[[0-9][0-9]m' - var markEnd = '\u001B\\[0m' - - var re = new RegExp(markStart + '.*?' + markEnd) - - var cnt = stdout.search(re) - t.equal( - cnt, - search.location, - search.description + ' search for ' + search.term - ) - t.end() - }) - }) -}) - test('cleanup', function (t) { cleanup() + server.close() t.end() }) @@ -358,6 +184,7 @@ function setup () { } function cleanup () { + server.done() process.chdir(osenv.tmpdir()) rimraf.sync(PKG_DIR) } diff --git a/deps/v8/include/v8-version.h b/deps/v8/include/v8-version.h index bb5cb29c136..663964616f6 100644 --- a/deps/v8/include/v8-version.h +++ b/deps/v8/include/v8-version.h @@ -11,7 +11,7 @@ #define V8_MAJOR_VERSION 5 #define V8_MINOR_VERSION 6 #define V8_BUILD_NUMBER 326 -#define V8_PATCH_LEVEL 56 +#define V8_PATCH_LEVEL 57 // Use 1 for candidates and 0 otherwise. 
// (Boolean macro values are not supported by all preprocessors.) diff --git a/deps/v8/src/parsing/parser-base.h b/deps/v8/src/parsing/parser-base.h index bb62f86e3bd..9195aec9908 100644 --- a/deps/v8/src/parsing/parser-base.h +++ b/deps/v8/src/parsing/parser-base.h @@ -5008,7 +5008,7 @@ typename ParserBase::StatementT ParserBase::ParseSwitchStatement( { BlockState cases_block_state(zone(), &scope_state_); - cases_block_state.set_start_position(scanner()->location().beg_pos); + cases_block_state.set_start_position(switch_pos); cases_block_state.SetNonlinear(); typename Types::Target target(this, switch_statement); diff --git a/deps/v8/src/parsing/parser.cc b/deps/v8/src/parsing/parser.cc index 8d8890129f9..3ed7af267e7 100644 --- a/deps/v8/src/parsing/parser.cc +++ b/deps/v8/src/parsing/parser.cc @@ -1729,6 +1729,10 @@ Statement* Parser::RewriteSwitchStatement(Expression* tag, Block* cases_block = factory()->NewBlock(NULL, 1, false, kNoSourcePosition); cases_block->statements()->Add(switch_statement, zone()); cases_block->set_scope(scope); + DCHECK_IMPLIES(scope != nullptr, + switch_statement->position() >= scope->start_position()); + DCHECK_IMPLIES(scope != nullptr, + switch_statement->position() < scope->end_position()); switch_block->statements()->Add(cases_block, zone()); return switch_block; } diff --git a/deps/v8/test/debugger/regress/regress-6085.js b/deps/v8/test/debugger/regress/regress-6085.js new file mode 100644 index 00000000000..ef251516459 --- /dev/null +++ b/deps/v8/test/debugger/regress/regress-6085.js @@ -0,0 +1,49 @@ +// Copyright 2017 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +function* serialize() { + debugger; + switch (0) { + case 0: + let x = 1; + return x; // Check scopes + } +} + +let exception = null; +let step_count = 0; +let scopes_checked = false; + +function listener(event, exec_state, event_data, data) { + if (event != Debug.DebugEvent.Break) return; + try { + if (exec_state.frame().sourceLineText().includes("Check scopes")) { + let expected = [ debug.ScopeType.Block, + debug.ScopeType.Local, + debug.ScopeType.Script, + debug.ScopeType.Global ]; + for (let i = 0; i < exec_state.frame().scopeCount(); i++) { + assertEquals(expected[i], exec_state.frame().scope(i).scopeType()); + } + scopes_checked = true; + } + if (step_count++ < 3) exec_state.prepareStep(Debug.StepAction.StepNext); + } catch (e) { + exception = e; + print(e, e.stack); + } +} + + + +let Debug = debug.Debug; +Debug.setListener(listener); + +let gen = serialize(); +gen.next(); + +Debug.setListener(null); +assertNull(exception); +assertEquals(4, step_count); +assertTrue(scopes_checked); diff --git a/doc/api/assert.md b/doc/api/assert.md index 14de5af2ed9..868b9044503 100644 --- a/doc/api/assert.md +++ b/doc/api/assert.md @@ -12,22 +12,7 @@ added: v0.5.9 * `value` {any} * `message` {any} -An alias of [`assert.ok()`][] . - -```js -const assert = require('assert'); - -assert(true); -// OK -assert(1); -// OK -assert(false); -// throws "AssertionError: false == true" -assert(0); -// throws "AssertionError: 0 == true" -assert(false, 'it\'s false'); -// throws "AssertionError: it's false" -``` +An alias of [`assert.ok()`][]. ## assert.deepEqual(actual, expected[, message]) * `options` {Object} - * `rejectUnauthorized` {boolean} + * `rejectUnauthorized` {boolean} If not `false`, the server certificate is verified + against the list of supplied CAs. 
An `'error'` event is emitted if + verification fails; `err.code` contains the OpenSSL error code. Defaults to + `true`. * `requestCert` * `callback` {Function} A function that will be called when the renegotiation request has been completed. @@ -769,7 +772,7 @@ changes: connection/disconnection/destruction of `socket` is the user's responsibility, calling `tls.connect()` will not cause `net.connect()` to be called. - * `rejectUnauthorized` {boolean} If `true`, the server certificate is verified + * `rejectUnauthorized` {boolean} If not `false`, the server certificate is verified against the list of supplied CAs. An `'error'` event is emitted if verification fails; `err.code` contains the OpenSSL error code. Defaults to `true`. @@ -1012,9 +1015,9 @@ changes: * `requestCert` {boolean} If `true` the server will request a certificate from clients that connect and attempt to verify that certificate. Defaults to `false`. - * `rejectUnauthorized` {boolean} If `true` the server will reject any + * `rejectUnauthorized` {boolean} If not `false` the server will reject any connection which is not authorized with the list of supplied CAs. This - option only has an effect if `requestCert` is `true`. Defaults to `false`. + option only has an effect if `requestCert` is `true`. Defaults to `true`. * `NPNProtocols` {string[]|Buffer} An array of strings or a `Buffer` naming possible NPN protocols. (Protocols should be ordered by their priority.) * `ALPNProtocols` {string[]|Buffer} An array of strings or a `Buffer` naming @@ -1190,9 +1193,8 @@ changes: opened as a server. * `requestCert` {boolean} `true` to specify whether a server should request a certificate from a connecting client. Only applies when `isServer` is `true`. -* `rejectUnauthorized` {boolean} `true` to specify whether a server should - automatically reject clients with invalid certificates. Only applies when - `isServer` is `true`. +* `rejectUnauthorized` {boolean} If not `false` a server automatically reject clients + with invalid certificates. Only applies when `isServer` is `true`. * `options` * `secureContext`: An optional TLS context object from [`tls.createSecureContext()`][] diff --git a/doc/changelogs/CHANGELOG_V4.md b/doc/changelogs/CHANGELOG_V4.md index 1548e6c37e4..8408e2063a1 100644 --- a/doc/changelogs/CHANGELOG_V4.md +++ b/doc/changelogs/CHANGELOG_V4.md @@ -7,6 +7,7 @@ +4.8.1
4.8.0
4.7.3
4.7.2
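A quick aside on the tls.md hunks above before the changelog entries that follow: the reworded `rejectUnauthorized` entries make the default explicit. Verification is on unless the option is set to `false`, and for `tls.createServer()` the documented default moves from `false` to `true` (it only takes effect when `requestCert` is `true`). The snippet below is a minimal client-side sketch of that default, assuming a hypothetical server on `localhost:8443` and a CA bundle in `ca.pem`; it is not taken from the docs themselves.

```js
const fs = require('fs');
const tls = require('tls');

const socket = tls.connect({
  host: 'localhost',
  port: 8443,
  ca: [fs.readFileSync('ca.pem')],
  // Default behaviour: unless rejectUnauthorized is explicitly false, the
  // server certificate is verified against the supplied CAs.
  rejectUnauthorized: true
}, () => {
  console.log('authorized:', socket.authorized);
  socket.end();
});

socket.on('error', (err) => {
  // As documented above, err.code carries the OpenSSL error code when
  // verification fails, e.g. 'UNABLE_TO_VERIFY_LEAF_SIGNATURE'.
  console.error('TLS error:', err.code);
});
```

Setting `rejectUnauthorized: false` disables that check and is only appropriate when testing against self-signed certificates.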
@@ -58,6 +59,172 @@ [Node.js Long Term Support Plan](https://github.com/nodejs/LTS) and will be supported actively until April 2017 and maintained until April 2018. + +## 2017-03-21, Version 4.8.1 'Argon' (LTS), @MylesBorins + +This LTS release comes with 147 commits. This includes 55 which are test related, +41 which are doc related, 11 which are build / tool related, +and 1 commits which are updates to dependencies. + +### Notable Changes + +* **buffer**: The performance of `.toJSON()` is now up to 2859% faster on average. (Brian White) [#10895](https://github.com/nodejs/node/pull/10895) +* **IPC**: Batched writes have been enabled for process IPC on platforms that support Unix Domain Sockets. (Alexey Orlenko) [#10677](https://github.com/nodejs/node/pull/10677) + - Performance gains may be up to 40% for some workloads. +* **http**: + - Control characters are now always rejected when using `http.request()`. (Ben Noordhuis) [#8923](https://github.com/nodejs/node/pull/8923) +* **node**: Heap statistics now support values larger than 4GB. (Ben Noordhuis) [#10186](https://github.com/nodejs/node/pull/10186) + +### Commits + +* [[`77f23ec5af`](https://github.com/nodejs/node/commit/77f23ec5af)] - **assert**: unlock the assert API (Rich Trott) [#11304](https://github.com/nodejs/node/pull/11304) +* [[`090037a41a`](https://github.com/nodejs/node/commit/090037a41a)] - **assert**: remove unneeded condition (Rich Trott) [#11314](https://github.com/nodejs/node/pull/11314) +* [[`75af859af7`](https://github.com/nodejs/node/commit/75af859af7)] - **assert**: apply minor refactoring (Rich Trott) [#11511](https://github.com/nodejs/node/pull/11511) +* [[`994f562858`](https://github.com/nodejs/node/commit/994f562858)] - **assert**: update comments (Kai Cataldo) [#10579](https://github.com/nodejs/node/pull/10579) +* [[`14e57c1102`](https://github.com/nodejs/node/commit/14e57c1102)] - **benchmark**: add more thorough timers benchmarks (Jeremiah Senkpiel) [#10925](https://github.com/nodejs/node/pull/10925) +* [[`850f85d96e`](https://github.com/nodejs/node/commit/850f85d96e)] - **benchmark**: add benchmark for object properties (Michaël Zasso) [#10949](https://github.com/nodejs/node/pull/10949) +* [[`626875f2e4`](https://github.com/nodejs/node/commit/626875f2e4)] - **benchmark**: don't lint autogenerated modules (Brian White) [#10756](https://github.com/nodejs/node/pull/10756) +* [[`9da6ebd73f`](https://github.com/nodejs/node/commit/9da6ebd73f)] - **benchmark**: add dgram bind(+/- params) benchmark (Vse Mozhet Byt) [#11313](https://github.com/nodejs/node/pull/11313) +* [[`a597c11ba4`](https://github.com/nodejs/node/commit/a597c11ba4)] - **benchmark**: improve readability of net benchmarks (Brian White) [#10446](https://github.com/nodejs/node/pull/10446) +* [[`22c25dee92`](https://github.com/nodejs/node/commit/22c25dee92)] - **buffer**: improve toJSON() performance (Brian White) [#10895](https://github.com/nodejs/node/pull/10895) +* [[`af3c21197d`](https://github.com/nodejs/node/commit/af3c21197d)] - **build**: move source files from headers section (Daniel Bevenius) [#10850](https://github.com/nodejs/node/pull/10850) +* [[`4bb61553f0`](https://github.com/nodejs/node/commit/4bb61553f0)] - **build**: disable C4267 conversion compiler warning (Ben Noordhuis) [#11205](https://github.com/nodejs/node/pull/11205) +* [[`6a45ac0ea9`](https://github.com/nodejs/node/commit/6a45ac0ea9)] - **build**: fix newlines in addon build output (Brian White) [#11466](https://github.com/nodejs/node/pull/11466) +* 
[[`bfc553d55d`](https://github.com/nodejs/node/commit/bfc553d55d)] - **build**: fail on CI if leftover processes (Rich Trott) [#11269](https://github.com/nodejs/node/pull/11269) +* [[`094bfe66aa`](https://github.com/nodejs/node/commit/094bfe66aa)] - **build**: fix node_g target (Daniel Bevenius) [#10153](https://github.com/nodejs/node/pull/10153) +* [[`87db4f7225`](https://github.com/nodejs/node/commit/87db4f7225)] - **build**: Don't regenerate node symlink (sxa555) [#9827](https://github.com/nodejs/node/pull/9827) +* [[`e0dc0ceb37`](https://github.com/nodejs/node/commit/e0dc0ceb37)] - **build**: don't squash signal handlers with --shared (Stewart X Addison) [#10539](https://github.com/nodejs/node/pull/10539) +* [[`4676eec382`](https://github.com/nodejs/node/commit/4676eec382)] - **child_process**: remove empty if condition (cjihrig) [#11427](https://github.com/nodejs/node/pull/11427) +* [[`2b867d2ae5`](https://github.com/nodejs/node/commit/2b867d2ae5)] - **child_process**: refactor internal/child_process.js (Arseniy Maximov) [#11366](https://github.com/nodejs/node/pull/11366) +* [[`c9a92ff494`](https://github.com/nodejs/node/commit/c9a92ff494)] - **crypto**: return the retval of HMAC_Update (Travis Meisenheimer) [#10891](https://github.com/nodejs/node/pull/10891) +* [[`9c53e402d7`](https://github.com/nodejs/node/commit/9c53e402d7)] - **crypto**: freelist_max_len is gone in OpenSSL 1.1.0 (Adam Langley) [#10859](https://github.com/nodejs/node/pull/10859) +* [[`c6f6b029a1`](https://github.com/nodejs/node/commit/c6f6b029a1)] - **crypto**: add cert check issued by StartCom/WoSign (Shigeki Ohtsu) [#9469](https://github.com/nodejs/node/pull/9469) +* [[`c56719f47a`](https://github.com/nodejs/node/commit/c56719f47a)] - **crypto**: Remove expired certs from CNNIC whitelist (Shigeki Ohtsu) [#9469](https://github.com/nodejs/node/pull/9469) +* [[`b48f6ffc63`](https://github.com/nodejs/node/commit/b48f6ffc63)] - **crypto**: use CHECK_NE instead of ABORT or abort (Sam Roberts) [#10413](https://github.com/nodejs/node/pull/10413) +* [[`35a660ee70`](https://github.com/nodejs/node/commit/35a660ee70)] - **crypto**: fix handling of root_cert_store. 
(Adam Langley) [#9409](https://github.com/nodejs/node/pull/9409) +* [[`3516f35b77`](https://github.com/nodejs/node/commit/3516f35b77)] - **deps**: backport 7c3748a from upstream V8 (Cristian Cavalli) [#10873](https://github.com/nodejs/node/pull/10873) +* [[`f9e121ead8`](https://github.com/nodejs/node/commit/f9e121ead8)] - **dgram**: fix possibly deoptimizing use of arguments (Vse Mozhet Byt) +* [[`fc2bb2c8ef`](https://github.com/nodejs/node/commit/fc2bb2c8ef)] - **doc**: remove Chris Dickinson from active releasers (Ben Noordhuis) [#11011](https://github.com/nodejs/node/pull/11011) +* [[`725a89606b`](https://github.com/nodejs/node/commit/725a89606b)] - **doc**: remove duplicate properties bullet in readme (Javis Sullivan) [#10741](https://github.com/nodejs/node/pull/10741) +* [[`db03294c41`](https://github.com/nodejs/node/commit/db03294c41)] - **doc**: fix typo in http.md (Peter Mescalchin) [#10975](https://github.com/nodejs/node/pull/10975) +* [[`15188900b8`](https://github.com/nodejs/node/commit/15188900b8)] - **doc**: add who to CC list for dgram (cjihrig) [#11035](https://github.com/nodejs/node/pull/11035) +* [[`a0742902bd`](https://github.com/nodejs/node/commit/a0742902bd)] - **doc**: correct and complete dgram's Socket.bind docs (Alex Jordan) [#11025](https://github.com/nodejs/node/pull/11025) +* [[`f464dd837f`](https://github.com/nodejs/node/commit/f464dd837f)] - **doc**: edit CONTRIBUTING.md for clarity (Rich Trott) [#11045](https://github.com/nodejs/node/pull/11045) +* [[`07dfed8f45`](https://github.com/nodejs/node/commit/07dfed8f45)] - **doc**: fix confusing example in dns.md (Vse Mozhet Byt) [#11022](https://github.com/nodejs/node/pull/11022) +* [[`d55d760086`](https://github.com/nodejs/node/commit/d55d760086)] - **doc**: add personal pronouns option (Rich Trott) [#11089](https://github.com/nodejs/node/pull/11089) +* [[`b86843a463`](https://github.com/nodejs/node/commit/b86843a463)] - **doc**: clarify msg when doc/api/cli.md not updated (Stewart X Addison) [#10872](https://github.com/nodejs/node/pull/10872) +* [[`c2d70908e6`](https://github.com/nodejs/node/commit/c2d70908e6)] - **doc**: edit stability text for clarity and style (Rich Trott) [#11112](https://github.com/nodejs/node/pull/11112) +* [[`115448ec94`](https://github.com/nodejs/node/commit/115448ec94)] - **doc**: remove assertions about assert (Rich Trott) [#11113](https://github.com/nodejs/node/pull/11113) +* [[`e90317d739`](https://github.com/nodejs/node/commit/e90317d739)] - **doc**: fix "initial delay" link in http.md (Timo Tijhof) [#11108](https://github.com/nodejs/node/pull/11108) +* [[`788d736ab6`](https://github.com/nodejs/node/commit/788d736ab6)] - **doc**: typographical fixes in COLLABORATOR_GUIDE.md (Anna Henningsen) [#11163](https://github.com/nodejs/node/pull/11163) +* [[`2016aa4e07`](https://github.com/nodejs/node/commit/2016aa4e07)] - **doc**: add not-an-aardvark as ESLint contact (Rich Trott) [#11169](https://github.com/nodejs/node/pull/11169) +* [[`2b6ee39264`](https://github.com/nodejs/node/commit/2b6ee39264)] - **doc**: improve testing guide (Joyee Cheung) [#11150](https://github.com/nodejs/node/pull/11150) +* [[`aae768c599`](https://github.com/nodejs/node/commit/aae768c599)] - **doc**: remove extraneous paragraph from assert doc (Rich Trott) [#11174](https://github.com/nodejs/node/pull/11174) +* [[`ca4b2f6154`](https://github.com/nodejs/node/commit/ca4b2f6154)] - **doc**: fix typo in dgram doc (Rich Trott) [#11186](https://github.com/nodejs/node/pull/11186) +* 
[[`bb1e97c31a`](https://github.com/nodejs/node/commit/bb1e97c31a)] - **doc**: add and fix System Error properties (Daiki Arai) [#10986](https://github.com/nodejs/node/pull/10986) +* [[`e1e02efac5`](https://github.com/nodejs/node/commit/e1e02efac5)] - **doc**: clarify the behavior of Buffer.byteLength (Nikolai Vavilov) [#11238](https://github.com/nodejs/node/pull/11238) +* [[`30d9202f54`](https://github.com/nodejs/node/commit/30d9202f54)] - **doc**: improve consistency in documentation titles (Vse Mozhet Byt) [#11230](https://github.com/nodejs/node/pull/11230) +* [[`10afa8befc`](https://github.com/nodejs/node/commit/10afa8befc)] - **doc**: drop "and io.js" from release section (Ben Noordhuis) [#11054](https://github.com/nodejs/node/pull/11054) +* [[`6f1db35e27`](https://github.com/nodejs/node/commit/6f1db35e27)] - **doc**: update email and add personal pronoun (JungMinu) [#11318](https://github.com/nodejs/node/pull/11318) +* [[`61ac3346ba`](https://github.com/nodejs/node/commit/61ac3346ba)] - **doc**: update code examples in domain.md (Vse Mozhet Byt) [#11110](https://github.com/nodejs/node/pull/11110) +* [[`0c9ea4fe8b`](https://github.com/nodejs/node/commit/0c9ea4fe8b)] - **doc**: dns examples implied string args were arrays (Sam Roberts) [#11350](https://github.com/nodejs/node/pull/11350) +* [[`485ec6c180`](https://github.com/nodejs/node/commit/485ec6c180)] - **doc**: change STYLE-GUIDE to STYLE_GUIDE (Dean Coakley) [#11460](https://github.com/nodejs/node/pull/11460) +* [[`41bf266b0a`](https://github.com/nodejs/node/commit/41bf266b0a)] - **doc**: add STYLE_GUIDE (moved from nodejs/docs) (Gibson Fahnestock) [#11321](https://github.com/nodejs/node/pull/11321) +* [[`6abfcd560b`](https://github.com/nodejs/node/commit/6abfcd560b)] - **doc**: add comment for net.Server's error event (QianJin2013) [#11136](https://github.com/nodejs/node/pull/11136) +* [[`f4bc12dd11`](https://github.com/nodejs/node/commit/f4bc12dd11)] - **doc**: note message event listeners ref IPC channels (Diego Rodríguez Baquero) [#11494](https://github.com/nodejs/node/pull/11494) +* [[`09c9105a79`](https://github.com/nodejs/node/commit/09c9105a79)] - **doc**: argument types for assert methods (Amelia Clarke) [#11548](https://github.com/nodejs/node/pull/11548) +* [[`d622b67302`](https://github.com/nodejs/node/commit/d622b67302)] - **doc**: document clientRequest.aborted (Zach Bjornson) [#11544](https://github.com/nodejs/node/pull/11544) +* [[`d0dbf12884`](https://github.com/nodejs/node/commit/d0dbf12884)] - **doc**: update TheAlphaNerd to MylesBorins (Myles Borins) [#10586](https://github.com/nodejs/node/pull/10586) +* [[`05273c5a4e`](https://github.com/nodejs/node/commit/05273c5a4e)] - **doc**: update AUTHORS list to fix name (Noah Rose Ledesma) [#10945](https://github.com/nodejs/node/pull/10945) +* [[`79f700c891`](https://github.com/nodejs/node/commit/79f700c891)] - **doc**: add TimothyGu to collaborators (Timothy Gu) [#10954](https://github.com/nodejs/node/pull/10954) +* [[`e656a4244a`](https://github.com/nodejs/node/commit/e656a4244a)] - **doc**: add edsadr to collaborators (Adrian Estrada) [#10883](https://github.com/nodejs/node/pull/10883) +* [[`6d0e1621e5`](https://github.com/nodejs/node/commit/6d0e1621e5)] - **doc**: clarifying variables in fs.write() (Jessica Quynh Tran) [#9792](https://github.com/nodejs/node/pull/9792) +* [[`7287dddd69`](https://github.com/nodejs/node/commit/7287dddd69)] - **doc**: add links for zlib convenience methods (Anna Henningsen) [#10829](https://github.com/nodejs/node/pull/10829) +* 
[[`b10842ac77`](https://github.com/nodejs/node/commit/b10842ac77)] - **doc**: sort require statements in tests (Sam Roberts) [#10616](https://github.com/nodejs/node/pull/10616) +* [[`8f0e31b2d9`](https://github.com/nodejs/node/commit/8f0e31b2d9)] - **doc**: add test naming information to guide (Rich Trott) [#10584](https://github.com/nodejs/node/pull/10584) +* [[`56b779db93`](https://github.com/nodejs/node/commit/56b779db93)] - **doc**: "s/git apply/git am -3" in V8 guide (Myles Borins) [#10665](https://github.com/nodejs/node/pull/10665) +* [[`3be7a7adb5`](https://github.com/nodejs/node/commit/3be7a7adb5)] - **doc**: update LTS info for current releases (Evan Lucas) [#10720](https://github.com/nodejs/node/pull/10720) +* [[`530adfdb2a`](https://github.com/nodejs/node/commit/530adfdb2a)] - **doc**: improve rinfo object documentation (Matt Crummey) [#10050](https://github.com/nodejs/node/pull/10050) +* [[`48b5097ea8`](https://github.com/nodejs/node/commit/48b5097ea8)] - **http**: make request.abort() destroy the socket (Luigi Pinca) [#10818](https://github.com/nodejs/node/pull/10818) +* [[`15231aa6e5`](https://github.com/nodejs/node/commit/15231aa6e5)] - **http**: reject control characters in http.request() (Ben Noordhuis) [#8923](https://github.com/nodejs/node/pull/8923) +* [[`fc2cd63998`](https://github.com/nodejs/node/commit/fc2cd63998)] - **lib,src**: support values \> 4GB in heap statistics (Ben Noordhuis) [#10186](https://github.com/nodejs/node/pull/10186) +* [[`533d2bf0a9`](https://github.com/nodejs/node/commit/533d2bf0a9)] - **meta**: add explicit deprecation and semver-major policy (James M Snell) [#7964](https://github.com/nodejs/node/pull/7964) +* [[`923309adef`](https://github.com/nodejs/node/commit/923309adef)] - **meta**: remove Chris Dickinson from CTC (Chris Dickinson) [#11267](https://github.com/nodejs/node/pull/11267) +* [[`342c3e2bb4`](https://github.com/nodejs/node/commit/342c3e2bb4)] - **meta**: adding Italo A. Casas PGP Fingerprint (Italo A. 
Casas) [#11202](https://github.com/nodejs/node/pull/11202) +* [[`434b00be8a`](https://github.com/nodejs/node/commit/434b00be8a)] - **meta**: decharter the http working group (James M Snell) [#10604](https://github.com/nodejs/node/pull/10604) +* [[`a7df345921`](https://github.com/nodejs/node/commit/a7df345921)] - **net**: prefer === to == (Arseniy Maximov) [#11513](https://github.com/nodejs/node/pull/11513) +* [[`396688f075`](https://github.com/nodejs/node/commit/396688f075)] - **readline**: refactor construct Interface (Jackson Tian) [#4740](https://github.com/nodejs/node/pull/4740) +* [[`a40f8429e6`](https://github.com/nodejs/node/commit/a40f8429e6)] - **readline**: update 6 comparions to strict (Umair Ishaq) [#11078](https://github.com/nodejs/node/pull/11078) +* [[`90d8e118fb`](https://github.com/nodejs/node/commit/90d8e118fb)] - **src**: add a missing space in node_os.cc (Alexey Orlenko) [#10931](https://github.com/nodejs/node/pull/10931) +* [[`279cb09cc3`](https://github.com/nodejs/node/commit/279cb09cc3)] - **src**: enable writev for pipe handles on Unix (Alexey Orlenko) [#10677](https://github.com/nodejs/node/pull/10677) +* [[`a557d6ce1d`](https://github.com/nodejs/node/commit/a557d6ce1d)] - **src**: unconsume stream fix in internal http impl (Roee Kasher) [#11015](https://github.com/nodejs/node/pull/11015) +* [[`c4e1af712e`](https://github.com/nodejs/node/commit/c4e1af712e)] - **src**: remove unused typedef (Ben Noordhuis) [#11322](https://github.com/nodejs/node/pull/11322) +* [[`da2adb7133`](https://github.com/nodejs/node/commit/da2adb7133)] - **src**: update http-parser link (Daniel Bevenius) [#11477](https://github.com/nodejs/node/pull/11477) +* [[`2f48001574`](https://github.com/nodejs/node/commit/2f48001574)] - **src**: use ABORT() macro instead of abort() (Evan Lucas) [#9613](https://github.com/nodejs/node/pull/9613) +* [[`a9eb093ce3`](https://github.com/nodejs/node/commit/a9eb093ce3)] - **src**: fix memory leak introduced in 34febfbf4 (Ben Noordhuis) [#9604](https://github.com/nodejs/node/pull/9604) +* [[`f854d8c789`](https://github.com/nodejs/node/commit/f854d8c789)] - **test**: increase setMulticastLoopback() coverage (cjihrig) [#11277](https://github.com/nodejs/node/pull/11277) +* [[`1df09f9d37`](https://github.com/nodejs/node/commit/1df09f9d37)] - **test**: add known_issues test for #10223 (AnnaMag) [#11024](https://github.com/nodejs/node/pull/11024) +* [[`be34b629de`](https://github.com/nodejs/node/commit/be34b629de)] - **test**: increase coverage for stream's duplex (abouthiroppy) [#10963](https://github.com/nodejs/node/pull/10963) +* [[`dc24127e5c`](https://github.com/nodejs/node/commit/dc24127e5c)] - **test**: allow for slow hosts in spawnSync() test (Rich Trott) [#10998](https://github.com/nodejs/node/pull/10998) +* [[`2f4b6bda97`](https://github.com/nodejs/node/commit/2f4b6bda97)] - **test**: expand test coverage of fs.js (Vinícius do Carmo) [#10947](https://github.com/nodejs/node/pull/10947) +* [[`3f6a2dbc2f`](https://github.com/nodejs/node/commit/3f6a2dbc2f)] - **test**: enhance test-timers (Rich Trott) [#10960](https://github.com/nodejs/node/pull/10960) +* [[`6ca9901d8b`](https://github.com/nodejs/node/commit/6ca9901d8b)] - **test**: add process.assert's test (abouthiroppy) [#10911](https://github.com/nodejs/node/pull/10911) +* [[`d8af5a7431`](https://github.com/nodejs/node/commit/d8af5a7431)] - **test**: improve code in test-crypto-verify (Adrian Estrada) [#10845](https://github.com/nodejs/node/pull/10845) +* 
[[`4d1f7b1df8`](https://github.com/nodejs/node/commit/4d1f7b1df8)] - **test**: add dgram.Socket.prototype.bind's test (abouthiroppy) [#10894](https://github.com/nodejs/node/pull/10894) +* [[`6c1d82c68a`](https://github.com/nodejs/node/commit/6c1d82c68a)] - **test**: improving coverage for dgram (abouthiroppy) [#10783](https://github.com/nodejs/node/pull/10783) +* [[`017afd48fd`](https://github.com/nodejs/node/commit/017afd48fd)] - **test**: improve code in test-console-instance (Adrian Estrada) [#10813](https://github.com/nodejs/node/pull/10813) +* [[`1b1ba741c3`](https://github.com/nodejs/node/commit/1b1ba741c3)] - **test**: improve code in test-domain-multi (Adrian Estrada) [#10798](https://github.com/nodejs/node/pull/10798) +* [[`ee27917a65`](https://github.com/nodejs/node/commit/ee27917a65)] - **test**: improve test-stream2-large-read-stall (stefan judis) [#10725](https://github.com/nodejs/node/pull/10725) +* [[`9ac2316595`](https://github.com/nodejs/node/commit/9ac2316595)] - **test**: improve code in test-http-host-headers (Adrian Estrada) [#10830](https://github.com/nodejs/node/pull/10830) +* [[`a9278a063f`](https://github.com/nodejs/node/commit/a9278a063f)] - **test**: refactor cluster-preload.js (abouthiroppy) [#10701](https://github.com/nodejs/node/pull/10701) +* [[`db60d92e15`](https://github.com/nodejs/node/commit/db60d92e15)] - **test**: test hmac binding robustness (Sam Roberts) [#10923](https://github.com/nodejs/node/pull/10923) +* [[`a1a850f066`](https://github.com/nodejs/node/commit/a1a850f066)] - **test**: don't connect to :: (use localhost instead) (Gibson Fahnestock) +* [[`b3a8e95af3`](https://github.com/nodejs/node/commit/b3a8e95af3)] - **test**: improve test-assert (richnologies) [#10916](https://github.com/nodejs/node/pull/10916) +* [[`56970efe51`](https://github.com/nodejs/node/commit/56970efe51)] - **test**: increase coverage for punycode's decode (abouthiroppy) [#10940](https://github.com/nodejs/node/pull/10940) +* [[`df69c2148a`](https://github.com/nodejs/node/commit/df69c2148a)] - **test**: check fd 0,1,2 are used, not access mode (John Barboza) [#10339](https://github.com/nodejs/node/pull/10339) +* [[`7bceb4fb48`](https://github.com/nodejs/node/commit/7bceb4fb48)] - **test**: add message verification on assert.throws (Travis Meisenheimer) [#10890](https://github.com/nodejs/node/pull/10890) +* [[`1c223ecc70`](https://github.com/nodejs/node/commit/1c223ecc70)] - **test**: add http-common's test (abouthiroppy) [#10832](https://github.com/nodejs/node/pull/10832) +* [[`89e9da6b6d`](https://github.com/nodejs/node/commit/89e9da6b6d)] - **test**: tests for _readableStream.awaitDrain (Mark) [#8914](https://github.com/nodejs/node/pull/8914) +* [[`53b0f413cd`](https://github.com/nodejs/node/commit/53b0f413cd)] - **test**: improve the code in test-process-cpuUsage (Adrian Estrada) [#10714](https://github.com/nodejs/node/pull/10714) +* [[`b3d1700d1f`](https://github.com/nodejs/node/commit/b3d1700d1f)] - **test**: improve tests in pummel/test-exec (Chase Starr) [#10757](https://github.com/nodejs/node/pull/10757) +* [[`6e7dfb1f45`](https://github.com/nodejs/node/commit/6e7dfb1f45)] - **test**: fix temp-dir option in tools/test.py (Gibson Fahnestock) [#10723](https://github.com/nodejs/node/pull/10723) +* [[`9abde3ac6e`](https://github.com/nodejs/node/commit/9abde3ac6e)] - **test**: use realpath for NODE_TEST_DIR in common.js (Gibson Fahnestock) [#10723](https://github.com/nodejs/node/pull/10723) +* [[`f86c64a13a`](https://github.com/nodejs/node/commit/f86c64a13a)] - **test**: 
refactor the code of test-keep-alive.js (sivaprasanna) [#10684](https://github.com/nodejs/node/pull/10684) +* [[`4d51db87dc`](https://github.com/nodejs/node/commit/4d51db87dc)] - **test**: refactor test-doctool-html.js (abouthiroppy) [#10696](https://github.com/nodejs/node/pull/10696) +* [[`ab65429e44`](https://github.com/nodejs/node/commit/ab65429e44)] - **test**: refactor test-watch-file.js (sivaprasanna) [#10679](https://github.com/nodejs/node/pull/10679) +* [[`4453c0c1dc`](https://github.com/nodejs/node/commit/4453c0c1dc)] - **test**: refactor the code in test-child-process-spawn-loop.js (sivaprasanna) [#10605](https://github.com/nodejs/node/pull/10605) +* [[`42b86ea968`](https://github.com/nodejs/node/commit/42b86ea968)] - **test**: improve test-http-chunked-304 (Adrian Estrada) [#10462](https://github.com/nodejs/node/pull/10462) +* [[`1ae95e64ee`](https://github.com/nodejs/node/commit/1ae95e64ee)] - **test**: improve test-fs-readfile-zero-byte-liar (Adrian Estrada) [#10570](https://github.com/nodejs/node/pull/10570) +* [[`3f3c78d785`](https://github.com/nodejs/node/commit/3f3c78d785)] - **test**: refactor test-fs-utimes (Junshu Okamoto) [#9290](https://github.com/nodejs/node/pull/9290) +* [[`50a868b3f7`](https://github.com/nodejs/node/commit/50a868b3f7)] - **test**: require handler to be run in sigwinch test (Rich Trott) [#11068](https://github.com/nodejs/node/pull/11068) +* [[`c1f45ec2d0`](https://github.com/nodejs/node/commit/c1f45ec2d0)] - **test**: add 2nd argument to throws in test-assert (Marlena Compton) [#11061](https://github.com/nodejs/node/pull/11061) +* [[`f24aa7e071`](https://github.com/nodejs/node/commit/f24aa7e071)] - **test**: improve error messages in test-npm-install (Gonen Dukas) [#11027](https://github.com/nodejs/node/pull/11027) +* [[`1db89d4009`](https://github.com/nodejs/node/commit/1db89d4009)] - **test**: improve coverage on removeListeners functions (matsuda-koushi) [#11140](https://github.com/nodejs/node/pull/11140) +* [[`c532c16e53`](https://github.com/nodejs/node/commit/c532c16e53)] - **test**: increase specificity in dgram test (Rich Trott) [#11187](https://github.com/nodejs/node/pull/11187) +* [[`cb81ae8eea`](https://github.com/nodejs/node/commit/cb81ae8eea)] - **test**: add vm module edge cases (Franziska Hinkelmann) [#11265](https://github.com/nodejs/node/pull/11265) +* [[`8629c956c3`](https://github.com/nodejs/node/commit/8629c956c3)] - **test**: improve punycode test coverage (Sebastian Van Sande) [#11144](https://github.com/nodejs/node/pull/11144) +* [[`caf1ba15f9`](https://github.com/nodejs/node/commit/caf1ba15f9)] - **test**: add coverage for dgram _createSocketHandle() (cjihrig) [#11291](https://github.com/nodejs/node/pull/11291) +* [[`d729e52ef3`](https://github.com/nodejs/node/commit/d729e52ef3)] - **test**: improve crypto coverage (Akito Ito) [#11280](https://github.com/nodejs/node/pull/11280) +* [[`d1a8588cab`](https://github.com/nodejs/node/commit/d1a8588cab)] - **test**: improve message in net-connect-local-error (Rich Trott) [#11393](https://github.com/nodejs/node/pull/11393) +* [[`f2fb4143b4`](https://github.com/nodejs/node/commit/f2fb4143b4)] - **test**: refactor test-dgram-membership (Rich Trott) [#11388](https://github.com/nodejs/node/pull/11388) +* [[`bf4703d66f`](https://github.com/nodejs/node/commit/bf4703d66f)] - **test**: remove unused args and comparison fix (Alexander) [#11396](https://github.com/nodejs/node/pull/11396) +* [[`28471c23ff`](https://github.com/nodejs/node/commit/28471c23ff)] - **test**: refactor 
test-http-response-splitting (Arseniy Maximov) [#11429](https://github.com/nodejs/node/pull/11429) +* [[`cd3e17e248`](https://github.com/nodejs/node/commit/cd3e17e248)] - **test**: improve coverage in test-crypto.dh (Eric Christie) [#11253](https://github.com/nodejs/node/pull/11253) +* [[`fa681ea55a`](https://github.com/nodejs/node/commit/fa681ea55a)] - **test**: add regex check to test-module-loading (Tarang Hirani) [#11413](https://github.com/nodejs/node/pull/11413) +* [[`f0eee61a93`](https://github.com/nodejs/node/commit/f0eee61a93)] - **test**: throw check in test-zlib-write-after-close (Jason Wilson) [#11482](https://github.com/nodejs/node/pull/11482) +* [[`f0c7c7fad4`](https://github.com/nodejs/node/commit/f0c7c7fad4)] - **test**: fix flaky test-vm-timeout-rethrow (Kunal Pathak) [#11530](https://github.com/nodejs/node/pull/11530) +* [[`53f2848dc8`](https://github.com/nodejs/node/commit/53f2848dc8)] - **test**: favor assertions over console logging (Rich Trott) [#11547](https://github.com/nodejs/node/pull/11547) +* [[`0109321fd8`](https://github.com/nodejs/node/commit/0109321fd8)] - **test**: refactor test-https-truncate (Rich Trott) [#10225](https://github.com/nodejs/node/pull/10225) +* [[`536733697c`](https://github.com/nodejs/node/commit/536733697c)] - **test**: simplify test-http-client-unescaped-path (Rod Vagg) [#9649](https://github.com/nodejs/node/pull/9649) +* [[`4ce9bfb4e7`](https://github.com/nodejs/node/commit/4ce9bfb4e7)] - **test**: exclude pseudo-tty test pertinent to #11541 (Gireesh Punathil) [#11602](https://github.com/nodejs/node/pull/11602) +* [[`53dd1a8539`](https://github.com/nodejs/node/commit/53dd1a8539)] - **tls**: do not crash on STARTTLS when OCSP requested (Fedor Indutny) [#10706](https://github.com/nodejs/node/pull/10706) +* [[`e607ff52fa`](https://github.com/nodejs/node/commit/e607ff52fa)] - **tools**: rename eslintrc to an undeprecated format (Sakthipriyan Vairamani) [#7699](https://github.com/nodejs/node/pull/7699) +* [[`6648b729b7`](https://github.com/nodejs/node/commit/6648b729b7)] - **tools**: add compile_commands.json gyp generator (Ben Noordhuis) [#7986](https://github.com/nodejs/node/pull/7986) +* [[`8f49962f47`](https://github.com/nodejs/node/commit/8f49962f47)] - **tools**: suggest python2 command in configure (Roman Reiss) [#11375](https://github.com/nodejs/node/pull/11375) +* [[`4b83a83c06`](https://github.com/nodejs/node/commit/4b83a83c06)] - **tools,doc**: add Google Analytics tracking. (Phillip Johnsen) [#6601](https://github.com/nodejs/node/pull/6601) +* [[`ef63af6006`](https://github.com/nodejs/node/commit/ef63af6006)] - **tty**: avoid oob warning in TTYWrap::GetWindowSize() (Dmitry Tsvettsikh) [#11454](https://github.com/nodejs/node/pull/11454) +* [[`2c84601062`](https://github.com/nodejs/node/commit/2c84601062)] - **util**: don't init Debug if it's not needed yet (Bryan English) [#8452](https://github.com/nodejs/node/pull/8452) + ## 2017-02-21, Version 4.8.0 'Argon' (LTS), @MylesBorins diff --git a/doc/changelogs/CHANGELOG_V6.md b/doc/changelogs/CHANGELOG_V6.md index b355732983a..ab2ab616d3d 100644 --- a/doc/changelogs/CHANGELOG_V6.md +++ b/doc/changelogs/CHANGELOG_V6.md @@ -7,6 +7,7 @@ +6.10.1
6.10.0
6.9.5
6.9.4
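Both the 4.8.1 notes above and the 6.10.1 notes that follow call out the same `http.request()` hardening: control characters in the request path are now always rejected, which blocks header-injection tricks built from untrusted input. The sketch below illustrates the effect under that assumption; the exact error type and message are not quoted here and may differ between release lines.

```js
const http = require('http');

// Hypothetical untrusted input containing CR/LF control characters that
// could otherwise smuggle an extra header into the outgoing request.
const userSuppliedPath = '/search?q=foo\r\nX-Injected: 1';

try {
  // After this change, http.request() refuses a path with control characters
  // up front instead of writing a malformed request to the socket.
  http.request({ host: 'localhost', port: 8080, path: userSuppliedPath });
} catch (err) {
  console.error('request rejected:', err.message);
}
```

Escaping untrusted parts of the path first (for example with `encodeURIComponent()` on the query value) keeps such requests valid.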
@@ -46,6 +47,333 @@ [Node.js Long Term Support Plan](https://github.com/nodejs/LTS) and will be supported actively until April 2018 and maintained until April 2019. + +## 2017-03-21, Version 6.10.1 'Boron' (LTS), @MylesBorins + +This LTS release comes with 297 commits. This includes 124 which are test related, +79 which are doc related, 16 which are build / tool related and 4 commits which are updates to dependencies. + +### Notable changes + +* **performance**: The performance of several APIs has been improved. + - `Buffer.compare()` is up to 35% faster on average. (Brian White) [#10927](https://github.com/nodejs/node/pull/10927) + - `buffer.toJSON()` is up to 2859% faster on average. (Brian White) [#10895](https://github.com/nodejs/node/pull/10895) + - `fs.*statSync()` functions are now up to 9.3% faster on average. (Brian White) [#11522](https://github.com/nodejs/node/pull/11522) + - `os.loadavg` is up to 151% faster. (Brian White) [#11516](https://github.com/nodejs/node/pull/11516) + - `process.memoryUsage()` is up to 34% faster. (Brian White) [#11497](https://github.com/nodejs/node/pull/11497) + - `querystring.unescape()` for `Buffer`s is 15% faster on average. (Brian White) [#10837](https://github.com/nodejs/node/pull/10837) + - `querystring.stringify()` is up to 7.8% faster on average. (Brian White) [#10852](https://github.com/nodejs/node/pull/10852) + - `querystring.parse()` is up to 21% faster on average. (Brian White) [#10874](https://github.com/nodejs/node/pull/10874) +* **IPC**: Batched writes have been enabled for process IPC on platforms that support Unix Domain Sockets. (Alexey Orlenko) [#10677](https://github.com/nodejs/node/pull/10677) + - Performance gains may be up to 40% for some workloads. +* **child_process**: `spawnSync` now returns a null `status` when child is terminated by a signal. (cjihrig) [#11288](https://github.com/nodejs/node/pull/11288) + - This fixes the behavior to act like `spawn()` does. +* **http**: + - Control characters are now always rejected when using `http.request()`. (Ben Noordhuis) [#8923](https://github.com/nodejs/node/pull/8923) + - Debug messages have been added for cases when headers contain invalid values. (Evan Lucas) [#9195](https://github.com/nodejs/node/pull/9195) +* **node**: Heap statistics now support values larger than 4GB. (Ben Noordhuis) [#10186](https://github.com/nodejs/node/pull/10186) +* **timers**: Timer callbacks now always maintain order when interacting with domain error handling. 
(John Barboza) [#10522](https://github.com/nodejs/node/pull/10522) + +### Commits + +* [[`fb75bed078`](https://github.com/nodejs/node/commit/fb75bed078)] - **assert**: unlock the assert API (Rich Trott) [#11304](https://github.com/nodejs/node/pull/11304) +* [[`32b264c33b`](https://github.com/nodejs/node/commit/32b264c33b)] - **assert**: remove unneeded condition (Rich Trott) [#11314](https://github.com/nodejs/node/pull/11314) +* [[`a0c705ef79`](https://github.com/nodejs/node/commit/a0c705ef79)] - **assert**: apply minor refactoring (Rich Trott) [#11511](https://github.com/nodejs/node/pull/11511) +* [[`7ecfe4971a`](https://github.com/nodejs/node/commit/7ecfe4971a)] - **assert**: update comments (Kai Cataldo) [#10579](https://github.com/nodejs/node/pull/10579) +* [[`4d6fa8d040`](https://github.com/nodejs/node/commit/4d6fa8d040)] - **benchmark**: add more thorough timers benchmarks (Jeremiah Senkpiel) [#10925](https://github.com/nodejs/node/pull/10925) +* [[`406e623b13`](https://github.com/nodejs/node/commit/406e623b13)] - **benchmark**: add benchmark for object properties (Michaël Zasso) [#10949](https://github.com/nodejs/node/pull/10949) +* [[`7ee04c6015`](https://github.com/nodejs/node/commit/7ee04c6015)] - **benchmark**: don't lint autogenerated modules (Brian White) [#10756](https://github.com/nodejs/node/pull/10756) +* [[`d22d7cce7c`](https://github.com/nodejs/node/commit/d22d7cce7c)] - **benchmark**: move punycode benchmark out of net (Brian White) [#10446](https://github.com/nodejs/node/pull/10446) +* [[`6b361611c3`](https://github.com/nodejs/node/commit/6b361611c3)] - **benchmark**: move setImmediate benchmarks to timers (Joshua Colvin) [#11010](https://github.com/nodejs/node/pull/11010) +* [[`a469ce5826`](https://github.com/nodejs/node/commit/a469ce5826)] - **benchmark**: add assert.deep\[Strict\]Equal benchmarks (Joyee Cheung) [#11092](https://github.com/nodejs/node/pull/11092) +* [[`eca1e80722`](https://github.com/nodejs/node/commit/eca1e80722)] - **benchmark**: add dgram bind(+/- params) benchmark (Vse Mozhet Byt) [#11313](https://github.com/nodejs/node/pull/11313) +* [[`06c339dcce`](https://github.com/nodejs/node/commit/06c339dcce)] - **benchmark**: improve readability of net benchmarks (Brian White) [#10446](https://github.com/nodejs/node/pull/10446) +* [[`b4cf8c4036`](https://github.com/nodejs/node/commit/b4cf8c4036)] - **benchmark,lib,test**: adjust for linting (Rich Trott) [#10561](https://github.com/nodejs/node/pull/10561) +* [[`e397e6f94a`](https://github.com/nodejs/node/commit/e397e6f94a)] - **buffer**: improve compare() performance (Brian White) [#10927](https://github.com/nodejs/node/pull/10927) +* [[`2b52859535`](https://github.com/nodejs/node/commit/2b52859535)] - **buffer**: fix comments in bidirectionalIndexOf (dcposch@dcpos.ch) [#10162](https://github.com/nodejs/node/pull/10162) +* [[`f7879d98f8`](https://github.com/nodejs/node/commit/f7879d98f8)] - **buffer**: improve toJSON() performance (Brian White) [#10895](https://github.com/nodejs/node/pull/10895) +* [[`f83d035c50`](https://github.com/nodejs/node/commit/f83d035c50)] - **buffer**: convert offset & length to int properly (Sakthipriyan Vairamani (thefourtheye)) [#11176](https://github.com/nodejs/node/pull/11176) +* [[`cda593774f`](https://github.com/nodejs/node/commit/cda593774f)] - **build**: sort sources alphabetically (Daniel Bevenius) [#10892](https://github.com/nodejs/node/pull/10892) +* [[`2d31fd8bf7`](https://github.com/nodejs/node/commit/2d31fd8bf7)] - **build**: move source files from headers section 
(Daniel Bevenius) [#10850](https://github.com/nodejs/node/pull/10850) +* [[`b7c5295437`](https://github.com/nodejs/node/commit/b7c5295437)] - **build**: don't squash signal handlers with --shared (Stewart X Addison) [#10539](https://github.com/nodejs/node/pull/10539) +* [[`6772b1d81c`](https://github.com/nodejs/node/commit/6772b1d81c)] - **build**: disable C4267 conversion compiler warning (Ben Noordhuis) [#11205](https://github.com/nodejs/node/pull/11205) +* [[`93416e9b7a`](https://github.com/nodejs/node/commit/93416e9b7a)] - **build**: fix newlines in addon build output (Brian White) [#11466](https://github.com/nodejs/node/pull/11466) +* [[`2d5cb3b870`](https://github.com/nodejs/node/commit/2d5cb3b870)] - **build**: fail on CI if leftover processes (Rich Trott) [#11269](https://github.com/nodejs/node/pull/11269) +* [[`edcca78f10`](https://github.com/nodejs/node/commit/edcca78f10)] - **build**: add rule to clean addon tests build (Joyee Cheung) [#11519](https://github.com/nodejs/node/pull/11519) +* [[`0200a5a74e`](https://github.com/nodejs/node/commit/0200a5a74e)] - **build**: fix node_g target (Daniel Bevenius) [#10153](https://github.com/nodejs/node/pull/10153) +* [[`f44c0a5d7a`](https://github.com/nodejs/node/commit/f44c0a5d7a)] - **build**: Don't regenerate node symlink (sxa555) [#9827](https://github.com/nodejs/node/pull/9827) +* [[`947d07bd87`](https://github.com/nodejs/node/commit/947d07bd87)] - **child_process**: exit spawnSync with null on signal (cjihrig) [#11288](https://github.com/nodejs/node/pull/11288) +* [[`4179c7050f`](https://github.com/nodejs/node/commit/4179c7050f)] - **child_process**: move anonymous class to top level (Jackson Tian) [#11147](https://github.com/nodejs/node/pull/11147) +* [[`818cef848e`](https://github.com/nodejs/node/commit/818cef848e)] - **child_process**: remove empty if condition (cjihrig) [#11427](https://github.com/nodejs/node/pull/11427) +* [[`c371fdcf34`](https://github.com/nodejs/node/commit/c371fdcf34)] - **child_process**: refactor internal/child_process.js (Arseniy Maximov) [#11366](https://github.com/nodejs/node/pull/11366) +* [[`b662c117cb`](https://github.com/nodejs/node/commit/b662c117cb)] - **crypto**: return the retval of HMAC_Update (Travis Meisenheimer) [#10891](https://github.com/nodejs/node/pull/10891) +* [[`44510197dd`](https://github.com/nodejs/node/commit/44510197dd)] - **crypto**: freelist_max_len is gone in OpenSSL 1.1.0 (Adam Langley) [#10859](https://github.com/nodejs/node/pull/10859) +* [[`34614af53b`](https://github.com/nodejs/node/commit/34614af53b)] - **crypto**: add cert check issued by StartCom/WoSign (Shigeki Ohtsu) [#9469](https://github.com/nodejs/node/pull/9469) +* [[`b4b3bb4c5d`](https://github.com/nodejs/node/commit/b4b3bb4c5d)] - **crypto**: Remove expired certs from CNNIC whitelist (Shigeki Ohtsu) [#9469](https://github.com/nodejs/node/pull/9469) +* [[`1f44922e34`](https://github.com/nodejs/node/commit/1f44922e34)] - **crypto**: use CHECK_NE instead of ABORT or abort (Sam Roberts) [#10413](https://github.com/nodejs/node/pull/10413) +* [[`ccb6045f2d`](https://github.com/nodejs/node/commit/ccb6045f2d)] - **crypto,tls**: fix mutability of return values (Rich Trott) [#10795](https://github.com/nodejs/node/pull/10795) +* [[`3ab070d4e1`](https://github.com/nodejs/node/commit/3ab070d4e1)] - **deps**: backport dfb8d33 from V8 upstream (Michaël Zasso) [#11483](https://github.com/nodejs/node/pull/11483) +* [[`3fc6a2247f`](https://github.com/nodejs/node/commit/3fc6a2247f)] - **deps**: cherry-pick a814b8a from upstream V8 
(ishell@chromium.org) [#10733](https://github.com/nodejs/node/pull/10733) +* [[`254cb1cb77`](https://github.com/nodejs/node/commit/254cb1cb77)] - **deps**: back-port 73ee7943 from v8 upstream (Ben Noordhuis) [#9293](https://github.com/nodejs/node/pull/9293) +* [[`e774de1685`](https://github.com/nodejs/node/commit/e774de1685)] - **deps**: back-port 306c412c from v8 upstream (Ben Noordhuis) [#9293](https://github.com/nodejs/node/pull/9293) +* [[`e5d1e273d7`](https://github.com/nodejs/node/commit/e5d1e273d7)] - **dgram**: fix possibly deoptimizing use of arguments (Vse Mozhet Byt) [#11242](https://github.com/nodejs/node/pull/11242) +* [[`c7257e716f`](https://github.com/nodejs/node/commit/c7257e716f)] - **dgram**: remove this aliases (cjihrig) [#11243](https://github.com/nodejs/node/pull/11243) +* [[`227cc1e810`](https://github.com/nodejs/node/commit/227cc1e810)] - **doc**: restrict the ES.Next features usage in tests (DavidCai) [#11452](https://github.com/nodejs/node/pull/11452) +* [[`23246768fb`](https://github.com/nodejs/node/commit/23246768fb)] - **doc**: add missing entry in v6 changelog table (Luigi Pinca) [#11534](https://github.com/nodejs/node/pull/11534) +* [[`ff9a86a73e`](https://github.com/nodejs/node/commit/ff9a86a73e)] - **doc**: remove Chris Dickinson from active releasers (Ben Noordhuis) [#11011](https://github.com/nodejs/node/pull/11011) +* [[`313d1a3009`](https://github.com/nodejs/node/commit/313d1a3009)] - **doc**: for style, remove "isn't" contraction (Sam Roberts) [#10981](https://github.com/nodejs/node/pull/10981) +* [[`ab7587ed6c`](https://github.com/nodejs/node/commit/ab7587ed6c)] - **doc**: update http.md for consistency and clarity (Lance Ball) [#10715](https://github.com/nodejs/node/pull/10715) +* [[`21a94ab78c`](https://github.com/nodejs/node/commit/21a94ab78c)] - **doc**: clarify Buffer.indexOf/lastIndexOf edge cases (dcposch@dcpos.ch) [#10162](https://github.com/nodejs/node/pull/10162) +* [[`8c487de736`](https://github.com/nodejs/node/commit/8c487de736)] - **doc**: document argument variant in the repl.md (Vse Mozhet Byt) [#10221](https://github.com/nodejs/node/pull/10221) +* [[`130710476b`](https://github.com/nodejs/node/commit/130710476b)] - **doc**: DEFAULT_ECDH_CURVE was added in 0.11.13 (Sam Roberts) [#10983](https://github.com/nodejs/node/pull/10983) +* [[`5118e05b15`](https://github.com/nodejs/node/commit/5118e05b15)] - **doc**: HTTP response getHeader doc fix (Faiz Halde) [#10817](https://github.com/nodejs/node/pull/10817) +* [[`243652abbe`](https://github.com/nodejs/node/commit/243652abbe)] - **doc**: remove duplicate properties bullet in readme (Javis Sullivan) [#10741](https://github.com/nodejs/node/pull/10741) +* [[`fa8a394e51`](https://github.com/nodejs/node/commit/fa8a394e51)] - **doc**: specify sorted requires in tests (Sam Roberts) [#10716](https://github.com/nodejs/node/pull/10716) +* [[`1660311056`](https://github.com/nodejs/node/commit/1660311056)] - **doc**: fix typo in http.md (Peter Mescalchin) [#10975](https://github.com/nodejs/node/pull/10975) +* [[`8936814a70`](https://github.com/nodejs/node/commit/8936814a70)] - **doc**: add who to CC list for dgram (cjihrig) [#11035](https://github.com/nodejs/node/pull/11035) +* [[`b934058128`](https://github.com/nodejs/node/commit/b934058128)] - **doc**: correct and complete dgram's Socket.bind docs (Alex Jordan) [#11025](https://github.com/nodejs/node/pull/11025) +* [[`faa55fbe09`](https://github.com/nodejs/node/commit/faa55fbe09)] - **doc**: edit CONTRIBUTING.md for clarity (Rich Trott) 
[#11045](https://github.com/nodejs/node/pull/11045) +* [[`c26258e1fd`](https://github.com/nodejs/node/commit/c26258e1fd)] - **doc**: fix confusing example in dns.md (Vse Mozhet Byt) [#11022](https://github.com/nodejs/node/pull/11022) +* [[`8bf7f9f202`](https://github.com/nodejs/node/commit/8bf7f9f202)] - **doc**: add personal pronouns option (Rich Trott) [#11089](https://github.com/nodejs/node/pull/11089) +* [[`7c22a52a74`](https://github.com/nodejs/node/commit/7c22a52a74)] - **doc**: clarify msg when doc/api/cli.md not updated (Stewart X Addison) [#10872](https://github.com/nodejs/node/pull/10872) +* [[`d404d8b673`](https://github.com/nodejs/node/commit/d404d8b673)] - **doc**: edit stability text for clarity and style (Rich Trott) [#11112](https://github.com/nodejs/node/pull/11112) +* [[`38938e1ba9`](https://github.com/nodejs/node/commit/38938e1ba9)] - **doc**: remove assertions about assert (Rich Trott) [#11113](https://github.com/nodejs/node/pull/11113) +* [[`89d30908f2`](https://github.com/nodejs/node/commit/89d30908f2)] - **doc**: fix "initial delay" link in http.md (Timo Tijhof) [#11108](https://github.com/nodejs/node/pull/11108) +* [[`c0072f8d71`](https://github.com/nodejs/node/commit/c0072f8d71)] - **doc**: typographical fixes in COLLABORATOR_GUIDE.md (Anna Henningsen) [#11163](https://github.com/nodejs/node/pull/11163) +* [[`207142d050`](https://github.com/nodejs/node/commit/207142d050)] - **doc**: add not-an-aardvark as ESLint contact (Rich Trott) [#11169](https://github.com/nodejs/node/pull/11169) +* [[`3746eee19d`](https://github.com/nodejs/node/commit/3746eee19d)] - **doc**: improve testing guide (Joyee Cheung) [#11150](https://github.com/nodejs/node/pull/11150) +* [[`6cadc7160f`](https://github.com/nodejs/node/commit/6cadc7160f)] - **doc**: remove extraneous paragraph from assert doc (Rich Trott) [#11174](https://github.com/nodejs/node/pull/11174) +* [[`d5d8a8d7b5`](https://github.com/nodejs/node/commit/d5d8a8d7b5)] - **doc**: fix typo in dgram doc (Rich Trott) [#11186](https://github.com/nodejs/node/pull/11186) +* [[`59a1d00906`](https://github.com/nodejs/node/commit/59a1d00906)] - **doc**: add and fix System Error properties (Daiki Arai) [#10986](https://github.com/nodejs/node/pull/10986) +* [[`72adba4317`](https://github.com/nodejs/node/commit/72adba4317)] - **doc**: add links between cork() and uncork() (Matteo Collina) [#11222](https://github.com/nodejs/node/pull/11222) +* [[`1cd526c253`](https://github.com/nodejs/node/commit/1cd526c253)] - **doc**: clarify the behavior of Buffer.byteLength (Nikolai Vavilov) [#11238](https://github.com/nodejs/node/pull/11238) +* [[`b1bda165ce`](https://github.com/nodejs/node/commit/b1bda165ce)] - **doc**: edit maxBuffer/Unicode paragraph for clarity (Rich Trott) [#11228](https://github.com/nodejs/node/pull/11228) +* [[`1150af00f7`](https://github.com/nodejs/node/commit/1150af00f7)] - **doc**: improve consistency in documentation titles (Vse Mozhet Byt) [#11230](https://github.com/nodejs/node/pull/11230) +* [[`ade39cdf9c`](https://github.com/nodejs/node/commit/ade39cdf9c)] - **doc**: drop "and io.js" from release section (Ben Noordhuis) [#11054](https://github.com/nodejs/node/pull/11054) +* [[`c79d9f95d1`](https://github.com/nodejs/node/commit/c79d9f95d1)] - **doc**: update email and add personal pronoun (JungMinu) [#11318](https://github.com/nodejs/node/pull/11318) +* [[`7df4ee8d49`](https://github.com/nodejs/node/commit/7df4ee8d49)] - **doc**: update link to V8 Embedder's guide (Franziska Hinkelmann) 
[#11336](https://github.com/nodejs/node/pull/11336) +* [[`8468d823a8`](https://github.com/nodejs/node/commit/8468d823a8)] - **doc**: update code examples in domain.md (Vse Mozhet Byt) [#11110](https://github.com/nodejs/node/pull/11110) +* [[`10a497cdcb`](https://github.com/nodejs/node/commit/10a497cdcb)] - **doc**: describe when stdout/err is sync (Sam Roberts) [#10884](https://github.com/nodejs/node/pull/10884) +* [[`53d5002ef9`](https://github.com/nodejs/node/commit/53d5002ef9)] - **doc**: dns examples implied string args were arrays (Sam Roberts) [#11350](https://github.com/nodejs/node/pull/11350) +* [[`42304de4f7`](https://github.com/nodejs/node/commit/42304de4f7)] - **doc**: change STYLE-GUIDE to STYLE_GUIDE (Dean Coakley) [#11460](https://github.com/nodejs/node/pull/11460) +* [[`13a9ba9523`](https://github.com/nodejs/node/commit/13a9ba9523)] - **doc**: add STYLE_GUIDE (moved from nodejs/docs) (Gibson Fahnestock) [#11321](https://github.com/nodejs/node/pull/11321) +* [[`0164d9263e`](https://github.com/nodejs/node/commit/0164d9263e)] - **doc**: improve test/README.md (Joyee Cheung) [#11237](https://github.com/nodejs/node/pull/11237) +* [[`e0868aa529`](https://github.com/nodejs/node/commit/e0868aa529)] - **doc**: add comment for net.Server's error event (QianJin2013) [#11136](https://github.com/nodejs/node/pull/11136) +* [[`9a684a1511`](https://github.com/nodejs/node/commit/9a684a1511)] - **doc**: note message event listeners ref IPC channels (Diego Rodríguez Baquero) [#11494](https://github.com/nodejs/node/pull/11494) +* [[`bfa3989584`](https://github.com/nodejs/node/commit/bfa3989584)] - **doc**: argument types for assert methods (Amelia Clarke) [#11548](https://github.com/nodejs/node/pull/11548) +* [[`fc41a1d34d`](https://github.com/nodejs/node/commit/fc41a1d34d)] - **doc**: document clientRequest.aborted (Zach Bjornson) [#11544](https://github.com/nodejs/node/pull/11544) +* [[`ff77425eba`](https://github.com/nodejs/node/commit/ff77425eba)] - **doc**: link to readable and writeable stream section (Sebastian Van Sande) [#11517](https://github.com/nodejs/node/pull/11517) +* [[`4850b503dd`](https://github.com/nodejs/node/commit/4850b503dd)] - **doc**: update TheAlphaNerd to MylesBorins (Myles Borins) [#10586](https://github.com/nodejs/node/pull/10586) +* [[`d04de226a1`](https://github.com/nodejs/node/commit/d04de226a1)] - **doc**: update examples in api/crypto.md (Vse Mozhet Byt) [#10909](https://github.com/nodejs/node/pull/10909) +* [[`a045af3b95`](https://github.com/nodejs/node/commit/a045af3b95)] - **doc**: update AUTHORS list to fix name (Noah Rose Ledesma) [#10945](https://github.com/nodejs/node/pull/10945) +* [[`d266759b99`](https://github.com/nodejs/node/commit/d266759b99)] - **doc**: add TimothyGu to collaborators (Timothy Gu) [#10954](https://github.com/nodejs/node/pull/10954) +* [[`42a5989b39`](https://github.com/nodejs/node/commit/42a5989b39)] - **doc**: mention moderation repo in onboarding doc (Anna Henningsen) [#10869](https://github.com/nodejs/node/pull/10869) +* [[`cdc981f6e1`](https://github.com/nodejs/node/commit/cdc981f6e1)] - **doc**: add edsadr to collaborators (Adrian Estrada) [#10883](https://github.com/nodejs/node/pull/10883) +* [[`787d4ec197`](https://github.com/nodejs/node/commit/787d4ec197)] - **doc**: clarifying variables in fs.write() (Jessica Quynh Tran) [#9792](https://github.com/nodejs/node/pull/9792) +* [[`f48c86ce48`](https://github.com/nodejs/node/commit/f48c86ce48)] - **doc**: add links for zlib convenience methods (Anna Henningsen) 
[#10829](https://github.com/nodejs/node/pull/10829) +* [[`1dbb366611`](https://github.com/nodejs/node/commit/1dbb366611)] - **doc**: add missing `added:` tag for `zlib.constants` (Anna Henningsen) [#10826](https://github.com/nodejs/node/pull/10826) +* [[`867b4d87dc`](https://github.com/nodejs/node/commit/867b4d87dc)] - **doc**: fix broken internal link in process.md (Anna Henningsen) [#10828](https://github.com/nodejs/node/pull/10828) +* [[`6d726c07aa`](https://github.com/nodejs/node/commit/6d726c07aa)] - **doc**: update writable.write return value (Nathan Phillip Brink) [#10582](https://github.com/nodejs/node/pull/10582) +* [[`1975f82168`](https://github.com/nodejs/node/commit/1975f82168)] - **doc**: edit writing-tests.md (Rich Trott) [#10585](https://github.com/nodejs/node/pull/10585) +* [[`494ee5163f`](https://github.com/nodejs/node/commit/494ee5163f)] - **doc**: fix misleading language in vm docs (Alexey Orlenko) [#10708](https://github.com/nodejs/node/pull/10708) +* [[`8e807f6552`](https://github.com/nodejs/node/commit/8e807f6552)] - **doc**: mention cc-ing nodejs/url team for reviews (Anna Henningsen) [#10652](https://github.com/nodejs/node/pull/10652) +* [[`f9bd4a5645`](https://github.com/nodejs/node/commit/f9bd4a5645)] - **doc**: sort require statements in tests (Sam Roberts) [#10616](https://github.com/nodejs/node/pull/10616) +* [[`032d73841d`](https://github.com/nodejs/node/commit/032d73841d)] - **doc**: handle backpressure when write() return false (Matteo Collina) [#10631](https://github.com/nodejs/node/pull/10631) +* [[`af991c7a98`](https://github.com/nodejs/node/commit/af991c7a98)] - **doc**: add test naming information to guide (Rich Trott) [#10584](https://github.com/nodejs/node/pull/10584) +* [[`b5fd61d77a`](https://github.com/nodejs/node/commit/b5fd61d77a)] - **doc**: fix missing negation in stream.md (Johannes Rieken) [#10712](https://github.com/nodejs/node/pull/10712) +* [[`7e5a59e6fc`](https://github.com/nodejs/node/commit/7e5a59e6fc)] - **doc**: "s/git apply/git am -3" in V8 guide (Myles Borins) [#10665](https://github.com/nodejs/node/pull/10665) +* [[`789bafd693`](https://github.com/nodejs/node/commit/789bafd693)] - **doc**: update LTS info for current releases (Evan Lucas) [#10720](https://github.com/nodejs/node/pull/10720) +* [[`fef978584a`](https://github.com/nodejs/node/commit/fef978584a)] - **doc**: update BUILDING.md (Lukasz Gasior) [#10669](https://github.com/nodejs/node/pull/10669) +* [[`f2ddc72b62`](https://github.com/nodejs/node/commit/f2ddc72b62)] - **doc**: document use of Refs: for references (Gibson Fahnestock) [#10670](https://github.com/nodejs/node/pull/10670) +* [[`0a1d15fba6`](https://github.com/nodejs/node/commit/0a1d15fba6)] - **doc**: clarify information about ABI version (Rich Trott) [#10419](https://github.com/nodejs/node/pull/10419) +* [[`22f3813b3e`](https://github.com/nodejs/node/commit/22f3813b3e)] - **doc**: clarify the statement in vm.createContext() (AnnaMag) [#10519](https://github.com/nodejs/node/pull/10519) +* [[`38d63e49eb`](https://github.com/nodejs/node/commit/38d63e49eb)] - **doc**: improve rinfo object documentation (Matt Crummey) [#10050](https://github.com/nodejs/node/pull/10050) +* [[`998fd1e7e1`](https://github.com/nodejs/node/commit/998fd1e7e1)] - **doc**: add tls.DEFAULT_ECDH_CURVE (Sam Roberts) [#10264](https://github.com/nodejs/node/pull/10264) +* [[`4995a819e0`](https://github.com/nodejs/node/commit/4995a819e0)] - **doc**: fix a wrong note in the buffer.md (Vse Mozhet Byt) 
[#9795](https://github.com/nodejs/node/pull/9795) +* [[`6d3c2d6212`](https://github.com/nodejs/node/commit/6d3c2d6212)] - **doc**: fix examples in buffer.md to avoid confusion (Vse Mozhet Byt) [#9795](https://github.com/nodejs/node/pull/9795) +* [[`020c90eb2d`](https://github.com/nodejs/node/commit/020c90eb2d)] - **doc**: remove a wrong remark in the buffer.md (Vse Mozhet Byt) [#9795](https://github.com/nodejs/node/pull/9795) +* [[`8af811f90e`](https://github.com/nodejs/node/commit/8af811f90e)] - **doc**: fix copy-paste artifacts in the buffer.md (Vse Mozhet Byt) [#9795](https://github.com/nodejs/node/pull/9795) +* [[`a2b40ad6a4`](https://github.com/nodejs/node/commit/a2b40ad6a4)] - **doc**: fix wrong function arguments in the buffer.md (Vse Mozhet Byt) [#9795](https://github.com/nodejs/node/pull/9795) +* [[`e94abaec1c`](https://github.com/nodejs/node/commit/e94abaec1c)] - **doc**: fix a syntax error in the buffer.md (Vse Mozhet Byt) [#9795](https://github.com/nodejs/node/pull/9795) +* [[`b36c315423`](https://github.com/nodejs/node/commit/b36c315423)] - **doc**: var =\> const/let in the buffer.md (Vse Mozhet Byt) [#9795](https://github.com/nodejs/node/pull/9795) +* [[`b503824b81`](https://github.com/nodejs/node/commit/b503824b81)] - **doc,test**: args to `buffer.copy` can be Uint8Arrays (Anna Henningsen) [#11486](https://github.com/nodejs/node/pull/11486) +* [[`c8d2ca7a78`](https://github.com/nodejs/node/commit/c8d2ca7a78)] - **fs**: improve performance for sync stat() functions (Brian White) [#11522](https://github.com/nodejs/node/pull/11522) +* [[`b4dc7a778f`](https://github.com/nodejs/node/commit/b4dc7a778f)] - **http**: make request.abort() destroy the socket (Luigi Pinca) [#10818](https://github.com/nodejs/node/pull/10818) +* [[`d777da27bc`](https://github.com/nodejs/node/commit/d777da27bc)] - **http**: reject control characters in http.request() (Ben Noordhuis) [#8923](https://github.com/nodejs/node/pull/8923) +* [[`bad0d9367e`](https://github.com/nodejs/node/commit/bad0d9367e)] - **http**: add debug message for invalid header value (Evan Lucas) [#9195](https://github.com/nodejs/node/pull/9195) +* [[`bde1a7e09e`](https://github.com/nodejs/node/commit/bde1a7e09e)] - **lib**: remove unnecessary parameter for assertCrypto() (Jackson Tian) [#10834](https://github.com/nodejs/node/pull/10834) +* [[`a2aa2f7de4`](https://github.com/nodejs/node/commit/a2aa2f7de4)] - **lib**: refactor bootstrap_node.js regular expression (Rich Trott) [#10749](https://github.com/nodejs/node/pull/10749) +* [[`797d9ee924`](https://github.com/nodejs/node/commit/797d9ee924)] - **lib**: refactor crypto cipher/hash/curve getters (Rich Trott) [#10682](https://github.com/nodejs/node/pull/10682) +* [[`69327f5e72`](https://github.com/nodejs/node/commit/69327f5e72)] - **lib**: rename kMaxCallbacksUntilQueueIsShortened (JungMinu) [#11473](https://github.com/nodejs/node/pull/11473) +* [[`a6b2dfa43c`](https://github.com/nodejs/node/commit/a6b2dfa43c)] - **lib**: add constant kMaxCallbacksUntilQueueIsShortened (Daniel Bevenius) [#11199](https://github.com/nodejs/node/pull/11199) +* [[`a3ad63b9b3`](https://github.com/nodejs/node/commit/a3ad63b9b3)] - **lib,src**: support values \> 4GB in heap statistics (Ben Noordhuis) [#10186](https://github.com/nodejs/node/pull/10186) +* [[`8b5dd35ae8`](https://github.com/nodejs/node/commit/8b5dd35ae8)] - **meta**: add explicit deprecation and semver-major policy (James M Snell) [#7964](https://github.com/nodejs/node/pull/7964) +* 
[[`4df850ba59`](https://github.com/nodejs/node/commit/4df850ba59)] - **meta**: remove Chris Dickinson from CTC (Chris Dickinson) [#11267](https://github.com/nodejs/node/pull/11267) +* [[`8863360a21`](https://github.com/nodejs/node/commit/8863360a21)] - **meta**: adding Italo A. Casas PGP Fingerprint (Italo A. Casas) [#11202](https://github.com/nodejs/node/pull/11202) +* [[`8287d03adf`](https://github.com/nodejs/node/commit/8287d03adf)] - **meta**: decharter the http working group (James M Snell) [#10604](https://github.com/nodejs/node/pull/10604) +* [[`742ec6213f`](https://github.com/nodejs/node/commit/742ec6213f)] - **net**: prefer === to == (Arseniy Maximov) [#11513](https://github.com/nodejs/node/pull/11513) +* [[`5bfa43d8f0`](https://github.com/nodejs/node/commit/5bfa43d8f0)] - **os**: improve loadavg() performance (Brian White) [#11516](https://github.com/nodejs/node/pull/11516) +* [[`b7088a9355`](https://github.com/nodejs/node/commit/b7088a9355)] - **process**: improve memoryUsage() performance (Brian White) [#11497](https://github.com/nodejs/node/pull/11497) +* [[`02e5f5c57e`](https://github.com/nodejs/node/commit/02e5f5c57e)] - **process**: fix typo in comments (levsthings) [#11503](https://github.com/nodejs/node/pull/11503) +* [[`db45bf850a`](https://github.com/nodejs/node/commit/db45bf850a)] - **querystring**: improve unescapeBuffer performance (Brian White) [#10837](https://github.com/nodejs/node/pull/10837) +* [[`32cdbca2dc`](https://github.com/nodejs/node/commit/32cdbca2dc)] - **querystring**: improve stringify() performance (Brian White) [#10852](https://github.com/nodejs/node/pull/10852) +* [[`23f3f20963`](https://github.com/nodejs/node/commit/23f3f20963)] - **querystring**: improve parse() performance (Brian White) [#10874](https://github.com/nodejs/node/pull/10874) +* [[`dc88b6572d`](https://github.com/nodejs/node/commit/dc88b6572d)] - **readline**: refactor construct Interface (Jackson Tian) [#4740](https://github.com/nodejs/node/pull/4740) +* [[`f7c6ad2df9`](https://github.com/nodejs/node/commit/f7c6ad2df9)] - **readline**: update 6 comparions to strict (Umair Ishaq) [#11078](https://github.com/nodejs/node/pull/11078) +* [[`b5a0d46c55`](https://github.com/nodejs/node/commit/b5a0d46c55)] - **src**: add NODE_NO_WARNINGS to --help output (cjihrig) [#10918](https://github.com/nodejs/node/pull/10918) +* [[`566e2fea48`](https://github.com/nodejs/node/commit/566e2fea48)] - **src**: remove unnecessary req_wrap_obj (Daniel Bevenius) [#10942](https://github.com/nodejs/node/pull/10942) +* [[`c7436df889`](https://github.com/nodejs/node/commit/c7436df889)] - **src**: add a missing space in node_os.cc (Alexey Orlenko) [#10931](https://github.com/nodejs/node/pull/10931) +* [[`4358c6096c`](https://github.com/nodejs/node/commit/4358c6096c)] - **src**: enable writev for pipe handles on Unix (Alexey Orlenko) [#10677](https://github.com/nodejs/node/pull/10677) +* [[`28102edbc8`](https://github.com/nodejs/node/commit/28102edbc8)] - **src**: unconsume stream fix in internal http impl (Roee Kasher) [#11015](https://github.com/nodejs/node/pull/11015) +* [[`587857e301`](https://github.com/nodejs/node/commit/587857e301)] - **src**: fix delete operator on vm context (Franziska Hinkelmann) [#11266](https://github.com/nodejs/node/pull/11266) +* [[`b7cbb8002c`](https://github.com/nodejs/node/commit/b7cbb8002c)] - **src**: support UTF-8 in compiled-in JS source files (Ben Noordhuis) [#11129](https://github.com/nodejs/node/pull/11129) +* 
[[`ce01372b68`](https://github.com/nodejs/node/commit/ce01372b68)] - **src**: remove unused typedef (Ben Noordhuis) [#11322](https://github.com/nodejs/node/pull/11322) +* [[`1dddfeccb2`](https://github.com/nodejs/node/commit/1dddfeccb2)] - **src**: remove usage of deprecated debug API (Yang Guo) [#11437](https://github.com/nodejs/node/pull/11437) +* [[`7f273c6f6e`](https://github.com/nodejs/node/commit/7f273c6f6e)] - **src**: update http-parser link (Daniel Bevenius) [#11477](https://github.com/nodejs/node/pull/11477) +* [[`214b514efe`](https://github.com/nodejs/node/commit/214b514efe)] - **src**: use ABORT() macro instead of abort() (Evan Lucas) [#9613](https://github.com/nodejs/node/pull/9613) +* [[`412f380903`](https://github.com/nodejs/node/commit/412f380903)] - **stream**: move legacy to lib/internal dir (yorkie) [#8197](https://github.com/nodejs/node/pull/8197) +* [[`336f1bd842`](https://github.com/nodejs/node/commit/336f1bd842)] - **test**: increase setMulticastLoopback() coverage (cjihrig) [#11277](https://github.com/nodejs/node/pull/11277) +* [[`b29165f249`](https://github.com/nodejs/node/commit/b29165f249)] - **test**: increase dgram ref()/unref() coverage (cjihrig) [#11240](https://github.com/nodejs/node/pull/11240) +* [[`22d4ed2484`](https://github.com/nodejs/node/commit/22d4ed2484)] - **test**: add an exception test to http-write-head (Yuta Hiroto) [#11034](https://github.com/nodejs/node/pull/11034) +* [[`9edd342e81`](https://github.com/nodejs/node/commit/9edd342e81)] - **test**: add known_issues test for #10223 (AnnaMag) [#11024](https://github.com/nodejs/node/pull/11024) +* [[`646f82520c`](https://github.com/nodejs/node/commit/646f82520c)] - **test**: guarantee test runs in test-readline-keys (Rich Trott) [#11023](https://github.com/nodejs/node/pull/11023) +* [[`d8eed12d31`](https://github.com/nodejs/node/commit/d8eed12d31)] - **test**: check error message in test-http-outgoing-proto (Alex Ling) [#10943](https://github.com/nodejs/node/pull/10943) +* [[`174bef182a`](https://github.com/nodejs/node/commit/174bef182a)] - **test**: increase coverage for stream's duplex (abouthiroppy) [#10963](https://github.com/nodejs/node/pull/10963) +* [[`8ff15a262d`](https://github.com/nodejs/node/commit/8ff15a262d)] - **test**: allow for slow hosts in spawnSync() test (Rich Trott) [#10998](https://github.com/nodejs/node/pull/10998) +* [[`62f6749cd6`](https://github.com/nodejs/node/commit/62f6749cd6)] - **test**: expand test coverage of fs.js (Vinícius do Carmo) [#10947](https://github.com/nodejs/node/pull/10947) +* [[`5cea2239d8`](https://github.com/nodejs/node/commit/5cea2239d8)] - **test**: expand test coverage of events.js (Vinícius do Carmo) [#10947](https://github.com/nodejs/node/pull/10947) +* [[`a1751864e2`](https://github.com/nodejs/node/commit/a1751864e2)] - **test**: check noAssert option in buf.write*() (larissayvette) [#10790](https://github.com/nodejs/node/pull/10790) +* [[`0b5f2b45f9`](https://github.com/nodejs/node/commit/0b5f2b45f9)] - **test**: expand test coverage of fs.js (Vinícius do Carmo) [#10972](https://github.com/nodejs/node/pull/10972) +* [[`d9362efb6c`](https://github.com/nodejs/node/commit/d9362efb6c)] - **test**: enhance test-timers (Rich Trott) [#10960](https://github.com/nodejs/node/pull/10960) +* [[`b9615b3abc`](https://github.com/nodejs/node/commit/b9615b3abc)] - **test**: increase coverage for exec() functions (cjihrig) [#10919](https://github.com/nodejs/node/pull/10919) +* [[`b45280671a`](https://github.com/nodejs/node/commit/b45280671a)] - **test**: add 
process.assert's test (abouthiroppy) [#10911](https://github.com/nodejs/node/pull/10911) +* [[`6584ea0715`](https://github.com/nodejs/node/commit/6584ea0715)] - **test**: update Buffer.lastIndexOf (dcposch@dcpos.ch) [#10162](https://github.com/nodejs/node/pull/10162) +* [[`0c60540014`](https://github.com/nodejs/node/commit/0c60540014)] - **test**: improve code in test-crypto-verify (Adrian Estrada) [#10845](https://github.com/nodejs/node/pull/10845) +* [[`2a52a68a96`](https://github.com/nodejs/node/commit/2a52a68a96)] - **test**: add dgram.Socket.prototype.bind's test (abouthiroppy) [#10894](https://github.com/nodejs/node/pull/10894) +* [[`2494d8ac68`](https://github.com/nodejs/node/commit/2494d8ac68)] - **test**: update V8 flag in test (Franziska Hinkelmann) [#10917](https://github.com/nodejs/node/pull/10917) +* [[`9ac22cdcaf`](https://github.com/nodejs/node/commit/9ac22cdcaf)] - **test**: increase coverage of string-decoder (abouthiroppy) [#10863](https://github.com/nodejs/node/pull/10863) +* [[`d766f5e0ad`](https://github.com/nodejs/node/commit/d766f5e0ad)] - **test**: improving coverage of dns-lookup (abouthiroppy) [#10844](https://github.com/nodejs/node/pull/10844) +* [[`8f984c3a8a`](https://github.com/nodejs/node/commit/8f984c3a8a)] - **test**: refactor test-fs-read-zero-length.js (abouthiroppy) [#10729](https://github.com/nodejs/node/pull/10729) +* [[`c0e24f9029`](https://github.com/nodejs/node/commit/c0e24f9029)] - **test**: improving coverage for dgram (abouthiroppy) [#10783](https://github.com/nodejs/node/pull/10783) +* [[`c91d873115`](https://github.com/nodejs/node/commit/c91d873115)] - **test**: improve code in test-console-instance (Adrian Estrada) [#10813](https://github.com/nodejs/node/pull/10813) +* [[`a434f451d9`](https://github.com/nodejs/node/commit/a434f451d9)] - **test**: improve code in test-domain-multi (Adrian Estrada) [#10798](https://github.com/nodejs/node/pull/10798) +* [[`b01db3a73f`](https://github.com/nodejs/node/commit/b01db3a73f)] - **test**: improve test-stream2-large-read-stall (stefan judis) [#10725](https://github.com/nodejs/node/pull/10725) +* [[`76f0556c4a`](https://github.com/nodejs/node/commit/76f0556c4a)] - **test**: improve code in test-http-host-headers (Adrian Estrada) [#10830](https://github.com/nodejs/node/pull/10830) +* [[`c740cb6667`](https://github.com/nodejs/node/commit/c740cb6667)] - **test**: add test case to test-http-response-statuscode.js (abouthiroppy) [#10808](https://github.com/nodejs/node/pull/10808) +* [[`872354563c`](https://github.com/nodejs/node/commit/872354563c)] - **test**: refactor cluster-preload.js (abouthiroppy) [#10701](https://github.com/nodejs/node/pull/10701) +* [[`04dc1cdfcb`](https://github.com/nodejs/node/commit/04dc1cdfcb)] - **test**: improve test-fs-write-file-sync (Adrian Estrada) [#10624](https://github.com/nodejs/node/pull/10624) +* [[`0d25d056a4`](https://github.com/nodejs/node/commit/0d25d056a4)] - **test**: test hmac binding robustness (Sam Roberts) [#10923](https://github.com/nodejs/node/pull/10923) +* [[`99a234c97e`](https://github.com/nodejs/node/commit/99a234c97e)] - **test**: refactor the code in test-fs-watch.js (sivaprasanna) [#10357](https://github.com/nodejs/node/pull/10357) +* [[`c13f01c94d`](https://github.com/nodejs/node/commit/c13f01c94d)] - **test**: reduce unmanaged parallelism in domain test (Joyee Cheung) [#10329](https://github.com/nodejs/node/pull/10329) +* [[`ed76b4a8e9`](https://github.com/nodejs/node/commit/ed76b4a8e9)] - **test**: add dgram.Socket.prototype.sendto's test 
(abouthiroppy) [#10901](https://github.com/nodejs/node/pull/10901) +* [[`5365501a2f`](https://github.com/nodejs/node/commit/5365501a2f)] - **test**: add regression test for V8 parse error (Michaël Zasso) [#11483](https://github.com/nodejs/node/pull/11483) +* [[`b5fb9f4098`](https://github.com/nodejs/node/commit/b5fb9f4098)] - **test**: increase timeout in break-on-uncaught (Sakthipriyan Vairamani (thefourtheye)) [#10822](https://github.com/nodejs/node/pull/10822) +* [[`443dd508d2`](https://github.com/nodejs/node/commit/443dd508d2)] - **test**: fix process.title expectation (Sakthipriyan Vairamani (thefourtheye)) [#10597](https://github.com/nodejs/node/pull/10597) +* [[`ae338daf06`](https://github.com/nodejs/node/commit/ae338daf06)] - **test**: refactor test-debugger-remote (Sakthipriyan Vairamani (thefourtheye)) [#10455](https://github.com/nodejs/node/pull/10455) +* [[`34e0bc6d16`](https://github.com/nodejs/node/commit/34e0bc6d16)] - **test**: fix and improve debugger-client test (Sakthipriyan Vairamani (thefourtheye)) [#10371](https://github.com/nodejs/node/pull/10371) +* [[`da874590a6`](https://github.com/nodejs/node/commit/da874590a6)] - **test**: improve test-assert (richnologies) [#10916](https://github.com/nodejs/node/pull/10916) +* [[`a15ecd269d`](https://github.com/nodejs/node/commit/a15ecd269d)] - **test**: increase coverage for punycode's decode (abouthiroppy) [#10940](https://github.com/nodejs/node/pull/10940) +* [[`98e32db207`](https://github.com/nodejs/node/commit/98e32db207)] - **test**: check fd 0,1,2 are used, not access mode (John Barboza) [#10339](https://github.com/nodejs/node/pull/10339) +* [[`e59697c695`](https://github.com/nodejs/node/commit/e59697c695)] - **test**: fix flaky test-regress-GH-897 (Rich Trott) [#10903](https://github.com/nodejs/node/pull/10903) +* [[`a08c7f6d87`](https://github.com/nodejs/node/commit/a08c7f6d87)] - **test**: don't connect to :: (use localhost instead) (Gibson Fahnestock) [#10854](https://github.com/nodejs/node/pull/10854) +* [[`ca53866333`](https://github.com/nodejs/node/commit/ca53866333)] - **test**: add message verification on assert.throws (Travis Meisenheimer) [#10890](https://github.com/nodejs/node/pull/10890) +* [[`38b123c918`](https://github.com/nodejs/node/commit/38b123c918)] - **test**: refactor test-repl-tab-complete (Rich Trott) [#10879](https://github.com/nodejs/node/pull/10879) +* [[`68fc4d3a1c`](https://github.com/nodejs/node/commit/68fc4d3a1c)] - **test**: simplify array initialization (Rich Trott) [#10860](https://github.com/nodejs/node/pull/10860) +* [[`a26d752e77`](https://github.com/nodejs/node/commit/a26d752e77)] - **test**: add http-common's test (abouthiroppy) [#10832](https://github.com/nodejs/node/pull/10832) +* [[`80e2ff9bff`](https://github.com/nodejs/node/commit/80e2ff9bff)] - **test**: tests for _readableStream.awaitDrain (Mark) [#8914](https://github.com/nodejs/node/pull/8914) +* [[`e4e9f675d2`](https://github.com/nodejs/node/commit/e4e9f675d2)] - **test**: improve the code in test-process-cpuUsage (Adrian Estrada) [#10714](https://github.com/nodejs/node/pull/10714) +* [[`73c0c46cf2`](https://github.com/nodejs/node/commit/73c0c46cf2)] - **test**: increase test-crypto.js strictness (Rich Trott) [#10784](https://github.com/nodejs/node/pull/10784) +* [[`e316fafbd4`](https://github.com/nodejs/node/commit/e316fafbd4)] - **test**: delete duplicate test of noAssert in readUInt* (larissayvette) [#10791](https://github.com/nodejs/node/pull/10791) +* [[`896fb63173`](https://github.com/nodejs/node/commit/896fb63173)] 
- **test**: add http_incoming's matchKnownFields test (abouthiroppy) [#10811](https://github.com/nodejs/node/pull/10811) +* [[`c086bdc2de`](https://github.com/nodejs/node/commit/c086bdc2de)] - **test**: check error msg test-writeint.js (Irene Li) [#10755](https://github.com/nodejs/node/pull/10755) +* [[`2eb0c25aa1`](https://github.com/nodejs/node/commit/2eb0c25aa1)] - **test**: no unused args test-fs-watch-file.js (istinson) [#10758](https://github.com/nodejs/node/pull/10758) +* [[`2f026f6668`](https://github.com/nodejs/node/commit/2f026f6668)] - **test**: improve tests in pummel/test-exec (Chase Starr) [#10757](https://github.com/nodejs/node/pull/10757) +* [[`93877c87cc`](https://github.com/nodejs/node/commit/93877c87cc)] - **test**: fix temp-dir option in tools/test.py (Gibson Fahnestock) [#10723](https://github.com/nodejs/node/pull/10723) +* [[`0f3677dd5d`](https://github.com/nodejs/node/commit/0f3677dd5d)] - **test**: use realpath for NODE_TEST_DIR in common.js (Gibson Fahnestock) [#10723](https://github.com/nodejs/node/pull/10723) +* [[`5d0cc617bb`](https://github.com/nodejs/node/commit/5d0cc617bb)] - **test**: move resource intensive test to sequential (Rich Trott) [#10744](https://github.com/nodejs/node/pull/10744) +* [[`cd4bb067ad`](https://github.com/nodejs/node/commit/cd4bb067ad)] - **test**: add test for noAssert option in buf.read*() (larissayvette) [#10713](https://github.com/nodejs/node/pull/10713) +* [[`5b55689b2c`](https://github.com/nodejs/node/commit/5b55689b2c)] - **test**: refactor test-crypto-padding-aes256 (adelmann) [#10622](https://github.com/nodejs/node/pull/10622) +* [[`119e512db3`](https://github.com/nodejs/node/commit/119e512db3)] - **test**: refactor the code of test-keep-alive.js (sivaprasanna) [#10684](https://github.com/nodejs/node/pull/10684) +* [[`ef3d889ee7`](https://github.com/nodejs/node/commit/ef3d889ee7)] - **test**: validate 'expected' argument to mustCall() (Nathan Friedly) [#10692](https://github.com/nodejs/node/pull/10692) +* [[`21704a3b6b`](https://github.com/nodejs/node/commit/21704a3b6b)] - **test**: fix misplaced ) in http response statuscode test (Nathan Friedly) [#10692](https://github.com/nodejs/node/pull/10692) +* [[`8565a06b09`](https://github.com/nodejs/node/commit/8565a06b09)] - **test**: refactor test-doctool-html.js (abouthiroppy) [#10696](https://github.com/nodejs/node/pull/10696) +* [[`168f3e4bf8`](https://github.com/nodejs/node/commit/168f3e4bf8)] - **test**: improve the code in test-process-hrtime (Adrian Estrada) [#10667](https://github.com/nodejs/node/pull/10667) +* [[`9acc86f578`](https://github.com/nodejs/node/commit/9acc86f578)] - **test**: refactor test-watch-file.js (sivaprasanna) [#10679](https://github.com/nodejs/node/pull/10679) +* [[`86e39367d6`](https://github.com/nodejs/node/commit/86e39367d6)] - **test**: improve zlib-from-gzip-with-trailing-garbage (Michael Lefkowitz) [#10674](https://github.com/nodejs/node/pull/10674) +* [[`3135455cd9`](https://github.com/nodejs/node/commit/3135455cd9)] - **test**: refactor the code in test-child-process-spawn-loop.js (sivaprasanna) [#10605](https://github.com/nodejs/node/pull/10605) +* [[`f43a8765a2`](https://github.com/nodejs/node/commit/f43a8765a2)] - **test**: allow testing uid and gid separately (cjihrig) [#10647](https://github.com/nodejs/node/pull/10647) +* [[`2f1d231c0d`](https://github.com/nodejs/node/commit/2f1d231c0d)] - **test**: improve test-http-chunked-304 (Adrian Estrada) [#10462](https://github.com/nodejs/node/pull/10462) +* 
[[`ec8a9962ce`](https://github.com/nodejs/node/commit/ec8a9962ce)] - **test**: improve test-fs-readfile-zero-byte-liar (Adrian Estrada) [#10570](https://github.com/nodejs/node/pull/10570) +* [[`12746af524`](https://github.com/nodejs/node/commit/12746af524)] - **test**: refactor test-fs-utimes (Junshu Okamoto) [#9290](https://github.com/nodejs/node/pull/9290) +* [[`e81b1cc1ae`](https://github.com/nodejs/node/commit/e81b1cc1ae)] - **test**: provide duration/interval to timers (Rich Trott) [#9472](https://github.com/nodejs/node/pull/9472) +* [[`17a63e15e6`](https://github.com/nodejs/node/commit/17a63e15e6)] - **test**: improve test-event-emitter-modify-in-emit (Adrian Estrada) [#10600](https://github.com/nodejs/node/pull/10600) +* [[`50ee4e6dad`](https://github.com/nodejs/node/commit/50ee4e6dad)] - **test**: require handler to be run in sigwinch test (Rich Trott) [#11068](https://github.com/nodejs/node/pull/11068) +* [[`8cce29587c`](https://github.com/nodejs/node/commit/8cce29587c)] - **test**: add 2nd argument to throws in test-assert (Marlena Compton) [#11061](https://github.com/nodejs/node/pull/11061) +* [[`b14d7b3aa1`](https://github.com/nodejs/node/commit/b14d7b3aa1)] - **test**: improve error messages in test-npm-install (Gonen Dukas) [#11027](https://github.com/nodejs/node/pull/11027) +* [[`87488ba2ff`](https://github.com/nodejs/node/commit/87488ba2ff)] - **test**: add path.join's test (Yuta Hiroto) [#11063](https://github.com/nodejs/node/pull/11063) +* [[`232664a10d`](https://github.com/nodejs/node/commit/232664a10d)] - **test**: fix timing sensitivity in debugger test (Ali Ijaz Sheikh) [#11008](https://github.com/nodejs/node/pull/11008) +* [[`c16160418b`](https://github.com/nodejs/node/commit/c16160418b)] - **test**: improve coverage on removeListeners functions (matsuda-koushi) [#11140](https://github.com/nodejs/node/pull/11140) +* [[`898276b1b4`](https://github.com/nodejs/node/commit/898276b1b4)] - **test**: simplify output handling in repl tests (Rich Trott) [#11124](https://github.com/nodejs/node/pull/11124) +* [[`3248cdb2e6`](https://github.com/nodejs/node/commit/3248cdb2e6)] - **test**: improve crypto.setEngine coverage to check for errors (Sebastian Van Sande) [#11143](https://github.com/nodejs/node/pull/11143) +* [[`28111f9eb2`](https://github.com/nodejs/node/commit/28111f9eb2)] - **test**: increase specificity in dgram test (Rich Trott) [#11187](https://github.com/nodejs/node/pull/11187) +* [[`c5e8ccab63`](https://github.com/nodejs/node/commit/c5e8ccab63)] - **test**: remove obsolete comment from dgram test (ALJCepeda) [#8689](https://github.com/nodejs/node/pull/8689) +* [[`7aebc6907c`](https://github.com/nodejs/node/commit/7aebc6907c)] - **test**: improve checks in test-path-parse-format (cjihrig) [#11223](https://github.com/nodejs/node/pull/11223) +* [[`baec432c93`](https://github.com/nodejs/node/commit/baec432c93)] - **test**: add coverage for string array dgram send() (cjihrig) [#11247](https://github.com/nodejs/node/pull/11247) +* [[`6694c26420`](https://github.com/nodejs/node/commit/6694c26420)] - **test**: adapt test-debugger-pid to localized Windows (Vse Mozhet Byt) [#11270](https://github.com/nodejs/node/pull/11270) +* [[`2db4c3c453`](https://github.com/nodejs/node/commit/2db4c3c453)] - **test**: add vm module edge cases (Franziska Hinkelmann) [#11265](https://github.com/nodejs/node/pull/11265) +* [[`759604912a`](https://github.com/nodejs/node/commit/759604912a)] - **test**: refactor test-dgram-setBroadcast.js (cjihrig) 
[#11252](https://github.com/nodejs/node/pull/11252) +* [[`3185fa1249`](https://github.com/nodejs/node/commit/3185fa1249)] - **test**: querystring.escape with multibyte characters (Daijiro Wachi) [#11251](https://github.com/nodejs/node/pull/11251) +* [[`460a3e1f7a`](https://github.com/nodejs/node/commit/460a3e1f7a)] - **test**: improve test-assert.js (jobala) [#11193](https://github.com/nodejs/node/pull/11193) +* [[`1adfca4b5e`](https://github.com/nodejs/node/commit/1adfca4b5e)] - **test**: refactor test-repl-sigint (Rich Trott) [#11309](https://github.com/nodejs/node/pull/11309) +* [[`c539325d89`](https://github.com/nodejs/node/commit/c539325d89)] - **test**: improve punycode test coverage (Sebastian Van Sande) [#11144](https://github.com/nodejs/node/pull/11144) +* [[`8db3c770be`](https://github.com/nodejs/node/commit/8db3c770be)] - **test**: refactor test-repl-sigint-nested-eval (Rich Trott) [#11303](https://github.com/nodejs/node/pull/11303) +* [[`874ef9d312`](https://github.com/nodejs/node/commit/874ef9d312)] - **test**: add coverage for dgram _createSocketHandle() (cjihrig) [#11291](https://github.com/nodejs/node/pull/11291) +* [[`92f6919532`](https://github.com/nodejs/node/commit/92f6919532)] - **test**: improve crypto coverage (Akito Ito) [#11280](https://github.com/nodejs/node/pull/11280) +* [[`d9deb1fb62`](https://github.com/nodejs/node/commit/d9deb1fb62)] - **test**: improve message in net-connect-local-error (Rich Trott) [#11393](https://github.com/nodejs/node/pull/11393) +* [[`6677c113aa`](https://github.com/nodejs/node/commit/6677c113aa)] - **test**: refactor test-dgram-membership (Rich Trott) [#11388](https://github.com/nodejs/node/pull/11388) +* [[`e7b7d7279c`](https://github.com/nodejs/node/commit/e7b7d7279c)] - **test**: cases to querystring related to empty string (Daijiro Wachi) [#11329](https://github.com/nodejs/node/pull/11329) +* [[`5a92fc25a1`](https://github.com/nodejs/node/commit/5a92fc25a1)] - **test**: consolidate buffer.read() in a file (larissayvette) [#11297](https://github.com/nodejs/node/pull/11297) +* [[`607158ab6e`](https://github.com/nodejs/node/commit/607158ab6e)] - **test**: improve crypto coverage (樋口 彰) [#11279](https://github.com/nodejs/node/pull/11279) +* [[`27f302d94f`](https://github.com/nodejs/node/commit/27f302d94f)] - **test**: remove unused args and comparison fix (Alexander) [#11396](https://github.com/nodejs/node/pull/11396) +* [[`8da156d68f`](https://github.com/nodejs/node/commit/8da156d68f)] - **test**: add coverage for utf8CheckIncomplete() (xiaoyu) [#11419](https://github.com/nodejs/node/pull/11419) +* [[`0ddad76813`](https://github.com/nodejs/node/commit/0ddad76813)] - **test**: fix over-dependence on native promise impl (Ali Ijaz Sheikh) [#11437](https://github.com/nodejs/node/pull/11437) +* [[`34444580f6`](https://github.com/nodejs/node/commit/34444580f6)] - **test**: add test cases for path (Yuta Hiroto) [#11453](https://github.com/nodejs/node/pull/11453) +* [[`4bcf1a0387`](https://github.com/nodejs/node/commit/4bcf1a0387)] - **test**: refactor test-http-response-splitting (Arseniy Maximov) [#11429](https://github.com/nodejs/node/pull/11429) +* [[`7836807178`](https://github.com/nodejs/node/commit/7836807178)] - **test**: add error checking in callback (Rich Trott) [#11446](https://github.com/nodejs/node/pull/11446) +* [[`13b7856444`](https://github.com/nodejs/node/commit/13b7856444)] - **test**: improve coverage in test-crypto.dh (Eric Christie) [#11253](https://github.com/nodejs/node/pull/11253) +* 
[[`b2f7e7a5ad`](https://github.com/nodejs/node/commit/b2f7e7a5ad)] - **test**: add regex check to test-module-loading (Tarang Hirani) [#11413](https://github.com/nodejs/node/pull/11413) +* [[`6bf936644e`](https://github.com/nodejs/node/commit/6bf936644e)] - **test**: increase coverage of vm (DavidCai) [#11377](https://github.com/nodejs/node/pull/11377) +* [[`6202f14583`](https://github.com/nodejs/node/commit/6202f14583)] - **test**: throw check in test-zlib-write-after-close (Jason Wilson) [#11482](https://github.com/nodejs/node/pull/11482) +* [[`f8884dd1b5`](https://github.com/nodejs/node/commit/f8884dd1b5)] - **test**: add cases for unescape & unescapeBuffer (Daijiro Wachi) [#11326](https://github.com/nodejs/node/pull/11326) +* [[`05909d045b`](https://github.com/nodejs/node/commit/05909d045b)] - **test**: fix flaky test-vm-timeout-rethrow (Kunal Pathak) [#11530](https://github.com/nodejs/node/pull/11530) +* [[`6e5f6e3c02`](https://github.com/nodejs/node/commit/6e5f6e3c02)] - **test**: favor assertions over console logging (Rich Trott) [#11547](https://github.com/nodejs/node/pull/11547) +* [[`2c4aa39021`](https://github.com/nodejs/node/commit/2c4aa39021)] - **test**: mark test-tty-wrap as flaky for AIX (Michael Dawson) [#10618](https://github.com/nodejs/node/pull/10618) +* [[`cb03e74037`](https://github.com/nodejs/node/commit/cb03e74037)] - **test**: improve test-fs-null-bytes (Adrian Estrada) [#10521](https://github.com/nodejs/node/pull/10521) +* [[`69b55f35f7`](https://github.com/nodejs/node/commit/69b55f35f7)] - **test**: refactor test-https-truncate (Rich Trott) [#10225](https://github.com/nodejs/node/pull/10225) +* [[`ada7166dfd`](https://github.com/nodejs/node/commit/ada7166dfd)] - **test**: simplify test-http-client-unescaped-path (Rod Vagg) [#9649](https://github.com/nodejs/node/pull/9649) +* [[`1b85989fb2`](https://github.com/nodejs/node/commit/1b85989fb2)] - **test**: move long-running test to sequential (Rich Trott) [#11176](https://github.com/nodejs/node/pull/11176) +* [[`87760cc346`](https://github.com/nodejs/node/commit/87760cc346)] - **test**: add new.target add-on regression test (Ben Noordhuis) [#9689](https://github.com/nodejs/node/pull/9689) +* [[`73283060ad`](https://github.com/nodejs/node/commit/73283060ad)] - **test,repl**: add coverage for repl .clear+useGlobal (Rich Trott) [#10777](https://github.com/nodejs/node/pull/10777) +* [[`4a87aee532`](https://github.com/nodejs/node/commit/4a87aee532)] - **test,util**: remove lint workarounds (Rich Trott) [#10785](https://github.com/nodejs/node/pull/10785) +* [[`3e9ce770f7`](https://github.com/nodejs/node/commit/3e9ce770f7)] - **test-console**: streamline arrow fn and refine regex (John Maguire) [#11039](https://github.com/nodejs/node/pull/11039) +* [[`b90a141cc7`](https://github.com/nodejs/node/commit/b90a141cc7)] - **timer**: remove duplicated word in comment (asafdav2) [#11323](https://github.com/nodejs/node/pull/11323) +* [[`d71ebb90ec`](https://github.com/nodejs/node/commit/d71ebb90ec)] - **timer,domain**: maintain order of timer callbacks (John Barboza) [#10522](https://github.com/nodejs/node/pull/10522) +* [[`2a168917cb`](https://github.com/nodejs/node/commit/2a168917cb)] - **tls**: do not crash on STARTTLS when OCSP requested (Fedor Indutny) [#10706](https://github.com/nodejs/node/pull/10706) +* [[`f33684ac5f`](https://github.com/nodejs/node/commit/f33684ac5f)] - **tools**: remove custom align-function-arguments rule (Rich Trott) [#10561](https://github.com/nodejs/node/pull/10561) +* 
[[`fb2f449acc`](https://github.com/nodejs/node/commit/fb2f449acc)] - **tools**: update ESLint to current version (Rich Trott) [#10561](https://github.com/nodejs/node/pull/10561) +* [[`83a3aef873`](https://github.com/nodejs/node/commit/83a3aef873)] - **tools**: rename eslintrc to an undeprecated format (Sakthipriyan Vairamani) [#7699](https://github.com/nodejs/node/pull/7699) +* [[`e4f7f5c630`](https://github.com/nodejs/node/commit/e4f7f5c630)] - **tools**: add lint rule to enforce timer arguments (Rich Trott) [#9472](https://github.com/nodejs/node/pull/9472) +* [[`a13bb54466`](https://github.com/nodejs/node/commit/a13bb54466)] - **tools**: add compile_commands.json gyp generator (Ben Noordhuis) [#7986](https://github.com/nodejs/node/pull/7986) +* [[`b38d8d6e06`](https://github.com/nodejs/node/commit/b38d8d6e06)] - **tools**: suggest python2 command in configure (Roman Reiss) [#11375](https://github.com/nodejs/node/pull/11375) +* [[`291346ea51`](https://github.com/nodejs/node/commit/291346ea51)] - **tools,doc**: add Google Analytics tracking. (Phillip Johnsen) [#6601](https://github.com/nodejs/node/pull/6601) +* [[`1ed47d3f33`](https://github.com/nodejs/node/commit/1ed47d3f33)] - **tty**: avoid oob warning in TTYWrap::GetWindowSize() (Dmitry Tsvettsikh) [#11454](https://github.com/nodejs/node/pull/11454) +* [[`9e6fcbb34c`](https://github.com/nodejs/node/commit/9e6fcbb34c)] - **url**: fix surrogate handling in encodeAuth() (Timothy Gu) [#11387](https://github.com/nodejs/node/pull/11387) +* [[`53213004eb`](https://github.com/nodejs/node/commit/53213004eb)] - **util**: improve readability of normalizeEncoding (Joyee Cheung) [#10439](https://github.com/nodejs/node/pull/10439) +* [[`e54b433c8d`](https://github.com/nodejs/node/commit/e54b433c8d)] - **util**: use ES2015+ Object.is to check negative zero (Shinnosuke Watanabe) [#11332](https://github.com/nodejs/node/pull/11332) +* [[`2e15d48447`](https://github.com/nodejs/node/commit/2e15d48447)] - **v8**: drop v8::FunctionCallbackInfo\::NewTarget() (Ben Noordhuis) [#9293](https://github.com/nodejs/node/pull/9293) +* [[`fd1ffe4f5a`](https://github.com/nodejs/node/commit/fd1ffe4f5a)] - **v8**: fix --always-opt bug (Ben Noordhuis) [#9293](https://github.com/nodejs/node/pull/9293) +* [[`a55af77fc5`](https://github.com/nodejs/node/commit/a55af77fc5)] - **vm**: refactor vm module (James M Snell) [#11392](https://github.com/nodejs/node/pull/11392) + ## 2017-02-21, Version 6.10.0 'Boron' (LTS), @MylesBorins diff --git a/doc/changelogs/CHANGELOG_V7.md b/doc/changelogs/CHANGELOG_V7.md index 45dda5d2edd..b0edcc82638 100644 --- a/doc/changelogs/CHANGELOG_V7.md +++ b/doc/changelogs/CHANGELOG_V7.md @@ -6,6 +6,7 @@ +7.7.4
7.7.3
7.7.2
7.7.1
@@ -31,6 +32,66 @@ * [io.js](CHANGELOG_IOJS.md) * [Archive](CHANGELOG_ARCHIVE.md) + +## 2017-03-21, Version 7.7.4 (Current), @cjihrig + +### Notable changes + +Thank you to @italoacasas for preparing the majority of this release. + +* **deps**: Add node-inspect 1.10.6 (Jan Krems) [#11869](https://github.com/nodejs/node/pull/11869) +* **inspector**: proper WS URLs when bound to 0.0.0.0 (Eugene Ostroukhov) [#11850](https://github.com/nodejs/node/pull/11850) +* **tls**: fix segfault on destroy after partial read. (Ben Noordhuis) [#11898](https://github.com/nodejs/node/pull/11898) + +### Commits + +* [[`f48763c5b9`](https://github.com/nodejs/node/commit/f48763c5b9)] - **benchmark**: remove benchmarks forced optimizations (Bartosz Sosnowski) +* [[`dcac2d8f04`](https://github.com/nodejs/node/commit/dcac2d8f04)] - **benchmark**: benchmark comparing forEach with for (James M Snell) [#11582](https://github.com/nodejs/node/pull/11582) +* [[`80949f3d88`](https://github.com/nodejs/node/commit/80949f3d88)] - **build**: add cpp linting to windows build (liusi) [#11856](https://github.com/nodejs/node/pull/11856) +* [[`5244ee346b`](https://github.com/nodejs/node/commit/5244ee346b)] - **build**: mac OBJ_DIR should point to obj.target (Daniel Bevenius) [#11857](https://github.com/nodejs/node/pull/11857) +* [[`5b1d61ce09`](https://github.com/nodejs/node/commit/5b1d61ce09)] - **child_process**: fix deoptimizing use of arguments (Vse Mozhet Byt) [#11748](https://github.com/nodejs/node/pull/11748) +* [[`ca319862fd`](https://github.com/nodejs/node/commit/ca319862fd)] - **deps**: cherry-pick ca0f9573 from V8 upstream (Ali Ijaz Sheikh) +* [[`a7e4b029da`](https://github.com/nodejs/node/commit/a7e4b029da)] - **deps**: Add node-inspect 1.10.6 (Jan Krems) [#11869](https://github.com/nodejs/node/pull/11869) +* [[`0c00b655d8`](https://github.com/nodejs/node/commit/0c00b655d8)] - **doc**: Fix #7065: cli help documentation for --inspect (Noj Vek) [#11660](https://github.com/nodejs/node/pull/11660) +* [[`60ad7af65e`](https://github.com/nodejs/node/commit/60ad7af65e)] - **doc**: deprecate debug protocol (Jan Krems) [#10320](https://github.com/nodejs/node/pull/10320) +* [[`a5f7393541`](https://github.com/nodejs/node/commit/a5f7393541)] - **doc**: add vsemozhetbyt to collaborators (Vse Mozhet Byt) [#11932](https://github.com/nodejs/node/pull/11932) +* [[`0c091262bd`](https://github.com/nodejs/node/commit/0c091262bd)] - **doc**: add note that vm module is not a security mechanism (Ruslan Bekenev) [#11557](https://github.com/nodejs/node/pull/11557) +* [[`6d6a65e2ad`](https://github.com/nodejs/node/commit/6d6a65e2ad)] - **doc**: linkable commit message guidelines (Sam Roberts) [#11792](https://github.com/nodejs/node/pull/11792) +* [[`7c7228ed4b`](https://github.com/nodejs/node/commit/7c7228ed4b)] - **doc**: gcc version is at least 4.8.5 in BUILDING.md (detailyang) [#11840](https://github.com/nodejs/node/pull/11840) +* [[`9861ec93d4`](https://github.com/nodejs/node/commit/9861ec93d4)] - **doc**: increase Buffer.concat() documentation (cjihrig) [#11845](https://github.com/nodejs/node/pull/11845) +* [[`54879ab7d1`](https://github.com/nodejs/node/commit/54879ab7d1)] - **doc**: fix mistakes in stream doc (object mode) (Christian d'Heureuse) [#11807](https://github.com/nodejs/node/pull/11807) +* [[`78ca15dd78`](https://github.com/nodejs/node/commit/78ca15dd78)] - **doc**: argument types for dns methods (Amelia Clarke) [#11764](https://github.com/nodejs/node/pull/11764) +* 
[[`e84e33c87c`](https://github.com/nodejs/node/commit/e84e33c87c)] - **doc**: fix a typo in api/process.md (Gaara) [#11780](https://github.com/nodejs/node/pull/11780) +* [[`75fcf53173`](https://github.com/nodejs/node/commit/75fcf53173)] - **doc**: missing argument types for events methods (Amelia Clarke) [#11802](https://github.com/nodejs/node/pull/11802) +* [[`ae52b63df2`](https://github.com/nodejs/node/commit/ae52b63df2)] - **doc**: correct comment error in stream.md (Alexander) [#11804](https://github.com/nodejs/node/pull/11804) +* [[`e6f113d3d5`](https://github.com/nodejs/node/commit/e6f113d3d5)] - **doc**: console.log() -> console.error() in events.md (Vse Mozhet Byt) [#11810](https://github.com/nodejs/node/pull/11810) +* [[`cde5d71db1`](https://github.com/nodejs/node/commit/cde5d71db1)] - **doc**: var -> let / const in events.md (Vse Mozhet Byt) [#11810](https://github.com/nodejs/node/pull/11810) +* [[`d0fb578d64`](https://github.com/nodejs/node/commit/d0fb578d64)] - **fs**: avoid using forEach (James M Snell) [#11582](https://github.com/nodejs/node/pull/11582) +* [[`14e3ad0c5e`](https://github.com/nodejs/node/commit/14e3ad0c5e)] - **inspector**: proper WS URLs when bound to 0.0.0.0 (Eugene Ostroukhov) [#11850](https://github.com/nodejs/node/pull/11850) +* [[`fbbcd1aa89`](https://github.com/nodejs/node/commit/fbbcd1aa89)] - **lib**: Fix swallowed events in inspect integration (Jan Krems) [#11869](https://github.com/nodejs/node/pull/11869) +* [[`9cc712ca18`](https://github.com/nodejs/node/commit/9cc712ca18)] - **lib**: remove unused msg parameter in debug_agent (mr-spd) [#11833](https://github.com/nodejs/node/pull/11833) +* [[`77c69f7ace`](https://github.com/nodejs/node/commit/77c69f7ace)] - **lib, test**: add duplicate symbol checking in E() (DavidCai) [#11829](https://github.com/nodejs/node/pull/11829) +* [[`7e230727fc`](https://github.com/nodejs/node/commit/7e230727fc)] - **module**: avoid using forEach (James M Snell) [#11582](https://github.com/nodejs/node/pull/11582) +* [[`c0a2e02f51`](https://github.com/nodejs/node/commit/c0a2e02f51)] - **net**: avoid using forEach (James M Snell) [#11582](https://github.com/nodejs/node/pull/11582) +* [[`a0b1aa1161`](https://github.com/nodejs/node/commit/a0b1aa1161)] - **readline**: avoid using forEach (James M Snell) [#11582](https://github.com/nodejs/node/pull/11582) +* [[`e19ca8ba11`](https://github.com/nodejs/node/commit/e19ca8ba11)] - **readline**: remove unneeded eslint-disable comment (Rich Trott) [#11836](https://github.com/nodejs/node/pull/11836) +* [[`62e726109a`](https://github.com/nodejs/node/commit/62e726109a)] - **repl**: avoid using forEach (James M Snell) [#11582](https://github.com/nodejs/node/pull/11582) +* [[`90be5a1f19`](https://github.com/nodejs/node/commit/90be5a1f19)] - **stream**: avoid using forEach (James M Snell) [#11582](https://github.com/nodejs/node/pull/11582) +* [[`2cab00aec0`](https://github.com/nodejs/node/commit/2cab00aec0)] - **test**: fix assertion in vm test (AnnaMag) [#11862](https://github.com/nodejs/node/pull/11862) +* [[`8bda7b8d39`](https://github.com/nodejs/node/commit/8bda7b8d39)] - **test**: add coverage for child_process bounds check (Rich Trott) [#11800](https://github.com/nodejs/node/pull/11800) +* [[`3ae58acd29`](https://github.com/nodejs/node/commit/3ae58acd29)] - **test**: failing behaviour on sandboxed Proxy (AnnaMag) [#11671](https://github.com/nodejs/node/pull/11671) +* [[`560d8eed9a`](https://github.com/nodejs/node/commit/560d8eed9a)] - **test**: delay child exit in AIX for pseudo-tty 
tests (Gireesh Punathil) [#11715](https://github.com/nodejs/node/pull/11715) +* [[`f9c831f4b1`](https://github.com/nodejs/node/commit/f9c831f4b1)] - **test**: fix flaky test-domain-abort-on-uncaught (Rich Trott) [#11817](https://github.com/nodejs/node/pull/11817) +* [[`2649dab274`](https://github.com/nodejs/node/commit/2649dab274)] - **test**: added test for indexed properties (AnnaMag) [#11769](https://github.com/nodejs/node/pull/11769) +* [[`2df662c95a`](https://github.com/nodejs/node/commit/2df662c95a)] - **test**: test resolveObject with an empty path (Daijiro Wachi) [#11811](https://github.com/nodejs/node/pull/11811) +* [[`d2c9111614`](https://github.com/nodejs/node/commit/d2c9111614)] - **test**: fix repl-function-redefinition-edge-case (Alexey Orlenko) [#11772](https://github.com/nodejs/node/pull/11772) +* [[`c9cf922248`](https://github.com/nodejs/node/commit/c9cf922248)] - **test**: add regex to assert.throws (Matej Krajčovič) [#11815](https://github.com/nodejs/node/pull/11815) +* [[`5f6025ba68`](https://github.com/nodejs/node/commit/5f6025ba68)] - **test**: fail when child dies in fork-net (Joyee Cheung) [#11684](https://github.com/nodejs/node/pull/11684) +* [[`c626734409`](https://github.com/nodejs/node/commit/c626734409)] - **tls**: fix segfault on destroy after partial read (Ben Noordhuis) [#11898](https://github.com/nodejs/node/pull/11898) +* [[`646ee559df`](https://github.com/nodejs/node/commit/646ee559df)] - **tls**: avoid using forEach (James M Snell) [#11582](https://github.com/nodejs/node/pull/11582) +* [[`540830116b`](https://github.com/nodejs/node/commit/540830116b)] - **tls**: keep track of stream that is closed (jBarz) [#11776](https://github.com/nodejs/node/pull/11776) +* [[`9a59913039`](https://github.com/nodejs/node/commit/9a59913039)] - **util**: avoid using forEach (James M Snell) [#11582](https://github.com/nodejs/node/pull/11582) + ## 2017-03-14, Version 7.7.3 (Current), @italoacasas diff --git a/lib/_http_agent.js b/lib/_http_agent.js index 0661ee15537..351417a7ba6 100644 --- a/lib/_http_agent.js +++ b/lib/_http_agent.js @@ -105,7 +105,6 @@ function Agent(options) { } util.inherits(Agent, EventEmitter); -exports.Agent = Agent; Agent.defaultMaxSockets = Infinity; @@ -314,4 +313,7 @@ Agent.prototype.destroy = function destroy() { } }; -exports.globalAgent = new Agent(); +module.exports = { + Agent, + globalAgent: new Agent() +}; diff --git a/lib/_http_client.js b/lib/_http_client.js index b818b05c527..d281b120c85 100644 --- a/lib/_http_client.js +++ b/lib/_http_client.js @@ -66,8 +66,7 @@ function isInvalidPath(s) { } function ClientRequest(options, cb) { - var self = this; - OutgoingMessage.call(self); + OutgoingMessage.call(this); if (typeof options === 'string') { options = url.parse(options); @@ -95,12 +94,12 @@ function ClientRequest(options, cb) { 'Agent option must be an instance of http.Agent, undefined or false.' 
); } - self.agent = agent; + this.agent = agent; var protocol = options.protocol || defaultAgent.protocol; var expectedProtocol = defaultAgent.protocol; - if (self.agent && self.agent.protocol) - expectedProtocol = self.agent.protocol; + if (this.agent && this.agent.protocol) + expectedProtocol = this.agent.protocol; var path; if (options.path) { @@ -121,15 +120,15 @@ function ClientRequest(options, cb) { } const defaultPort = options.defaultPort || - self.agent && self.agent.defaultPort; + this.agent && this.agent.defaultPort; var port = options.port = options.port || defaultPort || 80; var host = options.host = options.hostname || options.host || 'localhost'; var setHost = (options.setHost === undefined); - self.socketPath = options.socketPath; - self.timeout = options.timeout; + this.socketPath = options.socketPath; + this.timeout = options.timeout; var method = options.method; var methodIsString = (typeof method === 'string'); @@ -141,14 +140,14 @@ function ClientRequest(options, cb) { if (!common._checkIsHttpToken(method)) { throw new TypeError('Method must be a valid HTTP token'); } - method = self.method = method.toUpperCase(); + method = this.method = method.toUpperCase(); } else { - method = self.method = 'GET'; + method = this.method = 'GET'; } - self.path = options.path || '/'; + this.path = options.path || '/'; if (cb) { - self.once('response', cb); + this.once('response', cb); } var headersArray = Array.isArray(options.headers); @@ -157,7 +156,7 @@ function ClientRequest(options, cb) { var keys = Object.keys(options.headers); for (var i = 0; i < keys.length; i++) { var key = keys[i]; - self.setHeader(key, options.headers[key]); + this.setHeader(key, options.headers[key]); } } if (host && !this.getHeader('host') && setHost) { @@ -190,21 +189,22 @@ function ClientRequest(options, cb) { method === 'DELETE' || method === 'OPTIONS' || method === 'CONNECT') { - self.useChunkedEncodingByDefault = false; + this.useChunkedEncodingByDefault = false; } else { - self.useChunkedEncodingByDefault = true; + this.useChunkedEncodingByDefault = true; } if (headersArray) { - self._storeHeader(self.method + ' ' + self.path + ' HTTP/1.1\r\n', + this._storeHeader(this.method + ' ' + this.path + ' HTTP/1.1\r\n', options.headers); - } else if (self.getHeader('expect')) { - if (self._header) { + } else if (this.getHeader('expect')) { + if (this._header) { throw new Error('Can\'t render headers after they are sent to the ' + 'client'); } - self._storeHeader(self.method + ' ' + self.path + ' HTTP/1.1\r\n', - self[outHeadersKey]); + + this._storeHeader(this.method + ' ' + this.path + ' HTTP/1.1\r\n', + this[outHeadersKey]); } this._ended = false; @@ -216,77 +216,69 @@ function ClientRequest(options, cb) { this.maxHeadersCount = null; var called = false; - if (self.socketPath) { - self._last = true; - self.shouldKeepAlive = false; + + const oncreate = (err, socket) => { + if (called) + return; + called = true; + if (err) { + process.nextTick(() => this.emit('error', err)); + return; + } + this.onSocket(socket); + this._deferToConnect(null, null, () => this._flush()); + }; + + if (this.socketPath) { + this._last = true; + this.shouldKeepAlive = false; const optionsPath = { - path: self.socketPath, - timeout: self.timeout + path: this.socketPath, + timeout: this.timeout }; - const newSocket = self.agent.createConnection(optionsPath, oncreate); + const newSocket = this.agent.createConnection(optionsPath, oncreate); if (newSocket && !called) { called = true; - self.onSocket(newSocket); + 
this.onSocket(newSocket); } else { return; } - } else if (self.agent) { + } else if (this.agent) { // If there is an agent we should default to Connection:keep-alive, // but only if the Agent will actually reuse the connection! // If it's not a keepAlive agent, and the maxSockets==Infinity, then // there's never a case where this socket will actually be reused - if (!self.agent.keepAlive && !Number.isFinite(self.agent.maxSockets)) { - self._last = true; - self.shouldKeepAlive = false; + if (!this.agent.keepAlive && !Number.isFinite(this.agent.maxSockets)) { + this._last = true; + this.shouldKeepAlive = false; } else { - self._last = false; - self.shouldKeepAlive = true; + this._last = false; + this.shouldKeepAlive = true; } - self.agent.addRequest(self, options); + this.agent.addRequest(this, options); } else { // No agent, default to Connection:close. - self._last = true; - self.shouldKeepAlive = false; + this._last = true; + this.shouldKeepAlive = false; if (typeof options.createConnection === 'function') { const newSocket = options.createConnection(options, oncreate); if (newSocket && !called) { called = true; - self.onSocket(newSocket); + this.onSocket(newSocket); } else { return; } } else { debug('CLIENT use net.createConnection', options); - self.onSocket(net.createConnection(options)); - } - } - - function oncreate(err, socket) { - if (called) - return; - called = true; - if (err) { - process.nextTick(function() { - self.emit('error', err); - }); - return; + this.onSocket(net.createConnection(options)); } - self.onSocket(socket); - self._deferToConnect(null, null, function() { - self._flush(); - self = null; - }); } - self._deferToConnect(null, null, function() { - self._flush(); - self = null; - }); + this._deferToConnect(null, null, () => this._flush()); } util.inherits(ClientRequest, OutgoingMessage); -exports.ClientRequest = ClientRequest; ClientRequest.prototype._finish = function _finish() { DTRACE_HTTP_CLIENT_REQUEST(this, this.connection); @@ -305,7 +297,7 @@ ClientRequest.prototype._implicitHeader = function _implicitHeader() { ClientRequest.prototype.abort = function abort() { if (!this.aborted) { - process.nextTick(emitAbortNT, this); + process.nextTick(emitAbortNT.bind(this)); } // Mark as aborting so we can avoid sending queued request data // This is used as a truthy flag elsewhere. The use of Date.now is for @@ -329,8 +321,8 @@ ClientRequest.prototype.abort = function abort() { }; -function emitAbortNT(self) { - self.emit('abort'); +function emitAbortNT() { + this.emit('abort'); } @@ -682,26 +674,25 @@ function _deferToConnect(method, arguments_, cb) { // calls that happen either now (when a socket is assigned) or // in the future (when a socket gets assigned out of the pool and is // eventually writable). 
- var self = this; - function callSocketMethod() { + const callSocketMethod = () => { if (method) - self.socket[method].apply(self.socket, arguments_); + this.socket[method].apply(this.socket, arguments_); if (typeof cb === 'function') cb(); - } + }; - var onSocket = function onSocket() { - if (self.socket.writable) { + const onSocket = () => { + if (this.socket.writable) { callSocketMethod(); } else { - self.socket.once('connect', callSocketMethod); + this.socket.once('connect', callSocketMethod); } }; - if (!self.socket) { - self.once('socket', onSocket); + if (!this.socket) { + this.once('socket', onSocket); } else { onSocket(); } @@ -710,10 +701,7 @@ function _deferToConnect(method, arguments_, cb) { ClientRequest.prototype.setTimeout = function setTimeout(msecs, callback) { if (callback) this.once('timeout', callback); - var self = this; - function emitTimeout() { - self.emit('timeout'); - } + const emitTimeout = () => this.emit('timeout'); if (this.socket && this.socket.writable) { if (this.timeoutCb) @@ -752,3 +740,7 @@ ClientRequest.prototype.setSocketKeepAlive = ClientRequest.prototype.clearTimeout = function clearTimeout(cb) { this.setTimeout(0, cb); }; + +module.exports = { + ClientRequest +}; diff --git a/lib/_http_common.js b/lib/_http_common.js index c2ffbfce804..52cabd87fdf 100644 --- a/lib/_http_common.js +++ b/lib/_http_common.js @@ -26,18 +26,13 @@ const methods = binding.methods; const HTTPParser = binding.HTTPParser; const FreeList = require('internal/freelist').FreeList; +const ondrain = require('internal/http').ondrain; const incoming = require('_http_incoming'); const IncomingMessage = incoming.IncomingMessage; const readStart = incoming.readStart; const readStop = incoming.readStop; const debug = require('util').debuglog('http'); -exports.debug = debug; - -exports.CRLF = '\r\n'; -exports.chunkExpression = /(?:^|\W)chunked(?:$|\W)/i; -exports.continueExpression = /(?:^|\W)100-continue(?:$|\W)/i; -exports.methods = methods; const kOnHeaders = HTTPParser.kOnHeaders | 0; const kOnHeadersComplete = HTTPParser.kOnHeadersComplete | 0; @@ -194,7 +189,6 @@ var parsers = new FreeList('parsers', 1000, function() { return parser; }); -exports.parsers = parsers; // Free the parser and also break any links that it @@ -227,19 +221,12 @@ function freeParser(parser, req, socket) { socket.parser = null; } } -exports.freeParser = freeParser; - - -function ondrain() { - if (this._httpMessage) this._httpMessage.emit('drain'); -} function httpSocketSetup(socket) { socket.removeListener('drain', ondrain); socket.on('drain', ondrain); } -exports.httpSocketSetup = httpSocketSetup; /** * Verifies that the given val is a valid HTTP token @@ -306,7 +293,6 @@ function checkIsHttpToken(val) { } return true; } -exports._checkIsHttpToken = checkIsHttpToken; /** * True if val contains an invalid field-vchar @@ -360,4 +346,16 @@ function checkInvalidHeaderChar(val) { } return false; } -exports._checkInvalidHeaderChar = checkInvalidHeaderChar; + +module.exports = { + _checkInvalidHeaderChar: checkInvalidHeaderChar, + _checkIsHttpToken: checkIsHttpToken, + chunkExpression: /(?:^|\W)chunked(?:$|\W)/i, + continueExpression: /(?:^|\W)100-continue(?:$|\W)/i, + CRLF: '\r\n', + debug, + freeParser, + httpSocketSetup, + methods, + parsers +}; diff --git a/lib/_http_incoming.js b/lib/_http_incoming.js index b0d5c29b209..9151836ad83 100644 --- a/lib/_http_incoming.js +++ b/lib/_http_incoming.js @@ -28,14 +28,11 @@ function readStart(socket) { if (socket && !socket._paused && socket.readable) socket.resume(); } 
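Here, as in `_http_common.js` above, the per-definition `exports.name = ...` assignments are dropped in favour of a single `module.exports = { ... }` object at the end of the file. A small sketch of the before/after shape, with hypothetical helper names:

    'use strict';
    // Before: the public surface was scattered through the file.
    //   exports.readStart = readStart;
    //   exports.readStop = readStop;

    function readStartSketch(socket) {
      if (socket) socket.resume();
    }

    function readStopSketch(socket) {
      if (socket) socket.pause();
    }

    // After: one object literal at the bottom lists everything the module
    // exposes, and consumers destructure what they need, e.g.
    //   const { readStart, readStop } = require('./some-module');
    module.exports = {
      readStart: readStartSketch,
      readStop: readStopSketch
    };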
-exports.readStart = readStart; function readStop(socket) { if (socket) socket.pause(); } -exports.readStop = readStop; - /* Abstract base class for ServerRequest and ClientResponse. */ function IncomingMessage(socket) { @@ -83,9 +80,6 @@ function IncomingMessage(socket) { util.inherits(IncomingMessage, Stream.Readable); -exports.IncomingMessage = IncomingMessage; - - IncomingMessage.prototype.setTimeout = function setTimeout(msecs, callback) { if (callback) this.on('timeout', callback); @@ -150,10 +144,9 @@ function _addHeaderLines(headers, n) { // TODO: perhaps http_parser could be returning both raw and lowercased versions // of known header names to avoid us having to call toLowerCase() for those // headers. -/* eslint-disable max-len */ + // 'array' header list is taken from: // https://mxr.mozilla.org/mozilla/source/netwerk/protocol/http/src/nsHttpHeaderArray.cpp -/* eslint-enable max-len */ function matchKnownFields(field) { var low = false; while (true) { @@ -324,3 +317,9 @@ IncomingMessage.prototype._dump = function _dump() { this.resume(); } }; + +module.exports = { + IncomingMessage, + readStart, + readStop +}; diff --git a/lib/_http_outgoing.js b/lib/_http_outgoing.js index 130adef1de1..cdca2d4e89d 100644 --- a/lib/_http_outgoing.js +++ b/lib/_http_outgoing.js @@ -625,7 +625,7 @@ const crlf_buf = Buffer.from('\r\n'); OutgoingMessage.prototype.write = function write(chunk, encoding, callback) { if (this.finished) { var err = new Error('write after end'); - process.nextTick(writeAfterEndNT, this, err, callback); + process.nextTick(writeAfterEndNT.bind(this), err, callback); return true; } @@ -684,8 +684,8 @@ OutgoingMessage.prototype.write = function write(chunk, encoding, callback) { }; -function writeAfterEndNT(self, err, callback) { - self.emit('error', err); +function writeAfterEndNT(err, callback) { + this.emit('error', err); if (callback) callback(err); } @@ -887,3 +887,8 @@ OutgoingMessage.prototype.flushHeaders = function flushHeaders() { OutgoingMessage.prototype.flush = internalUtil.deprecate(function() { this.flushHeaders(); }, 'OutgoingMessage.flush is deprecated. Use flushHeaders instead.', 'DEP0001'); + + +module.exports = { + OutgoingMessage +}; diff --git a/lib/_http_server.js b/lib/_http_server.js index 0a300ade618..7b3f004f9af 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -34,9 +34,9 @@ const continueExpression = common.continueExpression; const chunkExpression = common.chunkExpression; const httpSocketSetup = common.httpSocketSetup; const OutgoingMessage = require('_http_outgoing').OutgoingMessage; -const outHeadersKey = require('internal/http').outHeadersKey; +const { outHeadersKey, ondrain } = require('internal/http'); -const STATUS_CODES = exports.STATUS_CODES = { +const STATUS_CODES = { 100: 'Continue', 101: 'Switching Protocols', 102: 'Processing', // RFC 2518, obsoleted by RFC 4918 @@ -128,8 +128,6 @@ ServerResponse.prototype._finish = function _finish() { }; -exports.ServerResponse = ServerResponse; - ServerResponse.prototype.statusCode = 200; ServerResponse.prototype.statusMessage = undefined; @@ -265,11 +263,9 @@ function Server(requestListener) { this.on('request', requestListener); } - /* eslint-disable max-len */ // Similar option to this. Too lazy to write my own docs. 
// http://www.squid-cache.org/Doc/config/half_closed_clients/ // http://wiki.squid-cache.org/SquidFaq/InnerWorkings#What_is_a_half-closed_filedescriptor.3F - /* eslint-enable max-len */ this.httpAllowHalfOpen = false; this.on('connection', connectionListener); @@ -290,9 +286,6 @@ Server.prototype.setTimeout = function setTimeout(msecs, callback) { }; -exports.Server = Server; - - function connectionListener(socket) { debug('SERVER new http connection'); @@ -303,7 +296,7 @@ function connectionListener(socket) { // otherwise, destroy on timeout by default if (this.timeout) socket.setTimeout(this.timeout); - socket.on('timeout', socketOnTimeout.bind(undefined, this, socket)); + socket.on('timeout', socketOnTimeout); var parser = parsers.alloc(); parser.reinitialize(HTTPParser.REQUEST); @@ -321,9 +314,9 @@ function connectionListener(socket) { var state = { onData: null, - onError: null, onEnd: null, onClose: null, + onDrain: null, outgoing: [], incoming: [], // `outgoingData` is an approximate amount of bytes queued through all @@ -333,21 +326,20 @@ function connectionListener(socket) { outgoingData: 0 }; state.onData = socketOnData.bind(undefined, this, socket, parser, state); - state.onError = socketOnError.bind(undefined, this, socket, state); state.onEnd = socketOnEnd.bind(undefined, this, socket, parser, state); state.onClose = socketOnClose.bind(undefined, socket, state); + state.onDrain = socketOnDrain.bind(undefined, socket, state); socket.on('data', state.onData); - socket.on('error', state.onError); + socket.on('error', socketOnError); socket.on('end', state.onEnd); socket.on('close', state.onClose); + socket.on('drain', state.onDrain); parser.onIncoming = parserOnIncoming.bind(undefined, this, socket, state); // We are consuming socket, so it won't get any actual data socket.on('resume', onSocketResume); socket.on('pause', onSocketPause); - socket.on('drain', socketOnDrain.bind(undefined, socket, state)); - // Override on to unconsume on `data`, `readable` listeners socket.on = socketOnWrap; @@ -363,7 +355,7 @@ function connectionListener(socket) { socket._paused = false; } -exports._connectionListener = connectionListener; + function updateOutgoingData(socket, state, delta) { state.outgoingData += delta; @@ -385,15 +377,15 @@ function socketOnDrain(socket, state) { } } -function socketOnTimeout(server, socket) { - var req = socket.parser && socket.parser.incoming; - var reqTimeout = req && !req.complete && req.emit('timeout', socket); - var res = socket._httpMessage; - var resTimeout = res && res.emit('timeout', socket); - var serverTimeout = server.emit('timeout', socket); +function socketOnTimeout() { + var req = this.parser && this.parser.incoming; + var reqTimeout = req && !req.complete && req.emit('timeout', this); + var res = this._httpMessage; + var resTimeout = res && res.emit('timeout', this); + var serverTimeout = this.server.emit('timeout', this); if (!reqTimeout && !resTimeout && !serverTimeout) - socket.destroy(); + this.destroy(); } function socketOnClose(socket, state) { @@ -420,7 +412,7 @@ function socketOnEnd(server, socket, parser, state) { if (ret instanceof Error) { debug('parse error'); - state.onError(ret); + socketOnError.call(socket, ret); return; } @@ -450,19 +442,19 @@ function onParserExecute(server, socket, parser, state, ret, d) { onParserExecuteCommon(server, socket, parser, state, ret, undefined); } -function socketOnError(server, socket, state, e) { +function socketOnError(e) { // Ignore further errors - socket.removeListener('error', 
state.onError); - socket.on('error', () => {}); + this.removeListener('error', socketOnError); + this.on('error', () => {}); - if (!server.emit('clientError', e, socket)) - socket.destroy(e); + if (!this.server.emit('clientError', e, this)) + this.destroy(e); } function onParserExecuteCommon(server, socket, parser, state, ret, d) { if (ret instanceof Error) { debug('parse error'); - state.onError(ret); + socketOnError.call(socket, ret); } else if (parser.incoming && parser.incoming.upgrade) { // Upgrade or CONNECT var bytesParsed = ret; @@ -475,6 +467,8 @@ function onParserExecuteCommon(server, socket, parser, state, ret, d) { socket.removeListener('data', state.onData); socket.removeListener('end', state.onEnd); socket.removeListener('close', state.onClose); + socket.removeListener('drain', state.onDrain); + socket.removeListener('drain', ondrain); unconsume(parser, socket); parser.finish(); freeParser(parser, req, null); @@ -640,3 +634,10 @@ function socketOnWrap(ev, fn) { return res; } + +module.exports = { + STATUS_CODES, + Server, + ServerResponse, + _connectionListener: connectionListener +}; diff --git a/lib/_tls_wrap.js b/lib/_tls_wrap.js index e1767c5e672..288f82e05b3 100644 --- a/lib/_tls_wrap.js +++ b/lib/_tls_wrap.js @@ -920,17 +920,8 @@ Server.prototype.setTicketKeys = function setTicketKeys(keys) { Server.prototype.setOptions = function(options) { - if (typeof options.requestCert === 'boolean') { - this.requestCert = options.requestCert; - } else { - this.requestCert = false; - } - - if (typeof options.rejectUnauthorized === 'boolean') { - this.rejectUnauthorized = options.rejectUnauthorized; - } else { - this.rejectUnauthorized = false; - } + this.requestCert = options.requestCert === true; + this.rejectUnauthorized = options.rejectUnauthorized !== false; if (options.pfx) this.pfx = options.pfx; if (options.key) this.key = options.key; @@ -1062,7 +1053,7 @@ exports.connect = function(...args /* [port,] [host,] [options,] [cb] */) { secureContext: context, isServer: false, requestCert: true, - rejectUnauthorized: options.rejectUnauthorized, + rejectUnauthorized: options.rejectUnauthorized !== false, session: options.session, NPNProtocols: NPN.NPNProtocols, ALPNProtocols: ALPN.ALPNProtocols, diff --git a/lib/buffer.js b/lib/buffer.js index 474fc8456ff..b896e834ca6 100644 --- a/lib/buffer.js +++ b/lib/buffer.js @@ -19,7 +19,6 @@ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -/* eslint-disable require-buffer */ 'use strict'; const binding = process.binding('buffer'); diff --git a/lib/events.js b/lib/events.js index ac080117665..a186d712578 100644 --- a/lib/events.js +++ b/lib/events.js @@ -56,6 +56,10 @@ Object.defineProperty(EventEmitter, 'defaultMaxListeners', { // force global console to be compiled. // see https://github.com/nodejs/node/issues/4467 console; + // check whether the input is a positive number (whose value is zero or + // greater and not a NaN). 
+ if (typeof arg !== 'number' || arg < 0 || arg !== arg) + throw new TypeError('"defaultMaxListeners" must be a positive number'); defaultMaxListeners = arg; } }); diff --git a/lib/fs.js b/lib/fs.js index 6487d39e481..3d65b2efa00 100644 --- a/lib/fs.js +++ b/lib/fs.js @@ -1058,7 +1058,7 @@ fs.fchmodSync = function(fd, mode) { return binding.fchmod(fd, modeNum(mode)); }; -if (constants.hasOwnProperty('O_SYMLINK')) { +if (constants.O_SYMLINK !== undefined) { fs.lchmod = function(path, mode, callback) { callback = maybeCallback(callback); fs.open(path, constants.O_WRONLY | constants.O_SYMLINK, function(err, fd) { @@ -1116,7 +1116,7 @@ fs.chmodSync = function(path, mode) { return binding.chmod(pathModule._makeLong(path), modeNum(mode)); }; -if (constants.hasOwnProperty('O_SYMLINK')) { +if (constants.O_SYMLINK !== undefined) { fs.lchown = function(path, uid, gid, callback) { callback = maybeCallback(callback); fs.open(path, constants.O_WRONLY | constants.O_SYMLINK, function(err, fd) { @@ -1165,8 +1165,8 @@ function toUnixTimestamp(time) { if (typeof time === 'string' && +time == time) { return +time; } - if (typeof time === 'number') { - if (!Number.isFinite(time) || time < 0) { + if (Number.isFinite(time)) { + if (time < 0) { return Date.now() / 1000; } return time; diff --git a/lib/http.js b/lib/http.js index 4b0e589a568..af3e8a017ce 100644 --- a/lib/http.js +++ b/lib/http.js @@ -20,35 +20,43 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -exports.IncomingMessage = require('_http_incoming').IncomingMessage; - -exports.OutgoingMessage = require('_http_outgoing').OutgoingMessage; - -exports.METHODS = require('_http_common').methods.slice().sort(); const agent = require('_http_agent'); -exports.Agent = agent.Agent; -exports.globalAgent = agent.globalAgent; - +const client = require('_http_client'); +const common = require('_http_common'); +const incoming = require('_http_incoming'); +const outgoing = require('_http_outgoing'); const server = require('_http_server'); -exports.ServerResponse = server.ServerResponse; -exports.STATUS_CODES = server.STATUS_CODES; -exports._connectionListener = server._connectionListener; -const Server = exports.Server = server.Server; -exports.createServer = function createServer(requestListener) { - return new Server(requestListener); -}; +const Server = server.Server; +const ClientRequest = client.ClientRequest; -const client = require('_http_client'); -const ClientRequest = exports.ClientRequest = client.ClientRequest; +function createServer(requestListener) { + return new Server(requestListener); +} -exports.request = function request(options, cb) { +function request(options, cb) { return new ClientRequest(options, cb); -}; +} -exports.get = function get(options, cb) { - var req = exports.request(options, cb); +function get(options, cb) { + var req = request(options, cb); req.end(); return req; +} + +module.exports = { + _connectionListener: server._connectionListener, + METHODS: common.methods.slice().sort(), + STATUS_CODES: server.STATUS_CODES, + Agent: agent.Agent, + ClientRequest, + globalAgent: agent.globalAgent, + IncomingMessage: incoming.IncomingMessage, + OutgoingMessage: outgoing.OutgoingMessage, + Server, + ServerResponse: server.ServerResponse, + createServer, + get, + request }; diff --git a/lib/internal/errors.js b/lib/internal/errors.js index 3cab1409422..01e4e482cb0 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -6,16 +6,11 @@ // value statically and permanently identifies the error. 
While the error // message may change, the code should not. +const assert = require('assert'); const kCode = Symbol('code'); const messages = new Map(); -var assert, util; -function lazyAssert() { - if (!assert) - assert = require('assert'); - return assert; -} - +var util; function lazyUtil() { if (!util) util = require('util'); @@ -41,7 +36,6 @@ function makeNodeError(Base) { } function message(key, args) { - const assert = lazyAssert(); assert.strictEqual(typeof key, 'string'); const util = lazyUtil(); const msg = messages.get(key); @@ -60,7 +54,6 @@ function message(key, args) { // Utility function for registering the error codes. Only used here. Exported // *only* to allow for testing. function E(sym, val) { - const assert = lazyAssert(); assert(messages.has(sym) === false, `Error symbol: ${sym} was already used.`); messages.set(sym, typeof val === 'function' ? val : String(val)); } diff --git a/lib/internal/http.js b/lib/internal/http.js index 1ba68a41a56..0b12bc7c8fb 100644 --- a/lib/internal/http.js +++ b/lib/internal/http.js @@ -1,5 +1,10 @@ 'use strict'; +function ondrain() { + if (this._httpMessage) this._httpMessage.emit('drain'); +} + module.exports = { - outHeadersKey: Symbol('outHeadersKey') + outHeadersKey: Symbol('outHeadersKey'), + ondrain, }; diff --git a/lib/internal/process.js b/lib/internal/process.js index ab6107c4c64..023f9740e0f 100644 --- a/lib/internal/process.js +++ b/lib/internal/process.js @@ -203,8 +203,7 @@ function setupSignalHandlers() { const signalWraps = {}; function isSignal(event) { - return typeof event === 'string' && - lazyConstants().hasOwnProperty(event); + return typeof event === 'string' && lazyConstants()[event] !== undefined; } // Detect presence of a listener for the special signal types diff --git a/lib/internal/querystring.js b/lib/internal/querystring.js index 2f8d77d3e9d..c5dc0f63c7b 100644 --- a/lib/internal/querystring.js +++ b/lib/internal/querystring.js @@ -4,6 +4,25 @@ const hexTable = new Array(256); for (var i = 0; i < 256; ++i) hexTable[i] = '%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase(); +const isHexTable = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0 - 15 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 16 - 31 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 32 - 47 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, // 48 - 63 + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 64 - 79 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 80 - 95 + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 96 - 111 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 112 - 127 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 128 ... + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 // ... 256 +]; + // Instantiating this is faster than explicitly calling `Object.create(null)` // to get a "clean" empty object (tested with v8 v4.9). 
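Concretely, an object built this way has no inherited members, so query-string keys such as `toString` or `hasOwnProperty` are stored as plain data. A standalone sketch of the same construction (mirroring the pattern rather than importing the internal module):

    'use strict';
    // Same shape as StorageObject below: a constructor whose prototype chain
    // ends at null.
    function CleanStorage() {}
    CleanStorage.prototype = Object.create(null);

    const dict = new CleanStorage();
    dict.toString = 'just data';            // no clash with Object.prototype
    console.log('hasOwnProperty' in dict);  // false: nothing is inherited
    console.log(dict.toString);             // 'just data'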
function StorageObject() {} @@ -11,5 +30,6 @@ StorageObject.prototype = Object.create(null); module.exports = { hexTable, + isHexTable, StorageObject }; diff --git a/lib/internal/url.js b/lib/internal/url.js index 005f5b66476..7a6ff227ed4 100644 --- a/lib/internal/url.js +++ b/lib/internal/url.js @@ -1,7 +1,11 @@ 'use strict'; const util = require('util'); -const { hexTable, StorageObject } = require('internal/querystring'); +const { + hexTable, + isHexTable, + StorageObject +} = require('internal/querystring'); const binding = process.binding('url'); const context = Symbol('context'); const cannotBeBase = Symbol('cannot-be-base'); @@ -95,8 +99,6 @@ class TupleOrigin { function onParseComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - throw new TypeError('Invalid URL'); var ctx = this[context]; ctx.flags = flags; ctx.scheme = protocol; @@ -114,25 +116,36 @@ function onParseComplete(flags, protocol, username, password, initSearchParams(this[searchParams], query); } +function onParseError(flags, input) { + const error = new TypeError('Invalid URL: ' + input); + error.input = input; + throw error; +} + // Reused by URL constructor and URL#href setter. function parse(url, input, base) { const base_context = base ? base[context] : undefined; url[context] = new StorageObject(); binding.parse(input.trim(), -1, base_context, undefined, - onParseComplete.bind(url)); + onParseComplete.bind(url), onParseError); } function onParseProtocolComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - return; const newIsSpecial = (flags & binding.URL_FLAGS_SPECIAL) !== 0; const s = this[special]; const ctx = this[context]; if ((s && !newIsSpecial) || (!s && newIsSpecial)) { return; } + if (protocol === 'file:' && + (ctx.username || ctx.password || ctx.port !== undefined)) { + return; + } + if (ctx.scheme === 'file:' && !ctx.host) { + return; + } if (newIsSpecial) { ctx.flags |= binding.URL_FLAGS_SPECIAL; } else { @@ -148,8 +161,6 @@ function onParseProtocolComplete(flags, protocol, username, password, function onParseHostComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - return; const ctx = this[context]; if (host) { ctx.host = host; @@ -163,8 +174,6 @@ function onParseHostComplete(flags, protocol, username, password, function onParseHostnameComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - return; const ctx = this[context]; if (host) { ctx.host = host; @@ -176,15 +185,11 @@ function onParseHostnameComplete(flags, protocol, username, password, function onParsePortComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - return; this[context].port = port; } function onParsePathComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - return; const ctx = this[context]; if (path) { ctx.path = path; @@ -196,16 +201,12 @@ function onParsePathComplete(flags, protocol, username, password, function onParseSearchComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if (flags & binding.URL_FLAGS_FAILED) - return; const ctx = this[context]; ctx.query = query; } function onParseHashComplete(flags, protocol, username, password, host, port, path, query, fragment) { - if 
(flags & binding.URL_FLAGS_FAILED) - return; const ctx = this[context]; if (fragment) { ctx.fragment = fragment; @@ -578,23 +579,106 @@ function initSearchParams(url, init) { url[searchParams] = []; return; } - url[searchParams] = getParamsFromObject(querystring.parse(init)); + url[searchParams] = parseParams(init); } -function getParamsFromObject(obj) { - const keys = Object.keys(obj); - const values = []; - for (var i = 0; i < keys.length; i++) { - const name = keys[i]; - const value = obj[name]; - if (Array.isArray(value)) { - for (const item of value) - values.push(name, item); - } else { - values.push(name, value); +// application/x-www-form-urlencoded parser +// Ref: https://url.spec.whatwg.org/#concept-urlencoded-parser +function parseParams(qs) { + const out = []; + var pairStart = 0; + var lastPos = 0; + var seenSep = false; + var buf = ''; + var encoded = false; + var encodeCheck = 0; + var i; + for (i = 0; i < qs.length; ++i) { + const code = qs.charCodeAt(i); + + // Try matching key/value pair separator + if (code === 38/*&*/) { + if (pairStart === i) { + // We saw an empty substring between pair separators + lastPos = pairStart = i + 1; + continue; + } + + if (lastPos < i) + buf += qs.slice(lastPos, i); + if (encoded) + buf = querystring.unescape(buf); + out.push(buf); + + // If `buf` is the key, add an empty value. + if (!seenSep) + out.push(''); + + seenSep = false; + buf = ''; + encoded = false; + encodeCheck = 0; + lastPos = pairStart = i + 1; + continue; + } + + // Try matching key/value separator (e.g. '=') if we haven't already + if (!seenSep && code === 61/*=*/) { + // Key/value separator match! + if (lastPos < i) + buf += qs.slice(lastPos, i); + if (encoded) + buf = querystring.unescape(buf); + out.push(buf); + + seenSep = true; + buf = ''; + encoded = false; + encodeCheck = 0; + lastPos = i + 1; + continue; + } + + // Handle + and percent decoding. + if (code === 43/*+*/) { + if (lastPos < i) + buf += qs.slice(lastPos, i); + buf += ' '; + lastPos = i + 1; + } else if (!encoded) { + // Try to match an (valid) encoded byte (once) to minimize unnecessary + // calls to string decoding functions + if (code === 37/*%*/) { + encodeCheck = 1; + } else if (encodeCheck > 0) { + // eslint-disable-next-line no-extra-boolean-cast + if (!!isHexTable[code]) { + if (++encodeCheck === 3) + encoded = true; + } else { + encodeCheck = 0; + } + } } } - return values; + + // Deal with any leftover key or value data + + // There is a trailing &. No more processing is needed. + if (pairStart === i) + return out; + + if (lastPos < i) + buf += qs.slice(lastPos, i); + if (encoded) + buf = querystring.unescape(buf); + out.push(buf); + + // If `buf` is the key, add an empty value. + if (!seenSep) + out.push(''); + + return out; } // Adapted from querystring's implementation. 
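The new `parseParams()` above replaces the old `querystring.parse()` round-trip when initialising a URL's search parameters: it applies the WHATWG application/x-www-form-urlencoded rules and builds the flat `[name, value, name, value, ...]` list directly, preserving pair order. A simplified, standalone sketch of those rules (not the internal implementation; it leans on `querystring.unescape()` for percent-decoding):

    'use strict';
    const { unescape } = require('querystring');

    function parseParamsSketch(qs) {
      const out = [];
      for (const pair of qs.split('&')) {
        if (pair === '')                 // empty segment between '&'s: skip it
          continue;
        const eq = pair.indexOf('=');
        const rawKey = eq === -1 ? pair : pair.slice(0, eq);
        const rawVal = eq === -1 ? '' : pair.slice(eq + 1);
        // '+' decodes to a space, then percent-escapes are decoded.
        out.push(unescape(rawKey.replace(/\+/g, ' ')),
                 unescape(rawVal.replace(/\+/g, ' ')));
      }
      return out;
    }

    console.log(parseParamsSketch('a=1&b=2+2&c'));
    // => [ 'a', '1', 'b', '2 2', 'c', '' ]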
diff --git a/lib/querystring.js b/lib/querystring.js index 80e45842035..1533c8d87b7 100644 --- a/lib/querystring.js +++ b/lib/querystring.js @@ -24,7 +24,11 @@ 'use strict'; const { Buffer } = require('buffer'); -const { StorageObject, hexTable } = require('internal/querystring'); +const { + StorageObject, + hexTable, + isHexTable +} = require('internal/querystring'); const QueryString = module.exports = { unescapeBuffer, // `unescape()` is a JS global, so we need to use a different local name @@ -264,25 +268,6 @@ function stringify(obj, sep, eq, options) { return ''; } -const isHexTable = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0 - 15 - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 16 - 31 - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 32 - 47 - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, // 48 - 63 - 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 64 - 79 - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 80 - 95 - 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 96 - 111 - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 112 - 127 - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 128 ... - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 // ... 256 -]; - function charCodes(str) { if (str.length === 0) return []; if (str.length === 1) return [str.charCodeAt(0)]; diff --git a/lib/readline.js b/lib/readline.js index 3b719560b6d..d3245615566 100644 --- a/lib/readline.js +++ b/lib/readline.js @@ -60,7 +60,7 @@ function Interface(input, output, completer, terminal) { EventEmitter.call(this); var historySize; - var deDupeHistory = false; + var removeHistoryDuplicates = false; let crlfDelay; let prompt = '> '; @@ -70,7 +70,7 @@ function Interface(input, output, completer, terminal) { completer = input.completer; terminal = input.terminal; historySize = input.historySize; - deDupeHistory = input.deDupeHistory; + removeHistoryDuplicates = input.removeHistoryDuplicates; if (input.prompt !== undefined) { prompt = input.prompt; } @@ -103,7 +103,7 @@ function Interface(input, output, completer, terminal) { this.output = output; this.input = input; this.historySize = historySize; - this.deDupeHistory = !!deDupeHistory; + this.removeHistoryDuplicates = !!removeHistoryDuplicates; this.crlfDelay = Math.max(kMincrlfDelay, Math.min(kMaxcrlfDelay, crlfDelay >>> 0)); @@ -281,7 +281,7 @@ Interface.prototype._addHistory = function() { if (this.line.trim().length === 0) return this.line; if (this.history.length === 0 || this.history[0] !== this.line) { - if (this.deDupeHistory) { + if (this.removeHistoryDuplicates) { // Remove older history line if identical to new one const dupIndex = this.history.indexOf(this.line); if (dupIndex !== -1) this.history.splice(dupIndex, 1); diff --git a/lib/timers.js b/lib/timers.js index 2c6c3fc0e6e..115c3c82963 100644 --- a/lib/timers.js +++ b/lib/timers.js @@ -243,7 +243,6 @@ function listOnTimeout() { // As such, we can remove the list and clean up the TimerWrap C++ handle. debug('%d list empty', msecs); assert(L.isEmpty(list)); - this.close(); // Either refedLists[msecs] or unrefedLists[msecs] may have been removed and // recreated since the reference to `list` was created. 
Make sure they're @@ -253,6 +252,13 @@ function listOnTimeout() { } else if (list === refedLists[msecs]) { delete refedLists[msecs]; } + + // Do not close the underlying handle if its ownership has changed + // (e.g it was unrefed in its callback). + if (this.owner) + return; + + this.close(); } diff --git a/lib/util.js b/lib/util.js index b243c81dd50..80195c26722 100644 --- a/lib/util.js +++ b/lib/util.js @@ -38,14 +38,21 @@ const inspectDefaultOptions = Object.seal({ breakLength: 60 }); -const CIRCULAR_ERROR_MESSAGE = 'Converting circular structure to JSON'; - +var CIRCULAR_ERROR_MESSAGE; var Debug; function tryStringify(arg) { try { return JSON.stringify(arg); } catch (err) { + // Populate the circular error message lazily + if (!CIRCULAR_ERROR_MESSAGE) { + try { + const a = {}; a.a = a; JSON.stringify(a); + } catch (err) { + CIRCULAR_ERROR_MESSAGE = err.message; + } + } if (err.name === 'TypeError' && err.message === CIRCULAR_ERROR_MESSAGE) return '[Circular]'; throw err; diff --git a/src/node.cc b/src/node.cc index 2dc3de768ad..26b3f1dbc79 100644 --- a/src/node.cc +++ b/src/node.cc @@ -2700,6 +2700,8 @@ static void Binding(const FunctionCallbackInfo& args) { cache->Set(module, exports); } else if (!strcmp(*module_v, "constants")) { exports = Object::New(env->isolate()); + CHECK(exports->SetPrototype(env->context(), + Null(env->isolate())).FromJust()); DefineConstants(env->isolate(), exports); cache->Set(module, exports); } else if (!strcmp(*module_v, "natives")) { @@ -4312,6 +4314,19 @@ inline void PlatformInit() { } while (min + 1 < max); } #endif // __POSIX__ +#ifdef _WIN32 + for (int fd = 0; fd <= 2; ++fd) { + auto handle = reinterpret_cast(_get_osfhandle(fd)); + if (handle == INVALID_HANDLE_VALUE || + GetFileType(handle) == FILE_TYPE_UNKNOWN) { + // Ignore _close result. If it fails or not depends on used Windows + // version. We will just check _open result. + _close(fd); + if (fd != _open("nul", _O_RDWR)) + ABORT(); + } + } +#endif // _WIN32 } diff --git a/src/node_buffer.cc b/src/node_buffer.cc index 78b1845a2d5..85e202fc02b 100644 --- a/src/node_buffer.cc +++ b/src/node_buffer.cc @@ -169,7 +169,8 @@ void CallbackInfo::WeakCallback(Isolate* isolate) { // Parse index for external array data. inline MUST_USE_RESULT bool ParseArrayIndex(Local arg, size_t def, - size_t* ret) { + size_t* ret, + size_t needed = 0) { if (arg->IsUndefined()) { *ret = def; return true; @@ -183,7 +184,7 @@ inline MUST_USE_RESULT bool ParseArrayIndex(Local arg, // Check that the result fits in a size_t. const uint64_t kSizeMax = static_cast(static_cast(-1)); // coverity[pointless_expression] - if (static_cast(tmp_i) > kSizeMax) + if (static_cast(tmp_i) > kSizeMax - needed) return false; *ret = static_cast(tmp_i); @@ -868,17 +869,28 @@ void WriteFloatGeneric(const FunctionCallbackInfo& args) { CHECK_NE(ts_obj_data, nullptr); T val = args[1]->NumberValue(env->context()).FromMaybe(0); - size_t offset = args[2]->IntegerValue(env->context()).FromMaybe(0); size_t memcpy_num = sizeof(T); + size_t offset; - if (should_assert) { - THROW_AND_RETURN_IF_OOB(offset + memcpy_num >= memcpy_num); - THROW_AND_RETURN_IF_OOB(offset + memcpy_num <= ts_obj_length); + // If the offset is negative or larger than the size of the ArrayBuffer, + // throw an error (if needed) and return directly. 
+ if (!ParseArrayIndex(args[2], 0, &offset, memcpy_num) || + offset >= ts_obj_length) { + if (should_assert) + THROW_AND_RETURN_IF_OOB(false); + return; } - if (offset + memcpy_num > ts_obj_length) - memcpy_num = ts_obj_length - offset; + // If the offset is too large for the entire value, but small enough to fit + // part of the value, throw an error and return only if should_assert is + // true. Otherwise, write the part of the value that fits. + if (offset + memcpy_num > ts_obj_length) { + if (should_assert) + THROW_AND_RETURN_IF_OOB(false); + else + memcpy_num = ts_obj_length - offset; + } union NoAlias { T val; diff --git a/src/node_constants.cc b/src/node_constants.cc index bed87b76498..5bde53fcdf1 100644 --- a/src/node_constants.cc +++ b/src/node_constants.cc @@ -1245,12 +1245,31 @@ void DefineZlibConstants(Local target) { } void DefineConstants(v8::Isolate* isolate, Local target) { + Environment* env = Environment::GetCurrent(isolate); + Local os_constants = Object::New(isolate); + CHECK(os_constants->SetPrototype(env->context(), + Null(env->isolate())).FromJust()); + Local err_constants = Object::New(isolate); + CHECK(err_constants->SetPrototype(env->context(), + Null(env->isolate())).FromJust()); + Local sig_constants = Object::New(isolate); + CHECK(sig_constants->SetPrototype(env->context(), + Null(env->isolate())).FromJust()); + Local fs_constants = Object::New(isolate); + CHECK(fs_constants->SetPrototype(env->context(), + Null(env->isolate())).FromJust()); + Local crypto_constants = Object::New(isolate); + CHECK(crypto_constants->SetPrototype(env->context(), + Null(env->isolate())).FromJust()); + Local zlib_constants = Object::New(isolate); + CHECK(zlib_constants->SetPrototype(env->context(), + Null(env->isolate())).FromJust()); DefineErrnoConstants(err_constants); DefineWindowsErrorConstants(err_constants); diff --git a/src/node_contextify.cc b/src/node_contextify.cc index d34d3a29dd6..2ab73478194 100644 --- a/src/node_contextify.cc +++ b/src/node_contextify.cc @@ -769,8 +769,7 @@ class ContextifyScript : public BaseObject { return -1; } - Local key = FIXED_ONE_BYTE_STRING(env->isolate(), "timeout"); - Local value = options.As()->Get(key); + Local value = options.As()->Get(env->timeout_string()); if (value->IsUndefined()) { return -1; } diff --git a/src/node_crypto.cc b/src/node_crypto.cc index 70050806779..7698cf2062c 100644 --- a/src/node_crypto.cc +++ b/src/node_crypto.cc @@ -141,9 +141,11 @@ static X509_NAME *cnnic_ev_name = static Mutex* mutexes; +#if !defined(NODE_OPENSSL_CERT_STORE) const char* const root_certs[] = { #include "node_root_certs.h" // NOLINT(build/include_order) }; +#endif std::string extra_root_certs_file; // NOLINT(runtime/string) @@ -718,6 +720,7 @@ static int X509_up_ref(X509* cert) { static X509_STORE* NewRootCertStore() { +#if !defined(NODE_OPENSSL_CERT_STORE) if (root_certs_vector.empty()) { for (size_t i = 0; i < arraysize(root_certs); i++) { BIO* bp = NodeBIO::NewFixed(root_certs[i], strlen(root_certs[i])); @@ -730,6 +733,7 @@ static X509_STORE* NewRootCertStore() { root_certs_vector.push_back(x509); } } +#endif X509_STORE* store = X509_STORE_new(); if (ssl_openssl_cert_store) { @@ -2076,8 +2080,7 @@ void SSLWrap::VerifyError(const FunctionCallbackInfo& args) { Local reason_string = OneByteString(isolate, reason); Local exception_value = Exception::Error(reason_string); Local exception_object = exception_value->ToObject(isolate); - exception_object->Set(FIXED_ONE_BYTE_STRING(isolate, "code"), - OneByteString(isolate, code)); + 
exception_object->Set(w->env()->code_string(), OneByteString(isolate, code)); args.GetReturnValue().Set(exception_object); } @@ -3245,11 +3248,7 @@ void Connection::Start(const FunctionCallbackInfo& args) { void Connection::Close(const FunctionCallbackInfo& args) { Connection* conn; ASSIGN_OR_RETURN_UNWRAP(&conn, args.Holder()); - - if (conn->ssl_ != nullptr) { - SSL_free(conn->ssl_); - conn->ssl_ = nullptr; - } + conn->DestroySSL(); } @@ -3276,7 +3275,7 @@ void Connection::SetSNICallback(const FunctionCallbackInfo& args) { } Local obj = Object::New(env->isolate()); - obj->Set(FIXED_ONE_BYTE_STRING(args.GetIsolate(), "onselect"), args[0]); + obj->Set(env->onselect_string(), args[0]); conn->sniObject_.Reset(args.GetIsolate(), obj); } #endif @@ -5605,7 +5604,7 @@ void RandomBytes(const FunctionCallbackInfo& args) { RandomBytesRequest* req = new RandomBytesRequest(env, obj, size); if (args[1]->IsFunction()) { - obj->Set(FIXED_ONE_BYTE_STRING(args.GetIsolate(), "ondone"), args[1]); + obj->Set(env->ondone_string(), args[1]); if (env->in_domain()) obj->Set(env->domain_string(), env->domain_array()->Get(0)); diff --git a/src/node_url.cc b/src/node_url.cc index 1aae5571158..6cd78c2c6c0 100644 --- a/src/node_url.cc +++ b/src/node_url.cc @@ -47,45 +47,6 @@ using v8::Value; } \ } -#define CANNOT_BE_BASE() url.flags |= URL_FLAGS_CANNOT_BE_BASE; -#define INVALID_PARSE_STATE() url.flags |= URL_FLAGS_INVALID_PARSE_STATE; -#define SPECIAL() \ - { \ - url.flags |= URL_FLAGS_SPECIAL; \ - special = true; \ - } -#define TERMINATE() \ - { \ - url.flags |= URL_FLAGS_TERMINATED; \ - goto done; \ - } -#define URL_FAILED() \ - { \ - url.flags |= URL_FLAGS_FAILED; \ - goto done; \ - } - -#define CHECK_FLAG(flags, name) (flags & URL_FLAGS_##name) /* NOLINT */ - -#define IS_CANNOT_BE_BASE(flags) CHECK_FLAG(flags, CANNOT_BE_BASE) -#define IS_FAILED(flags) CHECK_FLAG(flags, FAILED) - -#define DOES_HAVE_SCHEME(url) CHECK_FLAG(url.flags, HAS_SCHEME) -#define DOES_HAVE_USERNAME(url) CHECK_FLAG(url.flags, HAS_USERNAME) -#define DOES_HAVE_PASSWORD(url) CHECK_FLAG(url.flags, HAS_PASSWORD) -#define DOES_HAVE_HOST(url) CHECK_FLAG(url.flags, HAS_HOST) -#define DOES_HAVE_PATH(url) CHECK_FLAG(url.flags, HAS_PATH) -#define DOES_HAVE_QUERY(url) CHECK_FLAG(url.flags, HAS_QUERY) -#define DOES_HAVE_FRAGMENT(url) CHECK_FLAG(url.flags, HAS_FRAGMENT) - -#define SET_HAVE_SCHEME() url.flags |= URL_FLAGS_HAS_SCHEME; -#define SET_HAVE_USERNAME() url.flags |= URL_FLAGS_HAS_USERNAME; -#define SET_HAVE_PASSWORD() url.flags |= URL_FLAGS_HAS_PASSWORD; -#define SET_HAVE_HOST() url.flags |= URL_FLAGS_HAS_HOST; -#define SET_HAVE_PATH() url.flags |= URL_FLAGS_HAS_PATH; -#define SET_HAVE_QUERY() url.flags |= URL_FLAGS_HAS_QUERY; -#define SET_HAVE_FRAGMENT() url.flags |= URL_FLAGS_HAS_FRAGMENT; - #define UTF8STRING(isolate, str) \ String::NewFromUtf8(isolate, str.c_str(), v8::NewStringType::kNormal) \ .ToLocalChecked() @@ -93,7 +54,7 @@ using v8::Value; namespace url { #if defined(NODE_HAVE_I18N_SUPPORT) - static bool ToUnicode(std::string* input, std::string* output) { + static inline bool ToUnicode(std::string* input, std::string* output) { MaybeStackBuffer buf; if (i18n::ToUnicode(&buf, input->c_str(), input->length()) < 0) return false; @@ -101,7 +62,7 @@ namespace url { return true; } - static bool ToASCII(std::string* input, std::string* output) { + static inline bool ToASCII(std::string* input, std::string* output) { MaybeStackBuffer buf; if (i18n::ToASCII(&buf, input->c_str(), input->length()) < 0) return false; @@ -110,12 +71,12 @@ namespace 
url { } #else // Intentional non-ops if ICU is not present. - static bool ToUnicode(std::string* input, std::string* output) { + static inline bool ToUnicode(std::string* input, std::string* output) { *output = *input; return true; } - static bool ToASCII(std::string* input, std::string* output) { + static inline bool ToASCII(std::string* input, std::string* output) { *output = *input; return true; } @@ -368,8 +329,7 @@ namespace url { } // First, we have to percent decode - if (PercentDecode(input, length, &decoded) < 0) - goto end; + PercentDecode(input, length, &decoded); // Then we have to punycode toASCII if (!ToASCII(&decoded, &decoded)) @@ -620,41 +580,26 @@ namespace url { url->path.pop_back(); } - static void Parse(Environment* env, - Local recv, - const char* input, - const size_t len, - enum url_parse_state state_override, - Local base_obj, - Local context_obj, - Local cb) { - Isolate* isolate = env->isolate(); - Local context = env->context(); - HandleScope handle_scope(isolate); - Context::Scope context_scope(context); - - const bool has_base = base_obj->IsObject(); + void URL::Parse(const char* input, + const size_t len, + enum url_parse_state state_override, + struct url_data* url, + const struct url_data* base, + bool has_base) { bool atflag = false; bool sbflag = false; bool uflag = false; bool base_is_file = false; int wskip = 0; - struct url_data base; - struct url_data url; - if (context_obj->IsObject()) - HarvestContext(env, &url, context_obj.As()); - if (has_base) - HarvestBase(env, &base, base_obj.As()); - std::string buffer; - url.scheme.reserve(len); - url.username.reserve(len); - url.password.reserve(len); - url.host.reserve(len); - url.path.reserve(len); - url.query.reserve(len); - url.fragment.reserve(len); + url->scheme.reserve(len); + url->username.reserve(len); + url->password.reserve(len); + url->host.reserve(len); + url->path.reserve(len); + url->query.reserve(len); + url->fragment.reserve(len); buffer.reserve(len); // Set the initial parse state. 
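The parser rewrite also drops the `SET_HAVE_*` / `DOES_HAVE_*` macro layer in favour of explicit bit operations on `url->flags`; the JavaScript side in `lib/internal/url.js` reads the same bits, for example `flags & binding.URL_FLAGS_SPECIAL`. A minimal sketch of that flag handling, with made-up values standing in for the real `URL_FLAGS_*` constants:

    'use strict';
    // Hypothetical stand-ins for the URL_FLAGS_* constants on the binding.
    const FLAG_SPECIAL = 1 << 0;
    const FLAG_HAS_SCHEME = 1 << 1;
    const FLAG_HAS_HOST = 1 << 2;

    let flags = 0;
    flags |= FLAG_HAS_SCHEME;    // set:   url->flags |= URL_FLAGS_HAS_SCHEME
    flags |= FLAG_SPECIAL;
    flags &= ~FLAG_SPECIAL;      // clear: url->flags &= ~URL_FLAGS_SPECIAL

    console.log((flags & FLAG_HAS_SCHEME) !== 0);  // true
    console.log((flags & FLAG_HAS_HOST) !== 0);    // false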
@@ -666,8 +611,8 @@ namespace url { const char* end = input + len; if (state < kSchemeStart || state > kFragment) { - INVALID_PARSE_STATE(); - goto done; + url->flags |= URL_FLAGS_INVALID_PARSE_STATE; + return; } while (p <= end) { @@ -685,7 +630,8 @@ namespace url { continue; } - bool special = url.flags & URL_FLAGS_SPECIAL; + bool special = (url->flags & URL_FLAGS_SPECIAL); + bool cannot_be_base; const bool special_back_slash = (special && ch == '\\'); switch (state) { case kSchemeStart: @@ -696,7 +642,8 @@ namespace url { state = kNoScheme; continue; } else { - TERMINATE() + url->flags |= URL_FLAGS_TERMINATED; + return; } break; case kScheme: @@ -707,23 +654,24 @@ namespace url { } else if (ch == ':' || (has_state_override && ch == kEOL)) { buffer += ':'; if (buffer.size() > 0) { - SET_HAVE_SCHEME() - url.scheme = buffer; + url->flags |= URL_FLAGS_HAS_SCHEME; + url->scheme = buffer; } - if (IsSpecial(url.scheme)) { - SPECIAL() + if (IsSpecial(url->scheme)) { + url->flags |= URL_FLAGS_SPECIAL; + special = true; } else { - url.flags &= ~URL_FLAGS_SPECIAL; + url->flags &= ~URL_FLAGS_SPECIAL; } if (has_state_override) - goto done; + return; buffer.clear(); - if (url.scheme == "file:") { + if (url->scheme == "file:") { state = kFile; } else if (special && has_base && - DOES_HAVE_SCHEME(base) && - url.scheme == base.scheme) { + base->flags & URL_FLAGS_HAS_SCHEME && + url->scheme == base->scheme) { state = kSpecialRelativeOrAuthority; } else if (special) { state = kSpecialAuthoritySlashes; @@ -731,9 +679,9 @@ namespace url { state = kPathOrAuthority; p++; } else { - CANNOT_BE_BASE() - SET_HAVE_PATH() - url.path.push_back(""); + url->flags |= URL_FLAGS_CANNOT_BE_BASE; + url->flags |= URL_FLAGS_HAS_PATH; + url->path.push_back(""); state = kCannotBeBase; } } else if (!has_state_override) { @@ -742,43 +690,48 @@ namespace url { p = input; continue; } else { - TERMINATE() + url->flags |= URL_FLAGS_TERMINATED; + return; } break; case kNoScheme: - if (!has_base || (IS_CANNOT_BE_BASE(base.flags) && ch != '#')) { - URL_FAILED() - } else if (IS_CANNOT_BE_BASE(base.flags) && ch == '#') { - SET_HAVE_SCHEME() - url.scheme = base.scheme; - if (IsSpecial(url.scheme)) { - SPECIAL() + cannot_be_base = base->flags & URL_FLAGS_CANNOT_BE_BASE; + if (!has_base || (cannot_be_base && ch != '#')) { + url->flags |= URL_FLAGS_FAILED; + return; + } else if (cannot_be_base && ch == '#') { + url->flags |= URL_FLAGS_HAS_SCHEME; + url->scheme = base->scheme; + if (IsSpecial(url->scheme)) { + url->flags |= URL_FLAGS_SPECIAL; + special = true; } else { - url.flags &= ~URL_FLAGS_SPECIAL; + url->flags &= ~URL_FLAGS_SPECIAL; } - if (DOES_HAVE_PATH(base)) { - SET_HAVE_PATH() - url.path = base.path; + if (base->flags & URL_FLAGS_HAS_PATH) { + url->flags |= URL_FLAGS_HAS_PATH; + url->path = base->path; } - if (DOES_HAVE_QUERY(base)) { - SET_HAVE_QUERY() - url.query = base.query; + if (base->flags & URL_FLAGS_HAS_QUERY) { + url->flags |= URL_FLAGS_HAS_QUERY; + url->query = base->query; } - if (DOES_HAVE_FRAGMENT(base)) { - SET_HAVE_FRAGMENT() - url.fragment = base.fragment; + if (base->flags & URL_FLAGS_HAS_FRAGMENT) { + url->flags |= URL_FLAGS_HAS_FRAGMENT; + url->fragment = base->fragment; } - CANNOT_BE_BASE() + url->flags |= URL_FLAGS_CANNOT_BE_BASE; state = kFragment; } else if (has_base && - DOES_HAVE_SCHEME(base) && - base.scheme != "file:") { + base->flags & URL_FLAGS_HAS_SCHEME && + base->scheme != "file:") { state = kRelative; continue; } else { - SET_HAVE_SCHEME() - url.scheme = "file:"; - SPECIAL() + url->flags |= 
URL_FLAGS_HAS_SCHEME; + url->scheme = "file:"; + url->flags |= URL_FLAGS_SPECIAL; + special = true; state = kFile; continue; } @@ -801,106 +754,107 @@ namespace url { } break; case kRelative: - SET_HAVE_SCHEME() - url.scheme = base.scheme; - if (IsSpecial(url.scheme)) { - SPECIAL() + url->flags |= URL_FLAGS_HAS_SCHEME; + url->scheme = base->scheme; + if (IsSpecial(url->scheme)) { + url->flags |= URL_FLAGS_SPECIAL; + special = true; } else { - url.flags &= ~URL_FLAGS_SPECIAL; + url->flags &= ~URL_FLAGS_SPECIAL; } switch (ch) { case kEOL: - if (DOES_HAVE_USERNAME(base)) { - SET_HAVE_USERNAME() - url.username = base.username; + if (base->flags & URL_FLAGS_HAS_USERNAME) { + url->flags |= URL_FLAGS_HAS_USERNAME; + url->username = base->username; } - if (DOES_HAVE_PASSWORD(base)) { - SET_HAVE_PASSWORD() - url.password = base.password; + if (base->flags & URL_FLAGS_HAS_PASSWORD) { + url->flags |= URL_FLAGS_HAS_PASSWORD; + url->password = base->password; } - if (DOES_HAVE_HOST(base)) { - SET_HAVE_HOST() - url.host = base.host; + if (base->flags & URL_FLAGS_HAS_HOST) { + url->flags |= URL_FLAGS_HAS_HOST; + url->host = base->host; } - if (DOES_HAVE_QUERY(base)) { - SET_HAVE_QUERY() - url.query = base.query; + if (base->flags & URL_FLAGS_HAS_QUERY) { + url->flags |= URL_FLAGS_HAS_QUERY; + url->query = base->query; } - if (DOES_HAVE_PATH(base)) { - SET_HAVE_PATH() - url.path = base.path; + if (base->flags & URL_FLAGS_HAS_PATH) { + url->flags |= URL_FLAGS_HAS_PATH; + url->path = base->path; } - url.port = base.port; + url->port = base->port; break; case '/': state = kRelativeSlash; break; case '?': - if (DOES_HAVE_USERNAME(base)) { - SET_HAVE_USERNAME() - url.username = base.username; + if (base->flags & URL_FLAGS_HAS_USERNAME) { + url->flags |= URL_FLAGS_HAS_USERNAME; + url->username = base->username; } - if (DOES_HAVE_PASSWORD(base)) { - SET_HAVE_PASSWORD() - url.password = base.password; + if (base->flags & URL_FLAGS_HAS_PASSWORD) { + url->flags |= URL_FLAGS_HAS_PASSWORD; + url->password = base->password; } - if (DOES_HAVE_HOST(base)) { - SET_HAVE_HOST() - url.host = base.host; + if (base->flags & URL_FLAGS_HAS_HOST) { + url->flags |= URL_FLAGS_HAS_HOST; + url->host = base->host; } - if (DOES_HAVE_PATH(base)) { - SET_HAVE_PATH() - url.path = base.path; + if (base->flags & URL_FLAGS_HAS_PATH) { + url->flags |= URL_FLAGS_HAS_PATH; + url->path = base->path; } - url.port = base.port; + url->port = base->port; state = kQuery; break; case '#': - if (DOES_HAVE_USERNAME(base)) { - SET_HAVE_USERNAME() - url.username = base.username; + if (base->flags & URL_FLAGS_HAS_USERNAME) { + url->flags |= URL_FLAGS_HAS_USERNAME; + url->username = base->username; } - if (DOES_HAVE_PASSWORD(base)) { - SET_HAVE_PASSWORD() - url.password = base.password; + if (base->flags & URL_FLAGS_HAS_PASSWORD) { + url->flags |= URL_FLAGS_HAS_PASSWORD; + url->password = base->password; } - if (DOES_HAVE_HOST(base)) { - SET_HAVE_HOST() - url.host = base.host; + if (base->flags & URL_FLAGS_HAS_HOST) { + url->flags |= URL_FLAGS_HAS_HOST; + url->host = base->host; } - if (DOES_HAVE_QUERY(base)) { - SET_HAVE_QUERY() - url.query = base.query; + if (base->flags & URL_FLAGS_HAS_QUERY) { + url->flags |= URL_FLAGS_HAS_QUERY; + url->query = base->query; } - if (DOES_HAVE_PATH(base)) { - SET_HAVE_PATH() - url.path = base.path; + if (base->flags & URL_FLAGS_HAS_PATH) { + url->flags |= URL_FLAGS_HAS_PATH; + url->path = base->path; } - url.port = base.port; + url->port = base->port; state = kFragment; break; default: if (special_back_slash) { state = 
kRelativeSlash; } else { - if (DOES_HAVE_USERNAME(base)) { - SET_HAVE_USERNAME() - url.username = base.username; + if (base->flags & URL_FLAGS_HAS_USERNAME) { + url->flags |= URL_FLAGS_HAS_USERNAME; + url->username = base->username; } - if (DOES_HAVE_PASSWORD(base)) { - SET_HAVE_PASSWORD() - url.password = base.password; + if (base->flags & URL_FLAGS_HAS_PASSWORD) { + url->flags |= URL_FLAGS_HAS_PASSWORD; + url->password = base->password; } - if (DOES_HAVE_HOST(base)) { - SET_HAVE_HOST() - url.host = base.host; + if (base->flags & URL_FLAGS_HAS_HOST) { + url->flags |= URL_FLAGS_HAS_HOST; + url->host = base->host; } - if (DOES_HAVE_PATH(base)) { - SET_HAVE_PATH() - url.path = base.path; - ShortenUrlPath(&url); + if (base->flags & URL_FLAGS_HAS_PATH) { + url->flags |= URL_FLAGS_HAS_PATH; + url->path = base->path; + ShortenUrlPath(url); } - url.port = base.port; + url->port = base->port; state = kPath; continue; } @@ -910,19 +864,19 @@ namespace url { if (ch == '/' || special_back_slash) { state = kSpecialAuthorityIgnoreSlashes; } else { - if (DOES_HAVE_USERNAME(base)) { - SET_HAVE_USERNAME() - url.username = base.username; + if (base->flags & URL_FLAGS_HAS_USERNAME) { + url->flags |= URL_FLAGS_HAS_USERNAME; + url->username = base->username; } - if (DOES_HAVE_PASSWORD(base)) { - SET_HAVE_PASSWORD() - url.password = base.password; + if (base->flags & URL_FLAGS_HAS_PASSWORD) { + url->flags |= URL_FLAGS_HAS_PASSWORD; + url->password = base->password; } - if (DOES_HAVE_HOST(base)) { - SET_HAVE_HOST() - url.host = base.host; + if (base->flags & URL_FLAGS_HAS_HOST) { + url->flags |= URL_FLAGS_HAS_HOST; + url->host = base->host; } - url.port = base.port; + url->port = base->port; state = kPath; continue; } @@ -950,21 +904,21 @@ namespace url { atflag = true; const size_t blen = buffer.size(); if (blen > 0 && buffer[0] != ':') { - SET_HAVE_USERNAME() + url->flags |= URL_FLAGS_HAS_USERNAME; } for (size_t n = 0; n < blen; n++) { const char bch = buffer[n]; if (bch == ':') { - SET_HAVE_PASSWORD() + url->flags |= URL_FLAGS_HAS_PASSWORD; if (!uflag) { uflag = true; continue; } } if (uflag) { - AppendOrEscape(&url.password, bch, UserinfoEncodeSet); + AppendOrEscape(&url->password, bch, UserinfoEncodeSet); } else { - AppendOrEscape(&url.username, bch, UserinfoEncodeSet); + AppendOrEscape(&url->username, bch, UserinfoEncodeSet); } } buffer.clear(); @@ -983,30 +937,42 @@ namespace url { case kHost: case kHostname: if (ch == ':' && !sbflag) { - if (special && buffer.size() == 0) - URL_FAILED() - SET_HAVE_HOST() - if (!ParseHost(&buffer, &url.host)) - URL_FAILED() + if (special && buffer.size() == 0) { + url->flags |= URL_FLAGS_FAILED; + return; + } + url->flags |= URL_FLAGS_HAS_HOST; + if (!ParseHost(&buffer, &url->host)) { + url->flags |= URL_FLAGS_FAILED; + return; + } buffer.clear(); state = kPort; - if (state_override == kHostname) - TERMINATE() + if (state_override == kHostname) { + url->flags |= URL_FLAGS_TERMINATED; + return; + } } else if (ch == kEOL || ch == '/' || ch == '?' 
|| ch == '#' || special_back_slash) { p--; - if (special && buffer.size() == 0) - URL_FAILED() - SET_HAVE_HOST() - if (!ParseHost(&buffer, &url.host)) - URL_FAILED() + if (special && buffer.size() == 0) { + url->flags |= URL_FLAGS_FAILED; + return; + } + url->flags |= URL_FLAGS_HAS_HOST; + if (!ParseHost(&buffer, &url->host)) { + url->flags |= URL_FLAGS_FAILED; + return; + } buffer.clear(); state = kPathStart; - if (has_state_override) - TERMINATE() + if (has_state_override) { + url->flags |= URL_FLAGS_TERMINATED; + return; + } } else { if (ch == '[') sbflag = true; @@ -1029,37 +995,39 @@ namespace url { for (size_t i = 0; i < buffer.size(); i++) port = port * 10 + buffer[i] - '0'; if (port >= 0 && port <= 0xffff) { - url.port = NormalizePort(url.scheme, port); + url->port = NormalizePort(url->scheme, port); } else if (!has_state_override) { - URL_FAILED() + url->flags |= URL_FLAGS_FAILED; + return; } buffer.clear(); } state = kPathStart; continue; } else { - URL_FAILED(); + url->flags |= URL_FLAGS_FAILED; + return; } break; case kFile: base_is_file = ( has_base && - DOES_HAVE_SCHEME(base) && - base.scheme == "file:"); + base->flags & URL_FLAGS_HAS_SCHEME && + base->scheme == "file:"); switch (ch) { case kEOL: if (base_is_file) { - if (DOES_HAVE_HOST(base)) { - SET_HAVE_HOST() - url.host = base.host; + if (base->flags & URL_FLAGS_HAS_HOST) { + url->flags |= URL_FLAGS_HAS_HOST; + url->host = base->host; } - if (DOES_HAVE_PATH(base)) { - SET_HAVE_PATH() - url.path = base.path; + if (base->flags & URL_FLAGS_HAS_PATH) { + url->flags |= URL_FLAGS_HAS_PATH; + url->path = base->path; } - if (DOES_HAVE_QUERY(base)) { - SET_HAVE_QUERY() - url.query = base.query; + if (base->flags & URL_FLAGS_HAS_QUERY) { + url->flags |= URL_FLAGS_HAS_QUERY; + url->query = base->query; } break; } @@ -1071,31 +1039,31 @@ namespace url { break; case '?': if (base_is_file) { - if (DOES_HAVE_HOST(base)) { - SET_HAVE_HOST() - url.host = base.host; + if (base->flags & URL_FLAGS_HAS_HOST) { + url->flags |= URL_FLAGS_HAS_HOST; + url->host = base->host; } - if (DOES_HAVE_PATH(base)) { - SET_HAVE_PATH() - url.path = base.path; + if (base->flags & URL_FLAGS_HAS_PATH) { + url->flags |= URL_FLAGS_HAS_PATH; + url->path = base->path; } - SET_HAVE_QUERY() + url->flags |= URL_FLAGS_HAS_QUERY; state = kQuery; break; } case '#': if (base_is_file) { - if (DOES_HAVE_HOST(base)) { - SET_HAVE_HOST() - url.host = base.host; + if (base->flags & URL_FLAGS_HAS_HOST) { + url->flags |= URL_FLAGS_HAS_HOST; + url->host = base->host; } - if (DOES_HAVE_PATH(base)) { - SET_HAVE_PATH() - url.path = base.path; + if (base->flags & URL_FLAGS_HAS_PATH) { + url->flags |= URL_FLAGS_HAS_PATH; + url->path = base->path; } - if (DOES_HAVE_QUERY(base)) { - SET_HAVE_QUERY() - url.query = base.query; + if (base->flags & URL_FLAGS_HAS_QUERY) { + url->flags |= URL_FLAGS_HAS_QUERY; + url->query = base->query; } state = kFragment; break; @@ -1108,15 +1076,15 @@ namespace url { p[2] != '\\' && p[2] != '?' 
&& p[2] != '#'))) { - if (DOES_HAVE_HOST(base)) { - SET_HAVE_HOST() - url.host = base.host; + if (base->flags & URL_FLAGS_HAS_HOST) { + url->flags |= URL_FLAGS_HAS_HOST; + url->host = base->host; } - if (DOES_HAVE_PATH(base)) { - SET_HAVE_PATH() - url.path = base.path; + if (base->flags & URL_FLAGS_HAS_PATH) { + url->flags |= URL_FLAGS_HAS_PATH; + url->path = base->path; } - ShortenUrlPath(&url); + ShortenUrlPath(url); } state = kPath; continue; @@ -1127,13 +1095,13 @@ namespace url { state = kFileHost; } else { if (has_base && - DOES_HAVE_SCHEME(base) && - base.scheme == "file:" && - DOES_HAVE_PATH(base) && - base.path.size() > 0 && - NORMALIZED_WINDOWS_DRIVE_LETTER(base.path[0])) { - SET_HAVE_PATH() - url.path.push_back(base.path[0]); + base->flags & URL_FLAGS_HAS_SCHEME && + base->scheme == "file:" && + base->flags & URL_FLAGS_HAS_PATH && + base->path.size() > 0 && + NORMALIZED_WINDOWS_DRIVE_LETTER(base->path[0])) { + url->flags |= URL_FLAGS_HAS_PATH; + url->path.push_back(base->path[0]); } state = kPath; continue; @@ -1152,9 +1120,11 @@ namespace url { state = kPathStart; } else { if (buffer != "localhost") { - SET_HAVE_HOST() - if (!ParseHost(&buffer, &url.host)) - URL_FAILED() + url->flags |= URL_FLAGS_HAS_HOST; + if (!ParseHost(&buffer, &url->host)) { + url->flags |= URL_FLAGS_FAILED; + return; + } } buffer.clear(); state = kPathStart; @@ -1175,32 +1145,32 @@ namespace url { special_back_slash || (!has_state_override && (ch == '?' || ch == '#'))) { if (IsDoubleDotSegment(buffer)) { - ShortenUrlPath(&url); + ShortenUrlPath(url); if (ch != '/' && !special_back_slash) { - SET_HAVE_PATH() - url.path.push_back(""); + url->flags |= URL_FLAGS_HAS_PATH; + url->path.push_back(""); } } else if (IsSingleDotSegment(buffer)) { if (ch != '/' && !special_back_slash) { - SET_HAVE_PATH(); - url.path.push_back(""); + url->flags |= URL_FLAGS_HAS_PATH; + url->path.push_back(""); } } else { - if (DOES_HAVE_SCHEME(url) && - url.scheme == "file:" && - url.path.empty() && + if (url->flags & URL_FLAGS_HAS_SCHEME && + url->scheme == "file:" && + url->path.empty() && buffer.size() == 2 && WINDOWS_DRIVE_LETTER(buffer[0], buffer[1])) { - url.flags &= ~URL_FLAGS_HAS_HOST; + url->flags &= ~URL_FLAGS_HAS_HOST; buffer[1] = ':'; } - SET_HAVE_PATH() + url->flags |= URL_FLAGS_HAS_PATH; std::string segment(buffer.c_str(), buffer.size()); - url.path.push_back(segment); + url->path.push_back(segment); } buffer.clear(); if (ch == '?') { - SET_HAVE_QUERY() + url->flags |= URL_FLAGS_HAS_QUERY; state = kQuery; } else if (ch == '#') { state = kFragment; @@ -1218,16 +1188,16 @@ namespace url { state = kFragment; break; default: - if (url.path.size() == 0) - url.path.push_back(""); - if (url.path.size() > 0 && ch != kEOL) - AppendOrEscape(&url.path[0], ch, SimpleEncodeSet); + if (url->path.size() == 0) + url->path.push_back(""); + if (url->path.size() > 0 && ch != kEOL) + AppendOrEscape(&url->path[0], ch, SimpleEncodeSet); } break; case kQuery: if (ch == kEOL || (!has_state_override && ch == '#')) { - SET_HAVE_QUERY() - url.query = buffer; + url->flags |= URL_FLAGS_HAS_QUERY; + url->query = buffer; buffer.clear(); if (ch == '#') state = kFragment; @@ -1238,8 +1208,8 @@ namespace url { case kFragment: switch (ch) { case kEOL: - SET_HAVE_FRAGMENT() - url.fragment = buffer; + url->flags |= URL_FLAGS_HAS_FRAGMENT; + url->fragment = buffer; break; case 0: break; @@ -1248,63 +1218,97 @@ namespace url { } break; default: - INVALID_PARSE_STATE() - goto done; + url->flags |= URL_FLAGS_INVALID_PARSE_STATE; + return; } p++; } + } + + 
static void Parse(Environment* env, + Local<Value> recv, + const char* input, + const size_t len, + enum url_parse_state state_override, + Local<Value> base_obj, + Local<Value> context_obj, + Local<Function> cb, + Local<Value> error_cb) { + Isolate* isolate = env->isolate(); + Local<Context> context = env->context(); + HandleScope handle_scope(isolate); + Context::Scope context_scope(context); - done: + const bool has_base = base_obj->IsObject(); + + struct url_data base; + struct url_data url; + if (context_obj->IsObject()) + HarvestContext(env, &url, context_obj.As<Object>()); + if (has_base) + HarvestBase(env, &base, base_obj.As<Object>()); + + URL::Parse(input, len, state_override, &url, &base, has_base); + if (url.flags & URL_FLAGS_INVALID_PARSE_STATE) + return; // Define the return value placeholders const Local<Value> undef = Undefined(isolate); - Local<Value> argv[9] = { - undef, - undef, - undef, - undef, - undef, - undef, - undef, - undef, - undef, - }; - - argv[ARG_FLAGS] = Integer::NewFromUnsigned(isolate, url.flags); - if (!IS_FAILED(url.flags)) { - if (DOES_HAVE_SCHEME(url)) + if (!(url.flags & URL_FLAGS_FAILED)) { + Local<Value> argv[9] = { + undef, + undef, + undef, + undef, + undef, + undef, + undef, + undef, + undef, + }; + argv[ARG_FLAGS] = Integer::NewFromUnsigned(isolate, url.flags); + if (url.flags & URL_FLAGS_HAS_SCHEME) argv[ARG_PROTOCOL] = OneByteString(isolate, url.scheme.c_str()); - if (DOES_HAVE_USERNAME(url)) + if (url.flags & URL_FLAGS_HAS_USERNAME) argv[ARG_USERNAME] = UTF8STRING(isolate, url.username); - if (DOES_HAVE_PASSWORD(url)) + if (url.flags & URL_FLAGS_HAS_PASSWORD) argv[ARG_PASSWORD] = UTF8STRING(isolate, url.password); - if (DOES_HAVE_HOST(url)) + if (url.flags & URL_FLAGS_HAS_HOST) argv[ARG_HOST] = UTF8STRING(isolate, url.host); - if (DOES_HAVE_QUERY(url)) + if (url.flags & URL_FLAGS_HAS_QUERY) argv[ARG_QUERY] = UTF8STRING(isolate, url.query); - if (DOES_HAVE_FRAGMENT(url)) + if (url.flags & URL_FLAGS_HAS_FRAGMENT) argv[ARG_FRAGMENT] = UTF8STRING(isolate, url.fragment); if (url.port > -1) argv[ARG_PORT] = Integer::New(isolate, url.port); - if (DOES_HAVE_PATH(url)) + if (url.flags & URL_FLAGS_HAS_PATH) argv[ARG_PATH] = Copy(env, url.path); + (void)cb->Call(context, recv, arraysize(argv), argv); + } else if (error_cb->IsFunction()) { + Local<Value> argv[2] = { undef, undef }; + argv[ERR_ARG_FLAGS] = Integer::NewFromUnsigned(isolate, url.flags); + argv[ERR_ARG_INPUT] = + String::NewFromUtf8(env->isolate(), + input, + v8::NewStringType::kNormal).ToLocalChecked(); + (void)error_cb.As<Function>()->Call(context, recv, arraysize(argv), argv); } - - (void)cb->Call(context, recv, 9, argv); } static void Parse(const FunctionCallbackInfo<Value>& args) { Environment* env = Environment::GetCurrent(args); CHECK_GE(args.Length(), 5); - CHECK(args[0]->IsString()); - CHECK(args[2]->IsUndefined() || + CHECK(args[0]->IsString()); // input + CHECK(args[2]->IsUndefined() || // base context args[2]->IsNull() || args[2]->IsObject()); - CHECK(args[3]->IsUndefined() || + CHECK(args[3]->IsUndefined() || // context args[3]->IsNull() || args[3]->IsObject()); - CHECK(args[4]->IsFunction()); + CHECK(args[4]->IsFunction()); // complete callback + CHECK(args[5]->IsUndefined() || args[5]->IsFunction()); // error callback + Utf8Value input(env->isolate(), args[0]); enum url_parse_state state_override = kUnknownState; if (args[1]->IsNumber()) { @@ -1317,7 +1321,8 @@ namespace url { state_override, args[2], args[3], - args[4].As<Function>()); + args[4].As<Function>(), + args[5]); } static void EncodeAuthSet(const FunctionCallbackInfo<Value>& args) { diff --git a/src/node_url.h b/src/node_url.h index
f0f54282580..d1dbe94c3d6 100644 --- a/src/node_url.h +++ b/src/node_url.h @@ -377,11 +377,11 @@ static inline unsigned hex2bin(const char ch) { return static_cast<unsigned>(-1); } -static inline int PercentDecode(const char* input, - size_t len, - std::string* dest) { +static inline void PercentDecode(const char* input, + size_t len, + std::string* dest) { if (len == 0) - return 0; + return; dest->reserve(len); const char* pointer = input; const char* end = input + len; @@ -400,11 +400,10 @@ static inline int PercentDecode(const char* input, unsigned a = hex2bin(pointer[1]); unsigned b = hex2bin(pointer[2]); char c = static_cast<char>(a * 16 + b); - *dest += static_cast<char>(c); + *dest += c; pointer += 3; } } - return 0; } #define SPECIALS(XX) \ @@ -465,6 +464,10 @@ static inline int PercentDecode(const char* input, XX(ARG_QUERY) \ XX(ARG_FRAGMENT) +#define ERR_ARGS(XX) \ + XX(ERR_ARG_FLAGS) \ + XX(ERR_ARG_INPUT) \ + static const char kEOL = -1; enum url_parse_state { @@ -472,19 +475,25 @@ enum url_parse_state { #define XX(name) name, PARSESTATES(XX) #undef XX -} url_parse_state; +}; enum url_flags { #define XX(name, val) name = val, FLAGS(XX) #undef XX -} url_flags; +}; enum url_cb_args { #define XX(name) name, ARGS(XX) #undef XX -} url_cb_args; +}; + +enum url_error_cb_args { +#define XX(name) name, + ERR_ARGS(XX) +#undef XX +} url_error_cb_args; static inline bool IsSpecial(std::string scheme) { #define XX(name, _) if (scheme == name) return true; @@ -530,6 +539,91 @@ struct url_host { url_host_value value; enum url_host_type type; }; + +class URL { + public: + static void Parse(const char* input, + const size_t len, + enum url_parse_state state_override, + struct url_data* url, + const struct url_data* base, + bool has_base); + + URL(const char* input, const size_t len) { + Parse(input, len, kUnknownState, &context_, nullptr, false); + } + + URL(const char* input, const size_t len, const URL* base) { + if (base != nullptr) + Parse(input, len, kUnknownState, &context_, &(base->context_), true); + else + Parse(input, len, kUnknownState, &context_, nullptr, false); + } + + URL(const char* input, const size_t len, + const char* base, const size_t baselen) { + if (base != nullptr && baselen > 0) { + URL _base(base, baselen); + Parse(input, len, kUnknownState, &context_, &(_base.context_), true); + } else { + Parse(input, len, kUnknownState, &context_, nullptr, false); + } + } + + explicit URL(std::string input) : + URL(input.c_str(), input.length()) {} + + URL(std::string input, const URL* base) : + URL(input.c_str(), input.length(), base) {} + + URL(std::string input, std::string base) : + URL(input.c_str(), input.length(), base.c_str(), base.length()) {} + + int32_t flags() { + return context_.flags; + } + + int port() { + return context_.port; + } + + const std::string& protocol() const { + return context_.scheme; + } + + const std::string& username() const { + return context_.username; + } + + const std::string& password() const { + return context_.password; + } + + const std::string& host() const { + return context_.host; + } + + const std::string& query() const { + return context_.query; + } + + const std::string& fragment() const { + return context_.fragment; + } + + std::string path() { + std::string ret; + for (auto i = context_.path.begin(); i != context_.path.end(); i++) { + ret += '/'; + ret += *i; + } + return ret; + } + + private: + struct url_data context_; +}; + } // namespace url } // namespace node diff --git a/src/tls_wrap.cc b/src/tls_wrap.cc index 70e43ce3c6c..408c126995e 100644 ---
a/src/tls_wrap.cc +++ b/src/tls_wrap.cc @@ -458,6 +458,12 @@ void TLSWrap::ClearOut() { memcpy(buf.base, current, avail); OnRead(avail, &buf); + // Caveat emptor: OnRead() calls into JS land which can result in + // the SSL context object being destroyed. We have to carefully + // check that ssl_ != nullptr afterwards. + if (ssl_ == nullptr) + return; + read -= avail; current += avail; } diff --git a/test/README.md b/test/README.md index 653f78cc6d2..ae549420624 100644 --- a/test/README.md +++ b/test/README.md @@ -376,24 +376,12 @@ Path to the 'root' directory. either `/` or `c:\\` (windows) Logs '1..0 # Skipped: ' + `msg` -### spawnCat(options) -* `options` [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) -* return [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) - -Platform normalizes the `cat` command. - ### spawnPwd(options) * `options` [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) * return [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) Platform normalizes the `pwd` command. -### spawnSyncCat(options) -* `options` [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) -* return [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) - -Synchronous version of `spawnCat`. - ### spawnSyncPwd(options) * `options` [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) * return [<Object>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) diff --git a/test/common.js b/test/common.js index acf906d2cd0..c5c6b904dbb 100644 --- a/test/common.js +++ b/test/common.js @@ -262,28 +262,6 @@ exports.ddCommand = function(filename, kilobytes) { }; -exports.spawnCat = function(options) { - const spawn = require('child_process').spawn; - - if (exports.isWindows) { - return spawn('more', [], options); - } else { - return spawn('cat', [], options); - } -}; - - -exports.spawnSyncCat = function(options) { - const spawnSync = require('child_process').spawnSync; - - if (exports.isWindows) { - return spawnSync('more', [], options); - } else { - return spawnSync('cat', [], options); - } -}; - - exports.spawnPwd = function(options) { const spawn = require('child_process').spawn; diff --git a/test/fixtures/spawn_closed_stdio.py b/test/fixtures/spawn_closed_stdio.py new file mode 100644 index 00000000000..b5de2552c2b --- /dev/null +++ b/test/fixtures/spawn_closed_stdio.py @@ -0,0 +1,8 @@ +import os +import sys +import subprocess +os.close(0) +os.close(1) +os.close(2) +exit_code = subprocess.call(sys.argv[1:], shell=False) +sys.exit(exit_code) diff --git a/test/fixtures/url-searchparams.js b/test/fixtures/url-searchparams.js new file mode 100644 index 00000000000..3b186fc97bc --- /dev/null +++ b/test/fixtures/url-searchparams.js @@ -0,0 +1,68 @@ +module.exports = [ + ['', '', []], + [ + 'foo=918854443121279438895193', + 'foo=918854443121279438895193', + [['foo', '918854443121279438895193']] + ], + ['foo=bar', 'foo=bar', [['foo', 'bar']]], + ['foo=bar&foo=quux', 'foo=bar&foo=quux', [['foo', 'bar'], ['foo', 'quux']]], + ['foo=1&bar=2', 'foo=1&bar=2', [['foo', '1'], ['bar', '2']]], + [ + "my%20weird%20field=q1!2%22'w%245%267%2Fz8)%3F", + 'my+weird+field=q1%212%22%27w%245%267%2Fz8%29%3F', + [['my weird field', 'q1!2"\'w$5&7/z8)?']] + ], + 
['foo%3Dbaz=bar', 'foo%3Dbaz=bar', [['foo=baz', 'bar']]], + ['foo=baz=bar', 'foo=baz%3Dbar', [['foo', 'baz=bar']]], + [ + 'str=foo&arr=1&somenull&arr=2&undef=&arr=3', + 'str=foo&arr=1&somenull=&arr=2&undef=&arr=3', + [ + ['str', 'foo'], + ['arr', '1'], + ['somenull', ''], + ['arr', '2'], + ['undef', ''], + ['arr', '3'] + ] + ], + [' foo = bar ', '+foo+=+bar+', [[' foo ', ' bar ']]], + ['foo=%zx', 'foo=%25zx', [['foo', '%zx']]], + ['foo=%EF%BF%BD', 'foo=%EF%BF%BD', [['foo', '\ufffd']]], + // See: https://github.com/joyent/node/issues/3058 + ['foo&bar=baz', 'foo=&bar=baz', [['foo', ''], ['bar', 'baz']]], + ['a=b&c&d=e', 'a=b&c=&d=e', [['a', 'b'], ['c', ''], ['d', 'e']]], + ['a=b&c=&d=e', 'a=b&c=&d=e', [['a', 'b'], ['c', ''], ['d', 'e']]], + ['a=b&=c&d=e', 'a=b&=c&d=e', [['a', 'b'], ['', 'c'], ['d', 'e']]], + ['a=b&=&d=e', 'a=b&=&d=e', [['a', 'b'], ['', ''], ['d', 'e']]], + ['&&foo=bar&&', 'foo=bar', [['foo', 'bar']]], + ['&', '', []], + ['&&&&', '', []], + ['&=&', '=', [['', '']]], + ['&=&=', '=&=', [['', ''], ['', '']]], + ['=', '=', [['', '']]], + ['+', '+=', [[' ', '']]], + ['+=', '+=', [[' ', '']]], + ['=+', '=+', [['', ' ']]], + ['+=&', '+=', [[' ', '']]], + ['a&&b', 'a=&b=', [['a', ''], ['b', '']]], + ['a=a&&b=b', 'a=a&b=b', [['a', 'a'], ['b', 'b']]], + ['&a', 'a=', [['a', '']]], + ['&=', '=', [['', '']]], + ['a&a&', 'a=&a=', [['a', ''], ['a', '']]], + ['a&a&a&', 'a=&a=&a=', [['a', ''], ['a', ''], ['a', '']]], + ['a&a&a&a&', 'a=&a=&a=&a=', [['a', ''], ['a', ''], ['a', ''], ['a', '']]], + ['a=&a=value&a=', 'a=&a=value&a=', [['a', ''], ['a', 'value'], ['a', '']]], + ['foo%20bar=baz%20quux', 'foo+bar=baz+quux', [['foo bar', 'baz quux']]], + ['+foo=+bar', '+foo=+bar', [[' foo', ' bar']]], + [ + // fake percent encoding + 'foo=%©ar&baz=%A©uux&xyzzy=%©ud', + 'foo=%25%C2%A9ar&baz=%25A%C2%A9uux&xyzzy=%25%C2%A9ud', + [['foo', '%©ar'], ['baz', '%A©uux'], ['xyzzy', '%©ud']] + ], + // always preserve order of key-value pairs + ['a=1&b=2&a=3', 'a=1&b=2&a=3', [['a', '1'], ['b', '2'], ['a', '3']]], + ['?a', '%3Fa=', [['?a', '']]] +]; diff --git a/test/fixtures/url-setter-tests.json b/test/fixtures/url-setter-tests.json index 4876b9940c1..d0138204b3f 100644 --- a/test/fixtures/url-setter-tests.json +++ b/test/fixtures/url-setter-tests.json @@ -111,6 +111,55 @@ "protocol": "http:" } }, + { + "href": "gopher://example.net:1234", + "new_value": "file", + "expected": { + "href": "gopher://example.net:1234/", + "protocol": "gopher:" + } + }, + { + "href": "wss://x:x@example.net:1234", + "new_value": "file", + "expected": { + "href": "wss://x:x@example.net:1234/", + "protocol": "wss:" + } + }, + { + "comment": "Can’t switch from file URL with no host", + "href": "file://localhost/", + "new_value": "http", + "expected": { + "href": "file:///", + "protocol": "file:" + } + }, + { + "href": "file:///test", + "new_value": "gopher", + "expected": { + "href": "file:///test", + "protocol": "file:" + } + }, + { + "href": "file:", + "new_value": "wss", + "expected": { + "href": "file:///", + "protocol": "file:" + } + }, + { + "href": "file://hi/path", + "new_value": "s", + "expected": { + "href": "file://hi/path", + "protocol": "file:" + } + }, { "href": "https://example.net", "new_value": "s", @@ -1177,4 +1226,4 @@ } } ] -} \ No newline at end of file +} diff --git a/test/known_issues/test-vm-getters.js b/test/known_issues/test-vm-getters.js index f815e6d6583..af27eeee644 100644 --- a/test/known_issues/test-vm-getters.js +++ b/test/known_issues/test-vm-getters.js @@ -16,4 +16,9 @@ const context = 
vm.createContext(sandbox); const code = 'Object.getOwnPropertyDescriptor(this, "prop");'; const result = vm.runInContext(code, context); -assert.strictEqual(result, descriptor); +// Ref: https://github.com/nodejs/node/issues/11803 + +assert.deepStrictEqual(Object.keys(result), Object.keys(descriptor)); +for (const prop of Object.keys(result)) { + assert.strictEqual(result[prop], descriptor[prop]); +} diff --git a/test/parallel/test-binding-constants.js b/test/parallel/test-binding-constants.js new file mode 100644 index 00000000000..fcc95c21a3f --- /dev/null +++ b/test/parallel/test-binding-constants.js @@ -0,0 +1,30 @@ +'use strict'; + +require('../common'); +const constants = process.binding('constants'); +const assert = require('assert'); + +assert.deepStrictEqual( + Object.keys(constants).sort(), ['crypto', 'fs', 'os', 'zlib'] +); + +assert.deepStrictEqual( + Object.keys(constants.os).sort(), ['UV_UDP_REUSEADDR', 'errno', 'signals'] +); + +// Make sure all the constants objects don't inherit from Object.prototype +const inheritedProperties = Object.getOwnPropertyNames(Object.prototype); +function test(obj) { + assert(obj); + assert.strictEqual(Object.prototype.toString.call(obj), '[object Object]'); + assert.strictEqual(Object.getPrototypeOf(obj), null); + + inheritedProperties.forEach((property) => { + assert.strictEqual(property in obj, false); + }); +} + +[ + constants, constants.crypto, constants.fs, constants.os, constants.zlib, + constants.os.errno, constants.os.signals +].forEach(test); diff --git a/test/parallel/test-buffer-write-noassert.js b/test/parallel/test-buffer-write-noassert.js index 7423e462ca1..10e9fd8b766 100644 --- a/test/parallel/test-buffer-write-noassert.js +++ b/test/parallel/test-buffer-write-noassert.js @@ -4,6 +4,8 @@ const assert = require('assert'); // testing buffer write functions +const outOfRange = /^RangeError: (?:Index )?out of range(?: index)?$/; + function write(funx, args, result, res) { { const buf = Buffer.alloc(9); @@ -11,14 +13,8 @@ function write(funx, args, result, res) { assert.deepStrictEqual(buf, res); } - { - const invalidArgs = Array.from(args); - invalidArgs[1] = -1; - assert.throws( - () => Buffer.alloc(9)[funx](...invalidArgs), - /^RangeError: (?:Index )?out of range(?: index)?$/ - ); - } + writeInvalidOffset(-1); + writeInvalidOffset(9); if (!/Int/.test(funx)) { assert.throws( @@ -33,6 +29,15 @@ function write(funx, args, result, res) { assert.deepStrictEqual(buf2, res); } + function writeInvalidOffset(offset) { + const newArgs = Array.from(args); + newArgs[1] = offset; + assert.throws(() => Buffer.alloc(9)[funx](...newArgs), outOfRange); + + const buf = Buffer.alloc(9); + buf[funx](...newArgs, true); + assert.deepStrictEqual(buf, Buffer.alloc(9)); + } } write('writeInt8', [1, 0], 1, Buffer.from([1, 0, 0, 0, 0, 0, 0, 0, 0])); @@ -53,3 +58,45 @@ write('writeDoubleBE', [1, 1], 9, Buffer.from([0, 63, 240, 0, 0, 0, 0, 0, 0])); write('writeDoubleLE', [1, 1], 9, Buffer.from([0, 0, 0, 0, 0, 0, 0, 240, 63])); write('writeFloatBE', [1, 1], 5, Buffer.from([0, 63, 128, 0, 0, 0, 0, 0, 0])); write('writeFloatLE', [1, 1], 5, Buffer.from([0, 0, 0, 128, 63, 0, 0, 0, 0])); + +function writePartial(funx, args, result, res) { + assert.throws(() => Buffer.alloc(9)[funx](...args), outOfRange); + const buf = Buffer.alloc(9); + assert.strictEqual(buf[funx](...args, true), result); + assert.deepStrictEqual(buf, res); +} + +// Test partial writes (cases where the buffer isn't large enough to hold the +// entire value, but is large enough to hold parts of 
it). +writePartial('writeIntBE', [0x0eadbeef, 6, 4], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0x0e, 0xad, 0xbe])); +writePartial('writeIntLE', [0x0eadbeef, 6, 4], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0xef, 0xbe, 0xad])); +writePartial('writeInt16BE', [0x1234, 8], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 0x12])); +writePartial('writeInt16LE', [0x1234, 8], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 0x34])); +writePartial('writeInt32BE', [0x0eadbeef, 6], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0x0e, 0xad, 0xbe])); +writePartial('writeInt32LE', [0x0eadbeef, 6], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0xef, 0xbe, 0xad])); +writePartial('writeUIntBE', [0xdeadbeef, 6, 4], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0xde, 0xad, 0xbe])); +writePartial('writeUIntLE', [0xdeadbeef, 6, 4], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0xef, 0xbe, 0xad])); +writePartial('writeUInt16BE', [0x1234, 8], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 0x12])); +writePartial('writeUInt16LE', [0x1234, 8], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 0x34])); +writePartial('writeUInt32BE', [0xdeadbeef, 6], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0xde, 0xad, 0xbe])); +writePartial('writeUInt32LE', [0xdeadbeef, 6], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0xef, 0xbe, 0xad])); +writePartial('writeDoubleBE', [1, 2], 10, + Buffer.from([0, 0, 63, 240, 0, 0, 0, 0, 0])); +writePartial('writeDoubleLE', [1, 2], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 240])); +writePartial('writeFloatBE', [1, 6], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 63, 128, 0])); +writePartial('writeFloatLE', [1, 6], 10, + Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 128])); diff --git a/test/parallel/test-child-process-execfile.js b/test/parallel/test-child-process-execfile.js new file mode 100644 index 00000000000..ab36aa7b156 --- /dev/null +++ b/test/parallel/test-child-process-execfile.js @@ -0,0 +1,21 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const execFile = require('child_process').execFile; +const path = require('path'); + +const fixture = path.join(common.fixturesDir, 'exit.js'); + +{ + execFile( + process.execPath, + [fixture, 42], + common.mustCall((e) => { + // Check that arguments are included in message + assert.strictEqual(e.message.trim(), + `Command failed: ${process.execPath} ${fixture} 42`); + assert.strictEqual(e.code, 42); + }) + ); +} diff --git a/test/parallel/test-child-process-spawn-shell.js b/test/parallel/test-child-process-spawn-shell.js index 444d1eb063c..e9dd0e07efa 100644 --- a/test/parallel/test-child-process-spawn-shell.js +++ b/test/parallel/test-child-process-spawn-shell.js @@ -34,7 +34,7 @@ echo.on('close', common.mustCall((code, signal) => { })); // Verify that shell features can be used -const cmd = common.isWindows ? 'echo bar | more' : 'echo bar | cat'; +const cmd = 'echo bar | cat'; const command = cp.spawn(cmd, { encoding: 'utf8', shell: true diff --git a/test/parallel/test-child-process-spawnsync-shell.js b/test/parallel/test-child-process-spawnsync-shell.js index 1d92767a8b5..9e03ee126f0 100644 --- a/test/parallel/test-child-process-spawnsync-shell.js +++ b/test/parallel/test-child-process-spawnsync-shell.js @@ -23,7 +23,7 @@ assert.strictEqual(echo.args[echo.args.length - 1].replace(/"/g, ''), assert.strictEqual(echo.stdout.toString().trim(), 'foo'); // Verify that shell features can be used -const cmd = common.isWindows ? 
'echo bar | more' : 'echo bar | cat'; +const cmd = 'echo bar | cat'; const command = cp.spawnSync(cmd, {shell: true}); assert.strictEqual(command.stdout.toString().trim(), 'bar'); diff --git a/test/parallel/test-child-process-stdin.js b/test/parallel/test-child-process-stdin.js index 4f5352438d2..8a0a2bcfaea 100644 --- a/test/parallel/test-child-process-stdin.js +++ b/test/parallel/test-child-process-stdin.js @@ -25,7 +25,7 @@ const assert = require('assert'); const spawn = require('child_process').spawn; -const cat = spawn(common.isWindows ? 'more' : 'cat'); +const cat = spawn('cat'); cat.stdin.write('hello'); cat.stdin.write(' '); cat.stdin.write('world'); @@ -54,9 +54,5 @@ cat.on('exit', common.mustCall(function(status) { })); cat.on('close', common.mustCall(function() { - if (common.isWindows) { - assert.strictEqual('hello world\r\n', response); - } else { - assert.strictEqual('hello world', response); - } + assert.strictEqual('hello world', response); })); diff --git a/test/parallel/test-child-process-stdio-inherit.js b/test/parallel/test-child-process-stdio-inherit.js index 4318ba3ee17..b77391d87dc 100644 --- a/test/parallel/test-child-process-stdio-inherit.js +++ b/test/parallel/test-child-process-stdio-inherit.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const spawn = require('child_process').spawn; @@ -52,5 +52,5 @@ function grandparent() { function parent() { // should not immediately exit. - common.spawnCat({ stdio: 'inherit' }); + spawn('cat', [], { stdio: 'inherit' }); } diff --git a/test/parallel/test-child-process-stdio.js b/test/parallel/test-child-process-stdio.js index b7193b2f591..00c002e89d6 100644 --- a/test/parallel/test-child-process-stdio.js +++ b/test/parallel/test-child-process-stdio.js @@ -22,6 +22,7 @@ 'use strict'; const common = require('../common'); const assert = require('assert'); +const spawnSync = require('child_process').spawnSync; let options = {stdio: ['pipe']}; let child = common.spawnPwd(options); @@ -36,7 +37,7 @@ assert.strictEqual(child.stdout, null); assert.strictEqual(child.stderr, null); options = {stdio: 'ignore'}; -child = common.spawnSyncCat(options); +child = spawnSync('cat', [], options); assert.deepStrictEqual(options, {stdio: 'ignore'}); assert.throws(() => { diff --git a/test/parallel/test-event-emitter-max-listeners.js b/test/parallel/test-event-emitter-max-listeners.js index c0c763f2ad5..6219d64a51a 100644 --- a/test/parallel/test-event-emitter-max-listeners.js +++ b/test/parallel/test-event-emitter-max-listeners.js @@ -30,16 +30,13 @@ e.on('maxListeners', common.mustCall(function() {})); // Should not corrupt the 'maxListeners' queue. 
e.setMaxListeners(42); -assert.throws(function() { - e.setMaxListeners(NaN); -}, /^TypeError: "n" argument must be a positive number$/); +const throwsObjs = [NaN, -1, 'and even this']; -assert.throws(function() { - e.setMaxListeners(-1); -}, /^TypeError: "n" argument must be a positive number$/); - -assert.throws(function() { - e.setMaxListeners('and even this'); -}, /^TypeError: "n" argument must be a positive number$/); +for (const obj of throwsObjs) { + assert.throws(() => e.setMaxListeners(obj), + /^TypeError: "n" argument must be a positive number$/); + assert.throws(() => events.defaultMaxListeners = obj, + /^TypeError: "defaultMaxListeners" must be a positive number$/); +} e.emit('maxListeners'); diff --git a/test/parallel/test-fs-timestamp-parsing-error.js b/test/parallel/test-fs-timestamp-parsing-error.js new file mode 100644 index 00000000000..42bbe45d511 --- /dev/null +++ b/test/parallel/test-fs-timestamp-parsing-error.js @@ -0,0 +1,17 @@ +'use strict'; +require('../common'); +const fs = require('fs'); +const assert = require('assert'); + +[Infinity, -Infinity, NaN].forEach((input) => { + assert.throws(() => fs._toUnixTimestamp(input), + new RegExp('^Error: Cannot parse time: ' + input + '$')); +}); + +assert.throws(() => fs._toUnixTimestamp({}), + /^Error: Cannot parse time: \[object Object\]$/); + +const okInputs = [1, -1, '1', '-1', Date.now()]; +okInputs.forEach((input) => { + assert.doesNotThrow(() => fs._toUnixTimestamp(input)); +}); diff --git a/test/parallel/test-fs-utimes.js b/test/parallel/test-fs-utimes.js index 2d00d0eaed5..456f09b8041 100644 --- a/test/parallel/test-fs-utimes.js +++ b/test/parallel/test-fs-utimes.js @@ -143,17 +143,15 @@ function testIt(atime, mtime, callback) { const stats = fs.statSync(__filename); // run tests -const runTest = common.mustCall(testIt, 6); +const runTest = common.mustCall(testIt, 5); runTest(new Date('1982-09-10 13:37'), new Date('1982-09-10 13:37'), function() { runTest(new Date(), new Date(), function() { runTest(123456.789, 123456.789, function() { runTest(stats.mtime, stats.mtime, function() { - runTest(NaN, Infinity, function() { - runTest('123456', -1, common.mustCall(function() { - // done - })); - }); + runTest('123456', -1, common.mustCall(function() { + // done + })); }); }); }); diff --git a/test/parallel/test-https-foafssl.js b/test/parallel/test-https-foafssl.js index 8b711b81fee..661b1961527 100644 --- a/test/parallel/test-https-foafssl.js +++ b/test/parallel/test-https-foafssl.js @@ -42,7 +42,8 @@ const https = require('https'); const options = { key: fs.readFileSync(common.fixturesDir + '/agent.key'), cert: fs.readFileSync(common.fixturesDir + '/agent.crt'), - requestCert: true + requestCert: true, + rejectUnauthorized: false }; const modulus = 'A6F44A9C25791431214F5C87AF9E040177A8BB89AC803F7E09BBC3A5519F' + diff --git a/test/parallel/test-net-options-lookup.js b/test/parallel/test-net-options-lookup.js new file mode 100644 index 00000000000..da23830b12a --- /dev/null +++ b/test/parallel/test-net-options-lookup.js @@ -0,0 +1,34 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const net = require('net'); + +const expectedError = /^TypeError: "lookup" option should be a function$/; + +['foobar', 1, {}, []].forEach((input) => connectThrows(input)); + +function connectThrows(input) { + const opts = { + host: 'localhost', + port: common.PORT, + lookup: input + }; + + assert.throws(function() { + net.connect(opts); + }, expectedError); +} + +[() => {}].forEach((input) => 
connectDoesNotThrow(input)); + +function connectDoesNotThrow(input) { + const opts = { + host: 'localhost', + port: common.PORT, + lookup: input + }; + + assert.doesNotThrow(function() { + net.connect(opts); + }); +} diff --git a/test/parallel/test-process-env.js b/test/parallel/test-process-env.js index 7d6434c00b8..69379f60061 100644 --- a/test/parallel/test-process-env.js +++ b/test/parallel/test-process-env.js @@ -70,7 +70,6 @@ if (process.argv[2] === 'you-are-the-child') { delete process.env.NON_EXISTING_VARIABLE; assert.strictEqual(true, delete process.env.NON_EXISTING_VARIABLE); -/* eslint-disable max-len */ /* For the moment we are not going to support setting the timezone via the * environment variables. The problem is that various V8 platform backends * deal with timezone in different ways. The windows platform backend caches @@ -87,7 +86,6 @@ date = new Date('Fri, 10 Sep 1982 03:15:00 GMT'); assert.strictEqual(3, date.getUTCHours()); assert.strictEqual(5, date.getHours()); */ -/* eslint-enable max-len */ // Environment variables should be case-insensitive on Windows, and // case-sensitive on other platforms. diff --git a/test/parallel/test-readline-interface.js b/test/parallel/test-readline-interface.js index 4a0fb9d0b90..cec71a45b66 100644 --- a/test/parallel/test-readline-interface.js +++ b/test/parallel/test-readline-interface.js @@ -326,14 +326,14 @@ function isWarned(emitter) { return false; }); - // duplicate lines are removed from history when `options.deDupeHistory` - // is `true` + // duplicate lines are removed from history when + // `options.removeHistoryDuplicates` is `true` fi = new FakeInput(); rli = new readline.Interface({ input: fi, output: fi, terminal: true, - deDupeHistory: true + removeHistoryDuplicates: true }); expectedLines = ['foo', 'bar', 'baz', 'bar', 'bat', 'bat']; callCount = 0; @@ -356,14 +356,14 @@ function isWarned(emitter) { assert.strictEqual(callCount, 0); rli.close(); - // duplicate lines are not removed from history when `options.deDupeHistory` - // is `false` + // duplicate lines are not removed from history when + // `options.removeHistoryDuplicates` is `false` fi = new FakeInput(); rli = new readline.Interface({ input: fi, output: fi, terminal: true, - deDupeHistory: false + removeHistoryDuplicates: false }); expectedLines = ['foo', 'bar', 'baz', 'bar', 'bat', 'bat']; callCount = 0; diff --git a/test/parallel/test-repl-empty.js b/test/parallel/test-repl-empty.js index e9794d9bea4..44281f117f0 100644 --- a/test/parallel/test-repl-empty.js +++ b/test/parallel/test-repl-empty.js @@ -9,7 +9,7 @@ const repl = require('repl'); const options = { eval: common.mustCall((cmd, context) => { // Assertions here will not cause the test to exit with an error code - // so set a boolean that is checked in process.on('exit',...) instead. + // so set a boolean that is checked later instead. evalCalledWithExpectedArgs = (cmd === '\n'); }) }; @@ -23,7 +23,5 @@ const repl = require('repl'); r.write('.exit\n'); } - process.on('exit', () => { - assert(evalCalledWithExpectedArgs); - }); + assert(evalCalledWithExpectedArgs); } diff --git a/test/parallel/test-repl-eval.js b/test/parallel/test-repl-eval.js index 1eb4e2371c7..7508e14d8bd 100644 --- a/test/parallel/test-repl-eval.js +++ b/test/parallel/test-repl-eval.js @@ -9,7 +9,7 @@ const repl = require('repl'); const options = { eval: common.mustCall((cmd, context) => { // Assertions here will not cause the test to exit with an error code - // so set a boolean that is checked in process.on('exit',...) instead. 
+ // so set a boolean that is checked later instead. evalCalledWithExpectedArgs = (cmd === 'function f() {}\n' && context.foo === 'bar'); }) @@ -29,7 +29,5 @@ const repl = require('repl'); r.write('.exit\n'); } - process.on('exit', () => { - assert(evalCalledWithExpectedArgs); - }); + assert(evalCalledWithExpectedArgs); } diff --git a/test/parallel/test-stdio-closed.js b/test/parallel/test-stdio-closed.js index 98e4f980d50..2313140a26a 100644 --- a/test/parallel/test-stdio-closed.js +++ b/test/parallel/test-stdio-closed.js @@ -3,9 +3,21 @@ const common = require('../common'); const assert = require('assert'); const spawn = require('child_process').spawn; const fs = require('fs'); +const path = require('path'); if (common.isWindows) { - common.skip('platform not supported.'); + if (process.argv[2] === 'child') { + process.stdin; + process.stdout; + process.stderr; + return; + } + const python = process.env.PYTHON || 'python'; + const script = path.join(common.fixturesDir, 'spawn_closed_stdio.py'); + const proc = spawn(python, [script, process.execPath, __filename, 'child']); + proc.on('exit', common.mustCall(function(exitCode) { + assert.strictEqual(exitCode, 0); + })); return; } diff --git a/test/parallel/test-timers-unrefed-in-callback.js b/test/parallel/test-timers-unrefed-in-callback.js new file mode 100644 index 00000000000..c6fcb227899 --- /dev/null +++ b/test/parallel/test-timers-unrefed-in-callback.js @@ -0,0 +1,61 @@ +'use strict'; +// Checks that setInterval timers keep running even when they're +// unrefed within their callback. + +require('../common'); +const assert = require('assert'); +const net = require('net'); + +let counter1 = 0; +let counter2 = 0; + +// Test1 checks that clearInterval works as expected for a timer +// unrefed within its callback: it removes the timer and its callback +// is not called anymore. Note that the only reason why this test is +// robust is that: +// 1. the repeated timer it creates has a delay of 1ms +// 2. when this test is completed, another test starts that creates a +// new repeated timer with the same delay (1ms) +// 3. because of the way timers are implemented in libuv, if two +// repeated timers A and B are created in that order with the same +// delay, it is guaranteed that the first occurrence of timer A +// will fire before the first occurrence of timer B +// 4. as a result, when the timer created by Test2 fired 11 times, if +// the timer created by Test1 hadn't been removed by clearInterval, +// it would have fired 11 more times, and the assertion in the +// process'exit event handler would fail. +function Test1() { + // server only for maintaining event loop + const server = net.createServer().listen(0); + + const timer1 = setInterval(() => { + timer1.unref(); + if (counter1++ === 3) { + clearInterval(timer1); + server.close(() => { + Test2(); + }); + } + }, 1); +} + + +// Test2 checks setInterval continues even if it is unrefed within +// timer callback. counter2 continues to be incremented more than 11 +// until server close completed. 
+function Test2() { + // server only for maintaining event loop + const server = net.createServer().listen(0); + + const timer2 = setInterval(() => { + timer2.unref(); + if (counter2++ === 3) + server.close(); + }, 1); +} + +process.on('exit', () => { + assert.strictEqual(counter1, 4); +}); + +Test1(); diff --git a/test/parallel/test-tls-securepair-leak.js b/test/parallel/test-tls-securepair-leak.js new file mode 100644 index 00000000000..b513bcd4c7c --- /dev/null +++ b/test/parallel/test-tls-securepair-leak.js @@ -0,0 +1,29 @@ +// Flags: --expose-gc --no-deprecation +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +if (!common.hasCrypto) { + common.skip('missing crypto'); + return; +} + +const { createSecureContext } = require('tls'); +const { createSecurePair } = require('_tls_legacy'); + +const before = process.memoryUsage().external; +{ + const context = createSecureContext(); + const options = {}; + for (let i = 0; i < 1e4; i += 1) + createSecurePair(context, false, false, false, options).destroy(); +} +global.gc(); +const after = process.memoryUsage().external; + +// It's not an exact science but a SecurePair grows .external by about 45 kB. +// Unless AdjustAmountOfExternalAllocatedMemory() is called on destruction, +// 10,000 instances make it grow by well over 400 MB. Allow for some slop +// because objects like buffers also affect the external limit. +assert(after - before < 25 << 20); diff --git a/test/parallel/test-tls-session-cache.js b/test/parallel/test-tls-session-cache.js index f555da842bb..887c36d4c5b 100644 --- a/test/parallel/test-tls-session-cache.js +++ b/test/parallel/test-tls-session-cache.js @@ -56,7 +56,8 @@ function doTest(testOptions, callback) { key: key, cert: cert, ca: [cert], - requestCert: true + requestCert: true, + rejectUnauthorized: false }; let requestCount = 0; let resumeCount = 0; diff --git a/test/parallel/test-tls-socket-close.js b/test/parallel/test-tls-socket-close.js index 440c0c4ff7c..617062a4f2d 100644 --- a/test/parallel/test-tls-socket-close.js +++ b/test/parallel/test-tls-socket-close.js @@ -1,5 +1,9 @@ 'use strict'; const common = require('../common'); +if (!common.hasCrypto) { + common.skip('missing crypto'); + return; +} const assert = require('assert'); const tls = require('tls'); @@ -9,35 +13,46 @@ const net = require('net'); const key = fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'); const cert = fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem'); -const T = 100; - +let tlsSocket; // tls server const tlsServer = tls.createServer({ cert, key }, (socket) => { - setTimeout(() => { - socket.on('error', (error) => { - assert.strictEqual(error.code, 'EINVAL'); - tlsServer.close(); - netServer.close(); - }); - socket.write('bar'); - }, T * 2); + tlsSocket = socket; + socket.on('error', common.mustCall((error) => { + assert.strictEqual(error.code, 'EINVAL'); + tlsServer.close(); + netServer.close(); + })); }); +let netSocket; // plain tcp server const netServer = net.createServer((socket) => { - // if client wants to use tls + // if client wants to use tls tlsServer.emit('connection', socket); - socket.setTimeout(T, () => { - // this breaks if TLSSocket is already managing the socket: - socket.destroy(); - }); + netSocket = socket; }).listen(0, common.mustCall(function() { - // connect client tls.connect({ host: 'localhost', port: this.address().port, rejectUnauthorized: false - }).write('foo'); + }).write('foo', 'utf8', common.mustCall(() => { + assert(netSocket); + 
netSocket.setTimeout(1, common.mustCall(() => { + assert(tlsSocket); + // this breaks if TLSSocket is already managing the socket: + netSocket.destroy(); + const interval = setInterval(() => { + // Checking this way allows us to do the write at a time that causes a + // segmentation fault (not always, but often) in Node.js 7.7.3 and + // earlier. If we instead, for example, wait on the `close` event, then + // it will not segmentation fault, which is what this test is all about. + if (tlsSocket._handle._parent.bytesRead === 0) { + tlsSocket.write('bar'); + clearInterval(interval); + } + }, 1); + })); + })); })); diff --git a/test/parallel/test-tls-socket-destroy.js b/test/parallel/test-tls-socket-destroy.js new file mode 100644 index 00000000000..27651f8ec72 --- /dev/null +++ b/test/parallel/test-tls-socket-destroy.js @@ -0,0 +1,36 @@ +'use strict'; + +const common = require('../common'); + +if (!common.hasCrypto) { + common.skip('missing crypto'); + return; +} + +const fs = require('fs'); +const net = require('net'); +const tls = require('tls'); + +const key = fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'); +const cert = fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem'); +const secureContext = tls.createSecureContext({ key, cert }); + +const server = net.createServer(common.mustCall((conn) => { + const options = { isServer: true, secureContext, server }; + const socket = new tls.TLSSocket(conn, options); + socket.once('data', common.mustCall(() => { + socket._destroySSL(); // Should not crash. + server.close(); + })); +})); + +server.listen(0, function() { + const options = { + port: this.address().port, + rejectUnauthorized: false, + }; + tls.connect(options, function() { + this.write('*'.repeat(1 << 20)); // Write more data than fits in a frame. + this.on('error', this.destroy); // Server closes connection on us. + }); +}); diff --git a/test/parallel/test-url-parse-format.js b/test/parallel/test-url-parse-format.js index f75f979b29e..7da6a7167b5 100644 --- a/test/parallel/test-url-parse-format.js +++ b/test/parallel/test-url-parse-format.js @@ -1,4 +1,3 @@ -/* eslint-disable max-len */ 'use strict'; require('../common'); const assert = require('assert'); @@ -272,8 +271,7 @@ const parseTests = { }, 'http://user:pass@mt0.google.com/vt/lyrs=m@114???&hl=en&src=api&x=2&y=2&z=3&s=': { - href: 'http://user:pass@mt0.google.com/vt/lyrs=m@114???' + - '&hl=en&src=api&x=2&y=2&z=3&s=', + href: 'http://user:pass@mt0.google.com/vt/lyrs=m@114???&hl=en&src=api&x=2&y=2&z=3&s=', protocol: 'http:', slashes: true, host: 'mt0.google.com', @@ -842,7 +840,7 @@ const parseTests = { hostname: 'a.b', hash: null, pathname: '/%09bc%0Adr%0Def%20g%22hq%27j%3Ckl%3E', - path: '/%09bc%0Adr%0Def%20g%22hq%27j%3Ckl%3E?mn%5Cop%5Eq=r%6099%7Bst%7Cuv%7Dwz', + path: '/%09bc%0Adr%0Def%20g%22hq%27j%3Ckl%3E?mn%5Cop%5Eq=r%6099%7Bst%7Cuv%7Dwz', // eslint-disable-line max-len search: '?mn%5Cop%5Eq=r%6099%7Bst%7Cuv%7Dwz', query: 'mn%5Cop%5Eq=r%6099%7Bst%7Cuv%7Dwz', href: 'http://a.b/%09bc%0Adr%0Def%20g%22hq%27j%3Ckl%3E?mn%5Cop%5Eq=r%6099%7Bst%7Cuv%7Dwz' diff --git a/test/parallel/test-whatwg-url-parsing.js b/test/parallel/test-whatwg-url-parsing.js index e4f8306ead8..4d04c3194e1 100644 --- a/test/parallel/test-whatwg-url-parsing.js +++ b/test/parallel/test-whatwg-url-parsing.js @@ -13,15 +13,30 @@ if (!common.hasIntl) { // Tests below are not from WPT. 
const tests = require(path.join(common.fixturesDir, 'url-tests')); +const failureTests = tests.filter((test) => test.failure).concat([ + { input: '' }, + { input: 'test' }, + { input: undefined }, + { input: 0 }, + { input: true }, + { input: false }, + { input: null }, + { input: new Date() }, + { input: new RegExp() }, + { input: () => {} } +]); -for (const test of tests) { - if (typeof test === 'string') - continue; - - if (test.failure) { - assert.throws(() => new URL(test.input, test.base), - /^TypeError: Invalid URL$/); - } +for (const test of failureTests) { + assert.throws( + () => new URL(test.input, test.base), + (error) => { + // The input could be processed, so we don't do strict matching here + const match = (error + '').match(/^TypeError: Invalid URL: (.*)$/); + if (!match) { + return false; + } + return error.input === match[1]; + }); } const additional_tests = require( diff --git a/test/parallel/test-whatwg-url-searchparams.js b/test/parallel/test-whatwg-url-searchparams.js index 7d6df646407..c7acb7d909d 100644 --- a/test/parallel/test-whatwg-url-searchparams.js +++ b/test/parallel/test-whatwg-url-searchparams.js @@ -1,8 +1,9 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); -const URL = require('url').URL; +const path = require('path'); +const { URL, URLSearchParams } = require('url'); // Tests below are not from WPT. const serialized = 'a=a&a=1&a=true&a=undefined&a=null&a=%EF%BF%BD' + @@ -77,3 +78,27 @@ assert.throws(() => sp.forEach(1), m.search = '?a=a&b=b'; assert.strictEqual(sp.toString(), 'a=a&b=b'); + +const tests = require(path.join(common.fixturesDir, 'url-searchparams.js')); + +for (const [input, expected, parsed] of tests) { + if (input[0] !== '?') { + const sp = new URLSearchParams(input); + assert.strictEqual(String(sp), expected); + assert.deepStrictEqual(Array.from(sp), parsed); + + m.search = input; + assert.strictEqual(String(m.searchParams), expected); + assert.deepStrictEqual(Array.from(m.searchParams), parsed); + } + + { + const sp = new URLSearchParams(`?${input}`); + assert.strictEqual(String(sp), expected); + assert.deepStrictEqual(Array.from(sp), parsed); + + m.search = `?${input}`; + assert.strictEqual(String(m.searchParams), expected); + assert.deepStrictEqual(Array.from(m.searchParams), parsed); + } +} diff --git a/tools/install.py b/tools/install.py index 6491adb89ed..97c6e707087 100755 --- a/tools/install.py +++ b/tools/install.py @@ -159,9 +159,6 @@ def headers(action): else: raise RuntimeError('Unknown engine: %s\n' % variables.get('node_engine')) - if 'false' == variables.get('node_shared_cares'): - subdir_files('deps/cares/include', 'include/node/', action) - if 'false' == variables.get('node_shared_libuv'): subdir_files('deps/uv/include', 'include/node/', action)
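Editor's note: the following is a minimal usage sketch of the C++ URL helper class that the src/node_url.h hunk above introduces. It is not part of the patch; the standalone main(), the includes, and the expected values shown in comments are assumptions for illustration, and the header only resolves when compiled inside Node's own source tree.

// Illustrative only: assumes compilation inside the Node.js source tree so
// that src/node_url.h and its dependencies are available.
#include "node_url.h"

#include <cstdio>
#include <string>

int main() {
  using node::url::URL;

  // Parse an absolute URL; the accessors mirror the struct url_data members.
  URL simple(std::string("https://user:pass@example.org:8080/a/b?x=1#frag"));
  if (!(simple.flags() & node::url::URL_FLAGS_FAILED)) {
    std::printf("%s\n", simple.protocol().c_str());  // expected: "https:"
    std::printf("%s\n", simple.host().c_str());      // expected: "example.org"
    std::printf("%d\n", simple.port());              // expected: 8080
    std::printf("%s\n", simple.path().c_str());      // expected: "/a/b"
    std::printf("%s\n", simple.query().c_str());     // expected: "x=1"
  }

  // Resolve a relative reference against a base, using the
  // URL(input, base) constructor added by this patch.
  URL base(std::string("https://example.org/a/b"));
  URL resolved(std::string("../c"), &base);
  std::printf("%s\n", resolved.path().c_str());      // expected: "/c"
  return 0;
}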
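Editor's note: the tls_wrap.cc hunk above guards against a callback re-entering JS and destroying the SSL state mid-loop. Below is a self-contained sketch of that hazard and of the re-check pattern; the class and member names are illustrative stand-ins, not Node's actual API.

#include <cstddef>
#include <cstdio>
#include <functional>

// FakeWrap stands in for TLSWrap: on_read stands in for OnRead() calling
// into JS, and ssl_ stands in for the real SSL*.
struct FakeWrap {
  int dummy_ssl_state = 1;
  int* ssl_ = &dummy_ssl_state;              // non-null while "alive"
  std::function<void(FakeWrap*)> on_read;    // may re-enter and tear us down

  void DestroySSL() { ssl_ = nullptr; }      // what a JS listener may trigger

  void ClearOut(size_t len, size_t chunk) {
    size_t read = len;
    while (read > 0) {
      size_t avail = read < chunk ? read : chunk;
      on_read(this);          // may call DestroySSL() re-entrantly
      if (ssl_ == nullptr)    // the re-check the patch adds after OnRead()
        return;               // bail out instead of touching freed state
      read -= avail;
    }
  }
};

int main() {
  FakeWrap wrap;
  wrap.on_read = [](FakeWrap* w) { w->DestroySSL(); };  // hostile callback
  wrap.ClearOut(11, 4);  // returns safely after the first chunk
  std::printf("returned without touching destroyed state\n");
  return 0;
}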