Remove deprecated scripts for adding head-matter to wt_config.xml, including Python and Bash implementations, to streamline configuration management.

Torsten Schulz (local)
2025-12-04 16:34:45 +01:00
parent 4b674c7c60
commit 6e9116e819
13187 changed files with 1493219 additions and 337 deletions

131
node_modules/prebuild-install/CHANGELOG.md generated vendored Normal file

@@ -0,0 +1,131 @@
# Changelog
## [7.1.3] - 2025-01-22
### Fixed
- Bump napi-build-utils from 1 to 2 ([#204](https://github.com/prebuild/prebuild-install/issues/204)) ([`1bf4a15`](https://github.com/prebuild/prebuild-install/commit/1bf4a15)) (Bailey Pearson)
## [7.1.2] - 2024-02-29
### Fixed
- Support environments where MD5 is prohibited ([#191](https://github.com/prebuild/prebuild-install/issues/191)) ([`9140468`](https://github.com/prebuild/prebuild-install/commit/9140468)) (Tomasz Szuba)
## [7.1.1] - 2022-06-07
### Fixed
- Replace use of npmlog dependency with console.error ([#182](https://github.com/prebuild/prebuild-install/issues/182)) ([`4e2284c`](https://github.com/prebuild/prebuild-install/commit/4e2284c)) (Lovell Fuller)
- Ensure script output can be captured by tests ([#181](https://github.com/prebuild/prebuild-install/issues/181)) ([`d1853cb`](https://github.com/prebuild/prebuild-install/commit/d1853cb)) (Lovell Fuller)
## [7.1.0] - 2022-04-20
### Changed
- Allow setting libc to glibc on non-glibc platform ([#176](https://github.com/prebuild/prebuild-install/issues/176)) ([`f729abb`](https://github.com/prebuild/prebuild-install/commit/f729abb)) (Joona Heinikoski)
## [7.0.1] - 2022-01-28
### Changed
- Upgrade to the latest version of `detect-libc` ([#166](https://github.com/prebuild/prebuild-install/issues/166)) ([`f71c6b9`](https://github.com/prebuild/prebuild-install/commit/f71c6b9)) (Lovell Fuller)
## [7.0.0] - 2021-11-12
### Changed
- **Breaking:** bump `node-abi` so that Electron 14+ gets correct ABI ([#161](https://github.com/prebuild/prebuild-install/issues/161)) ([`477f347`](https://github.com/prebuild/prebuild-install/commit/477f347)) (csett86). Drops support of Node.js < 10.
- Bump `simple-get` ([`7468c14`](https://github.com/prebuild/prebuild-install/commit/7468c14)) (Vincent Weevers).
## [6.1.4] - 2021-08-11
### Fixed
- Move auth token to header instead of query param ([#160](https://github.com/prebuild/prebuild-install/issues/160)) ([`b3fad76`](https://github.com/prebuild/prebuild-install/commit/b3fad76)) (nicolai-nordic)
- Remove `_` prefix as it isn't allowed by npm config ([#153](https://github.com/prebuild/prebuild-install/issues/153)) ([`a964e5b`](https://github.com/prebuild/prebuild-install/commit/a964e5b)) (Tom Boothman)
- Make `rc.path` absolute ([#158](https://github.com/prebuild/prebuild-install/issues/158)) ([`57bcc06`](https://github.com/prebuild/prebuild-install/commit/57bcc06)) (George Waters).
## [6.1.3] - 2021-06-03
### Changed
- Inline no longer maintained `noop-logger` ([#155](https://github.com/prebuild/prebuild-install/issues/155)) ([`e08d75a`](https://github.com/prebuild/prebuild-install/commit/e08d75a)) (Alexandru Dima)
- Point users towards `prebuildify` in README ([#150](https://github.com/prebuild/prebuild-install/issues/150)) ([`5ee1a2f`](https://github.com/prebuild/prebuild-install/commit/5ee1a2f)) (Vincent Weevers)
## [6.1.2] - 2021-04-24
### Fixed
- Support URL-safe strings in scoped packages ([#148](https://github.com/prebuild/prebuild-install/issues/148)) ([`db36c7a`](https://github.com/prebuild/prebuild-install/commit/db36c7a)) (Marco)
## [6.1.1] - 2021-04-04
### Fixed
- Support `force` & `buildFromSource` options in yarn ([#140](https://github.com/prebuild/prebuild-install/issues/140)) ([`8cb1ced`](https://github.com/prebuild/prebuild-install/commit/8cb1ced)) (João Moreno)
- Bump `node-abi` to prevent dedupe (closes [#135](https://github.com/prebuild/prebuild-install/issues/135)) ([`2950fb2`](https://github.com/prebuild/prebuild-install/commit/2950fb2)) (Vincent Weevers)
## [6.1.0] - 2021-04-03
### Added
- Restore local prebuilds feature ([#137](https://github.com/prebuild/prebuild-install/issues/137)) ([`dc4e5ea`](https://github.com/prebuild/prebuild-install/commit/dc4e5ea)) (Wes Roberts). Previously removed in [#81](https://github.com/prebuild/prebuild-install/issues/81) / [`a069253`](https://github.com/prebuild/prebuild-install/commit/a06925378d38ca821bfa93aa4c1fdedc253b2420).
## [6.0.1] - 2021-02-14
### Fixed
- Fixes empty `--tag-prefix` ([#143](https://github.com/prebuild/prebuild-install/issues/143)) ([**@mathiask88**](https://github.com/mathiask88))
## [6.0.0] - 2020-10-23
### Changed
- **Breaking:** don't skip downloads in standalone mode ([`b6f3b36`](https://github.com/prebuild/prebuild-install/commit/b6f3b36)) ([**@vweevers**](https://github.com/vweevers))
### Added
- Document cross platform options ([`e5c9a5a`](https://github.com/prebuild/prebuild-install/commit/e5c9a5a)) ([**@fishbone1**](https://github.com/fishbone1))
### Removed
- **Breaking:** remove `--compile` and `--prebuild` options ([`94f2492`](https://github.com/prebuild/prebuild-install/commit/94f2492)) ([**@vweevers**](https://github.com/vweevers))
### Fixed
- Support npm 7 ([`8acccac`](https://github.com/prebuild/prebuild-install/commit/8acccac), [`08eaf6d`](https://github.com/prebuild/prebuild-install/commit/08eaf6d), [`22175b8`](https://github.com/prebuild/prebuild-install/commit/22175b8)) ([**@vweevers**](https://github.com/vweevers))
## [5.3.6] - 2020-10-20
### Changed
- Replace `mkdirp` dependency with `mkdirp-classic` ([**@ralphtheninja**](https://github.com/ralphtheninja))
[7.1.3]: https://github.com/prebuild/prebuild-install/releases/tag/v7.1.3
[7.1.2]: https://github.com/prebuild/prebuild-install/releases/tag/v7.1.2
[7.1.1]: https://github.com/prebuild/prebuild-install/releases/tag/v7.1.1
[7.1.0]: https://github.com/prebuild/prebuild-install/releases/tag/v7.1.0
[7.0.1]: https://github.com/prebuild/prebuild-install/releases/tag/v7.0.1
[7.0.0]: https://github.com/prebuild/prebuild-install/releases/tag/v7.0.0
[6.1.4]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.4
[6.1.3]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.3
[6.1.2]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.2
[6.1.1]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.1
[6.1.0]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.0
[6.0.1]: https://github.com/prebuild/prebuild-install/releases/tag/v6.0.1
[6.0.0]: https://github.com/prebuild/prebuild-install/releases/tag/v6.0.0
[5.3.6]: https://github.com/prebuild/prebuild-install/releases/tag/v5.3.6

6
node_modules/prebuild-install/CONTRIBUTING.md generated vendored Normal file

@@ -0,0 +1,6 @@
# Contributing to prebuild
- no commits direct to master
- all commits as pull requests (one or several per PR)
- each commit solves one identifiable problem
- never merge one's own PRs, another contributor does this

21
node_modules/prebuild-install/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2015 Mathias Buus
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

163
node_modules/prebuild-install/README.md generated vendored Normal file

@@ -0,0 +1,163 @@
# prebuild-install
> **A command line tool to easily install prebuilt binaries for multiple versions of Node.js & Electron on a specific platform.**
> By default it downloads prebuilt binaries from a GitHub release.
[![npm](https://img.shields.io/npm/v/prebuild-install.svg)](https://www.npmjs.com/package/prebuild-install)
![Node version](https://img.shields.io/node/v/prebuild-install.svg)
[![Test](https://img.shields.io/github/workflow/status/prebuild/prebuild-install/Test?label=test)](https://github.com/prebuild/prebuild-install/actions/workflows/test.yml)
[![Standard](https://img.shields.io/badge/standard-informational?logo=javascript&logoColor=fff)](https://standardjs.com)
[![Common Changelog](https://common-changelog.org/badge.svg)](https://common-changelog.org)
## Note
**Instead of [`prebuild`](https://github.com/prebuild/prebuild) paired with [`prebuild-install`](https://github.com/prebuild/prebuild-install), we recommend [`prebuildify`](https://github.com/prebuild/prebuildify) paired with [`node-gyp-build`](https://github.com/prebuild/node-gyp-build).**
With `prebuildify`, all prebuilt binaries are shipped inside the package that is published to npm, which means there's no need for a separate download step like you find in `prebuild`. The irony of this approach is that it is faster to download all prebuilt binaries for every platform when they are bundled than it is to download a single prebuilt binary as an install script.
Upsides:
1. No extra download step, making it more reliable and faster to install.
2. Supports changing runtime versions locally and using the same install between Node.js and Electron. Reinstalling or rebuilding is not necessary, as all prebuilt binaries are in the npm tarball and the correct one is simply picked at runtime.
3. The `node-gyp-build` runtime dependency is dependency-free and will remain so out of principle, because introducing dependencies would negate the shorter install time.
4. Prebuilt binaries work even if npm install scripts are disabled.
5. The npm package checksum covers prebuilt binaries too.
Downsides:
1. The installed npm package is larger on disk. Using [Node-API](https://nodejs.org/api/n-api.html) alleviates this because Node-API binaries are runtime-agnostic and forward-compatible.
2. Publishing is mildly more complicated, because `npm publish` must be done after compiling and fetching prebuilt binaries (typically in CI).
## Usage
Use [`prebuild`](https://github.com/prebuild/prebuild) to create and upload prebuilt binaries. Then change your package.json install script to:
```json
{
"scripts": {
"install": "prebuild-install || node-gyp rebuild"
}
}
```
When a consumer then installs your package with npm, triggering the above install script, `prebuild-install` downloads a suitable prebuilt binary, or exits with a non-zero exit code if there is none, which triggers `node-gyp rebuild` to build from source.
Options (see below) can be passed to `prebuild-install` like so:
```json
{
"scripts": {
"install": "prebuild-install -r napi || node-gyp rebuild"
}
}
```
### Help
```
prebuild-install [options]
--download -d [url] (download prebuilds, no url means github)
--target -t version (version to install for)
--runtime -r runtime (Node runtime [node, napi or electron] to build or install for, default is node)
--path -p path (make a prebuild-install here)
--token -T gh-token (github token for private repos)
--arch arch (target CPU architecture, see Node OS module docs, default is current arch)
--platform platform (target platform, see Node OS module docs, default is current platform)
--tag-prefix <prefix> (github tag prefix, default is "v")
--build-from-source (skip prebuild download)
--verbose (log verbosely)
--libc (use provided libc rather than system default)
--debug (set Debug or Release configuration)
--version (print prebuild-install version and exit)
```
When `prebuild-install` is run via an `npm` script, the options `--build-from-source`, `--debug`, `--download`, `--target`, `--runtime`, `--arch`, `--platform` and `--libc` may be passed through via arguments given to the `npm` command.
Alternatively you can set the environment variables `npm_config_build_from_source=true`, `npm_config_platform`, `npm_config_arch`, `npm_config_target`, `npm_config_runtime` and `npm_config_libc`.
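For illustration, here is a minimal sketch of how such `npm_config_*` variables could map back onto options (the `readNpmConfig` helper is hypothetical; prebuild-install derives its options through its own rc module):

```js
// Hypothetical sketch only: shows how npm_config_* environment variables
// could translate into the options that the CLI flags above set.
function readNpmConfig (env) {
  return {
    buildFromSource: env.npm_config_build_from_source === 'true',
    platform: env.npm_config_platform || process.platform,
    arch: env.npm_config_arch || process.arch,
    target: env.npm_config_target, // e.g. a Node.js or Electron version
    runtime: env.npm_config_runtime || 'node',
    libc: env.npm_config_libc
  }
}

console.log(readNpmConfig(process.env))
```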
### Libc
On non-glibc Linux platforms, the libc name is appended to the platform name; for example, musl-based environments are called `linuxmusl`. If `--libc=glibc` is passed as an option, the glibc suffix is dropped and the platform is named just `linux`. This can be used, for example, to build cross-platform packages on Alpine Linux.
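A minimal sketch of that naming rule, assuming a libc name detected beforehand (the `platformName` function is hypothetical; prebuild-install uses the `detect-libc` package internally):

```js
// Hypothetical sketch of the documented rule: a non-glibc libc name is
// appended to 'linux', while an explicit glibc yields plain 'linux'.
function platformName (platform, libc) {
  if (platform !== 'linux' || !libc || libc === 'glibc') return platform
  return platform + libc
}

console.log(platformName('linux', 'musl'))  // 'linuxmusl'
console.log(platformName('linux', 'glibc')) // 'linux'
```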
### Private Repositories
`prebuild-install` supports downloading prebuilds from private GitHub repositories using the `-T <github-token>` option:
```
$ prebuild-install -T <github-token>
```
If you don't want to pass the token on the command line, you can put it in `~/.prebuild-installrc`:
```
token=<github-token>
```
Alternatively you can specify it in the `prebuild-install_token` environment variable.
Note that using a GitHub token means the API is used to resolve the correct release, so you are subject to the [GitHub rate limit](https://developer.github.com/v3/rate_limit/).
### Create GitHub Token
To create a token:
- Go to [this page](https://github.com/settings/tokens)
- Click the `Generate new token` button
- Give the token a name and click the `Generate token` button, see below
![prebuild-token](https://cloud.githubusercontent.com/assets/13285808/20844584/d0b85268-b8c0-11e6-8b08-2b19522165a9.png)
The default scopes should be fine.
### Custom binaries
The end user can override the binary download location through variables in their `.npmrc` file.
The variable name needs to match the mask `% your package name %_binary_host` or `% your package name %_binary_host_mirror`. For example:
```
leveldown_binary_host=http://overriden-host.com/overriden-path
```
Note that the package version subpath and file name will still be appended.
So if you are installing `leveldown@1.2.3` the resulting URL will be:
```
http://overriden-host.com/overriden-path/v1.2.3/leveldown-v1.2.3-node-v57-win32-x64.tar.gz
```
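As a rough sketch of that composition (hypothetical helper; the real logic lives in prebuild-install's `util.js` and may differ in detail):

```js
// Hypothetical sketch: overridden host + version subpath + file name.
function overriddenDownloadUrl (host, version, fileName) {
  return host.replace(/\/$/, '') + '/v' + version + '/' + fileName
}

console.log(overriddenDownloadUrl(
  'http://overriden-host.com/overriden-path',
  '1.2.3',
  'leveldown-v1.2.3-node-v57-win32-x64.tar.gz'
))
// http://overriden-host.com/overriden-path/v1.2.3/leveldown-v1.2.3-node-v57-win32-x64.tar.gz
```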
#### Local prebuilds
If you want to use prebuilds from your local filesystem, you can use the `% your package name %_local_prebuilds` .npmrc variable to set a path to the folder containing prebuilds. For example:
```
leveldown_local_prebuilds=/path/to/prebuilds
```
This option will look directly in that folder for bundles created with `prebuild`, for example:
```
/path/to/prebuilds/leveldown-v1.2.3-node-v57-win32-x64.tar.gz
```
Non-absolute paths resolve relative to the directory of the package invoking prebuild-install, e.g. for nested dependencies.
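That resolution amounts to a plain `path.resolve` against the invoking package's directory; a minimal sketch, with a hypothetical helper name:

```js
const path = require('path')

// Hypothetical sketch: absolute paths are used as-is, anything else is
// resolved against the directory of the package running prebuild-install.
function resolveLocalPrebuilds (configured, packageDir) {
  return path.isAbsolute(configured)
    ? configured
    : path.resolve(packageDir, configured)
}

console.log(resolveLocalPrebuilds('prebuilds', '/app/node_modules/leveldown'))
// => /app/node_modules/leveldown/prebuilds
```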
### Cache
All prebuilt binaries are cached to minimize traffic: `prebuild-install` first looks for a binary in the cache and downloads it only if none is found. Depending on the environment, the cache folder is determined in the following order:
- `${npm_config_cache}/_prebuilds`
- `${APP_DATA}/npm-cache/_prebuilds`
- `${HOME}/.npm/_prebuilds`
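The order above can be read as a simple fallback chain; a hedged sketch (prebuild-install's actual lookup lives in its `util.js`):

```js
const os = require('os')
const path = require('path')

// Hypothetical sketch of the documented fallback order for the cache folder.
function prebuildCache (env) {
  const base = env.npm_config_cache ||
    (env.APP_DATA && path.join(env.APP_DATA, 'npm-cache')) ||
    path.join(os.homedir(), '.npm')
  return path.join(base, '_prebuilds')
}

console.log(prebuildCache(process.env))
```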
## Install
With [npm](https://npmjs.org) do:
```
npm install prebuild-install
```
## License
[MIT](./LICENSE)

44
node_modules/prebuild-install/asset.js generated vendored Normal file

@@ -0,0 +1,44 @@
const get = require('simple-get')
const util = require('./util')
const proxy = require('./proxy')

function findAssetId (opts, cb) {
  const downloadUrl = util.getDownloadUrl(opts)
  const apiUrl = util.getApiUrl(opts)
  const log = opts.log || util.noopLogger

  log.http('request', 'GET ' + apiUrl)

  const reqOpts = proxy({
    url: apiUrl,
    json: true,
    headers: {
      'User-Agent': 'simple-get',
      Authorization: 'token ' + opts.token
    }
  }, opts)

  const req = get.concat(reqOpts, function (err, res, data) {
    if (err) return cb(err)
    log.http(res.statusCode, apiUrl)
    if (res.statusCode !== 200) return cb(err)

    // Find asset id in release
    for (const release of data) {
      if (release.tag_name === opts['tag-prefix'] + opts.pkg.version) {
        for (const asset of release.assets) {
          if (asset.browser_download_url === downloadUrl) {
            return cb(null, asset.id)
          }
        }
      }
    }

    cb(new Error('Could not find GitHub release for version'))
  })

  req.setTimeout(30 * 1000, function () {
    req.abort()
  })
}

module.exports = findAssetId

78
node_modules/prebuild-install/bin.js generated vendored Executable file

@@ -0,0 +1,78 @@
#!/usr/bin/env node

const path = require('path')
const fs = require('fs')
const napi = require('napi-build-utils')

const pkg = require(path.resolve('package.json'))
const rc = require('./rc')(pkg)
const log = require('./log')(rc, process.env)
const download = require('./download')
const asset = require('./asset')
const util = require('./util')

const prebuildClientVersion = require('./package.json').version

if (rc.version) {
  console.log(prebuildClientVersion)
  process.exit(0)
}

if (rc.path) process.chdir(rc.path)

if (rc.runtime === 'electron' && rc.target[0] === '4' && rc.abi === '64') {
  log.error(`Electron version ${rc.target} found - skipping prebuild-install work due to known ABI issue`)
  log.error('More information about this issue can be found at https://github.com/lgeiger/node-abi/issues/54')
  process.exit(1)
}

if (!fs.existsSync('package.json')) {
  log.error('setup', 'No package.json found. Aborting...')
  process.exit(1)
}

if (rc.help) {
  console.error(fs.readFileSync(path.join(__dirname, 'help.txt'), 'utf-8'))
  process.exit(0)
}

log.info('begin', 'Prebuild-install version', prebuildClientVersion)

const opts = Object.assign({}, rc, { pkg: pkg, log: log })

if (napi.isNapiRuntime(rc.runtime)) napi.logUnsupportedVersion(rc.target, log)

const origin = util.packageOrigin(process.env, pkg)

if (opts.force) {
  log.warn('install', 'prebuilt binaries enforced with --force!')
  log.warn('install', 'prebuilt binaries may be out of date!')
} else if (origin && origin.length > 4 && origin.substr(0, 4) === 'git+') {
  log.info('install', 'installing from git repository, skipping download.')
  process.exit(1)
} else if (opts.buildFromSource) {
  log.info('install', '--build-from-source specified, not attempting download.')
  process.exit(1)
}

const startDownload = function (downloadUrl) {
  download(downloadUrl, opts, function (err) {
    if (err) {
      log.warn('install', err.message)
      return process.exit(1)
    }
    log.info('install', 'Successfully installed prebuilt binary!')
  })
}

if (opts.token) {
  asset(opts, function (err, assetId) {
    if (err) {
      log.warn('install', err.message)
      return process.exit(1)
    }
    startDownload(util.getAssetUrl(opts, assetId))
  })
} else {
  startDownload(util.getDownloadUrl(opts))
}

142
node_modules/prebuild-install/download.js generated vendored Normal file

@@ -0,0 +1,142 @@
const path = require('path')
const fs = require('fs')
const get = require('simple-get')
const pump = require('pump')
const tfs = require('tar-fs')
const zlib = require('zlib')
const util = require('./util')
const error = require('./error')
const proxy = require('./proxy')
const mkdirp = require('mkdirp-classic')

function downloadPrebuild (downloadUrl, opts, cb) {
  let cachedPrebuild = util.cachedPrebuild(downloadUrl)
  const localPrebuild = util.localPrebuild(downloadUrl, opts)
  const tempFile = util.tempFile(cachedPrebuild)
  const log = opts.log || util.noopLogger

  if (opts.nolocal) return download()

  log.info('looking for local prebuild @', localPrebuild)
  fs.access(localPrebuild, fs.R_OK | fs.W_OK, function (err) {
    if (err && err.code === 'ENOENT') {
      return download()
    }

    log.info('found local prebuild')
    cachedPrebuild = localPrebuild
    unpack()
  })

  function download () {
    ensureNpmCacheDir(function (err) {
      if (err) return onerror(err)

      log.info('looking for cached prebuild @', cachedPrebuild)
      fs.access(cachedPrebuild, fs.R_OK | fs.W_OK, function (err) {
        if (!(err && err.code === 'ENOENT')) {
          log.info('found cached prebuild')
          return unpack()
        }

        log.http('request', 'GET ' + downloadUrl)
        const reqOpts = proxy({ url: downloadUrl }, opts)

        if (opts.token) {
          reqOpts.headers = {
            'User-Agent': 'simple-get',
            Accept: 'application/octet-stream',
            Authorization: 'token ' + opts.token
          }
        }

        const req = get(reqOpts, function (err, res) {
          if (err) return onerror(err)
          log.http(res.statusCode, downloadUrl)
          if (res.statusCode !== 200) return onerror()
          mkdirp(util.prebuildCache(), function () {
            log.info('downloading to @', tempFile)
            pump(res, fs.createWriteStream(tempFile), function (err) {
              if (err) return onerror(err)
              fs.rename(tempFile, cachedPrebuild, function (err) {
                if (err) return cb(err)
                log.info('renaming to @', cachedPrebuild)
                unpack()
              })
            })
          })
        })

        req.setTimeout(30 * 1000, function () {
          req.abort()
        })
      })

      function onerror (err) {
        fs.unlink(tempFile, function () {
          cb(err || error.noPrebuilts(opts))
        })
      }
    })
  }

  function unpack () {
    let binaryName

    const updateName = opts.updateName || function (entry) {
      if (/\.node$/i.test(entry.name)) binaryName = entry.name
    }

    log.info('unpacking @', cachedPrebuild)

    const options = {
      readable: true,
      writable: true,
      hardlinkAsFilesFallback: true
    }
    const extract = tfs.extract(opts.path, options).on('entry', updateName)

    pump(fs.createReadStream(cachedPrebuild), zlib.createGunzip(), extract,
      function (err) {
        if (err) return cb(err)

        let resolved
        if (binaryName) {
          try {
            resolved = path.resolve(opts.path || '.', binaryName)
          } catch (err) {
            return cb(err)
          }
          log.info('unpack', 'resolved to ' + resolved)

          if (opts.runtime === 'node' && opts.platform === process.platform && opts.abi === process.versions.modules && opts.arch === process.arch) {
            try {
              require(resolved)
            } catch (err) {
              return cb(err)
            }
            log.info('unpack', 'required ' + resolved + ' successfully')
          }
        }

        cb(null, resolved)
      })
  }

  function ensureNpmCacheDir (cb) {
    const cacheFolder = util.npmCache()
    fs.access(cacheFolder, fs.R_OK | fs.W_OK, function (err) {
      if (err && err.code === 'ENOENT') {
        return makeNpmCacheDir()
      }
      cb(err)
    })

    function makeNpmCacheDir () {
      log.info('npm cache directory missing, creating it...')
      mkdirp(cacheFolder, cb)
    }
  }
}

module.exports = downloadPrebuild

14
node_modules/prebuild-install/error.js generated vendored Normal file

@@ -0,0 +1,14 @@
exports.noPrebuilts = function (opts) {
  return new Error([
    'No prebuilt binaries found',
    '(target=' + opts.target,
    'runtime=' + opts.runtime,
    'arch=' + opts.arch,
    'libc=' + opts.libc,
    'platform=' + opts.platform + ')'
  ].join(' '))
}

exports.invalidArchive = function () {
  return new Error('Missing .node file in archive')
}

16
node_modules/prebuild-install/help.txt generated vendored Normal file

@@ -0,0 +1,16 @@
prebuild-install [options]
--download -d [url] (download prebuilds, no url means github)
--target -t version (version to install for)
--runtime -r runtime (Node runtime [node or electron] to build or install for, default is node)
--path -p path (make a prebuild-install here)
--token -T gh-token (github token for private repos)
--arch arch (target CPU architecture, see Node OS module docs, default is current arch)
--platform platform (target platform, see Node OS module docs, default is current platform)
--tag-prefix <prefix> (github tag prefix, default is "v")
--force (always use prebuilt binaries when available)
--build-from-source (skip prebuild download)
--verbose (log verbosely)
--libc (use provided libc rather than system default)
--debug (set Debug or Release configuration)
--version (print prebuild-install version and exit)

1
node_modules/prebuild-install/index.js generated vendored Normal file

@@ -0,0 +1 @@
exports.download = require('./download')

33
node_modules/prebuild-install/log.js generated vendored Normal file

@@ -0,0 +1,33 @@
const levels = {
  silent: 0,
  error: 1,
  warn: 2,
  notice: 3,
  http: 4,
  timing: 5,
  info: 6,
  verbose: 7,
  silly: 8
}

module.exports = function (rc, env) {
  const level = rc.verbose
    ? 'verbose'
    : env.npm_config_loglevel || 'notice'

  const logAtLevel = function (messageLevel) {
    return function (...args) {
      if (levels[messageLevel] <= levels[level]) {
        console.error(`prebuild-install ${messageLevel} ${args.join(' ')}`)
      }
    }
  }

  return {
    error: logAtLevel('error'),
    warn: logAtLevel('warn'),
    http: logAtLevel('http'),
    info: logAtLevel('info'),
    level
  }
}


@@ -0,0 +1,38 @@
# Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
* (a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
* (b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
* (c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
* (d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.
## Moderation Policy
The [Node.js Moderation Policy] applies to this WG.
## Code of Conduct
The [Node.js Code of Conduct][] applies to this WG.
[Node.js Code of Conduct]:
https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md
[Node.js Moderation Policy]:
https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md


@@ -0,0 +1,136 @@
### Streams Working Group
The Node.js Streams is jointly governed by a Working Group
(WG)
that is responsible for high-level guidance of the project.
The WG has final authority over this project including:
* Technical direction
* Project governance and process (including this policy)
* Contribution policy
* GitHub repository hosting
* Conduct guidelines
* Maintaining the list of additional Collaborators
For the current list of WG members, see the project
[README.md](./README.md#current-project-team-members).
### Collaborators
The readable-stream GitHub repository is
maintained by the WG and additional Collaborators who are added by the
WG on an ongoing basis.
Individuals making significant and valuable contributions are made
Collaborators and given commit-access to the project. These
individuals are identified by the WG and their addition as
Collaborators is discussed during the WG meeting.
_Note:_ If you make a significant contribution and are not considered
for commit-access log an issue or contact a WG member directly and it
will be brought up in the next WG meeting.
Modifications of the contents of the readable-stream repository are
made on
a collaborative basis. Anybody with a GitHub account may propose a
modification via pull request and it will be considered by the project
Collaborators. All pull requests must be reviewed and accepted by a
Collaborator with sufficient expertise who is able to take full
responsibility for the change. In the case of pull requests proposed
by an existing Collaborator, an additional Collaborator is required
for sign-off. Consensus should be sought if additional Collaborators
participate and there is disagreement around a particular
modification. See _Consensus Seeking Process_ below for further detail
on the consensus model used for governance.
Collaborators may opt to elevate significant or controversial
modifications, or modifications that have not found consensus to the
WG for discussion by assigning the ***WG-agenda*** tag to a pull
request or issue. The WG should serve as the final arbiter where
required.
For the current list of Collaborators, see the project
[README.md](./README.md#members).
### WG Membership
WG seats are not time-limited. There is no fixed size of the WG.
However, the expected target is between 6 and 12, to ensure adequate
coverage of important areas of expertise, balanced with the ability to
make decisions efficiently.
There is no specific set of requirements or qualifications for WG
membership beyond these rules.
The WG may add additional members to the WG by unanimous consensus.
A WG member may be removed from the WG by voluntary resignation, or by
unanimous consensus of all other WG members.
Changes to WG membership should be posted in the agenda, and may be
suggested as any other agenda item (see "WG Meetings" below).
If an addition or removal is proposed during a meeting, and the full
WG is not in attendance to participate, then the addition or removal
is added to the agenda for the subsequent meeting. This is to ensure
that all members are given the opportunity to participate in all
membership decisions. If a WG member is unable to attend a meeting
where a planned membership decision is being made, then their consent
is assumed.
No more than 1/3 of the WG members may be affiliated with the same
employer. If removal or resignation of a WG member, or a change of
employment by a WG member, creates a situation where more than 1/3 of
the WG membership shares an employer, then the situation must be
immediately remedied by the resignation or removal of one or more WG
members affiliated with the over-represented employer(s).
### WG Meetings
The WG meets occasionally on a Google Hangout On Air. A designated moderator
approved by the WG runs the meeting. Each meeting should be
published to YouTube.
Items are added to the WG agenda that are considered contentious or
are modifications of governance, contribution policy, WG membership,
or release process.
The intention of the agenda is not to approve or review all patches;
that should happen continuously on GitHub and be handled by the larger
group of Collaborators.
Any community member or contributor can ask that something be added to
the next meeting's agenda by logging a GitHub Issue. Any Collaborator,
WG member or the moderator can add the item to the agenda by adding
the ***WG-agenda*** tag to the issue.
Prior to each WG meeting the moderator will share the Agenda with
members of the WG. WG members can add any items they like to the
agenda at the beginning of each meeting. The moderator and the WG
cannot veto or remove items.
The WG may invite persons or representatives from certain projects to
participate in a non-voting capacity.
The moderator is responsible for summarizing the discussion of each
agenda item and sends it as a pull request after the meeting.
### Consensus Seeking Process
The WG follows a
[Consensus
Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making)
decision-making model.
When an agenda item has appeared to reach a consensus the moderator
will ask "Does anyone object?" as a final call for dissent from the
consensus.
If an agenda item cannot reach a consensus a WG member can call for
either a closing vote or a vote to table the issue to the next
meeting. The call for a vote must be seconded by a majority of the WG
or else the discussion will continue. Simple majority wins.
Note that changes to WG membership require a majority consensus. See
"WG Membership" above.


@@ -0,0 +1,47 @@
Node.js is licensed for use as follows:
"""
Copyright Node.js contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
This license applies to parts of Node.js originating from the
https://github.com/joyent/node repository:
"""
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""


@@ -0,0 +1,106 @@
# readable-stream
***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream)
[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/)
[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/)
[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream)
```bash
npm install --save readable-stream
```
This package is a mirror of the streams implementations in Node.js.
Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.18.1/docs/api/stream.html).
If you want to guarantee a stable streams base, regardless of what version of
Node you, or the users of your libraries, are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core; for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
As of version 2.0.0 **readable-stream** uses semantic versioning.
## Version 3.x.x
v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows:
1. Error codes: https://github.com/nodejs/node/pull/13310,
https://github.com/nodejs/node/pull/13291,
https://github.com/nodejs/node/pull/16589,
https://github.com/nodejs/node/pull/15042,
https://github.com/nodejs/node/pull/15665,
https://github.com/nodejs/readable-stream/pull/344
2. 'readable' has precedence over flowing
https://github.com/nodejs/node/pull/18994
3. make virtual method errors consistent
https://github.com/nodejs/node/pull/18813
4. updated streams error handling
https://github.com/nodejs/node/pull/18438
5. writable.end should return this.
https://github.com/nodejs/node/pull/18780
6. readable continues to read when push('')
https://github.com/nodejs/node/pull/18211
7. add custom inspect to BufferList
https://github.com/nodejs/node/pull/17907
8. always defer 'readable' with nextTick
https://github.com/nodejs/node/pull/17979
## Version 2.x.x
v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11.
### Big Thanks
Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce]
# Usage
You can swap your `require('stream')` with `require('readable-stream')`
without any changes, if you are just using one of the main classes and
functions.
```js
const {
Readable,
Writable,
Transform,
Duplex,
pipeline,
finished
} = require('readable-stream')
```
Note that `require('stream')` will return `Stream`, while
`require('readable-stream')` will return `Readable`. We discourage using
the root export directly; instead, use one of the named properties as
shown in the example above.
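The difference is easy to observe directly; a small example, assuming readable-stream v3 is installed:

```js
const coreStream = require('stream')
const readableStream = require('readable-stream')

// The core module's root export is the legacy Stream constructor, while
// readable-stream's root export is Readable; prefer the named properties.
console.log(coreStream.name)                 // 'Stream'
console.log(readableStream.name)             // 'Readable'
console.log(typeof readableStream.Readable)  // 'function'
```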
# Streams Working Group
`readable-stream` is maintained by the Streams Working Group, which
oversees the development and maintenance of the Streams API within
Node.js. The responsibilities of the Streams Working Group include:
* Addressing stream issues on the Node.js issue tracker.
* Authoring and editing stream documentation within the Node.js project.
* Reviewing changes to stream subclasses within the Node.js project.
* Redirecting changes to streams from the Node.js project to this
project.
* Assisting in the implementation of stream providers within Node.js.
* Recommending versions of `readable-stream` to be included in Node.js.
* Messaging about the future of streams to give the community advance
notice of changes.
<a name="members"></a>
## Team Members
* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) &lt;calvin.metcalf@gmail.com&gt;
- Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) &lt;mathiasbuus@gmail.com&gt;
* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) &lt;matteo.collina@gmail.com&gt;
- Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) &lt;shestak.irina@gmail.com&gt;
* **Yoshua Wuyts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) &lt;yoshuawuyts@gmail.com&gt;
[sauce]: https://saucelabs.com


@@ -0,0 +1,127 @@
'use strict';

function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }

var codes = {};

function createErrorType(code, message, Base) {
  if (!Base) {
    Base = Error;
  }

  function getMessage(arg1, arg2, arg3) {
    if (typeof message === 'string') {
      return message;
    } else {
      return message(arg1, arg2, arg3);
    }
  }

  var NodeError =
  /*#__PURE__*/
  function (_Base) {
    _inheritsLoose(NodeError, _Base);

    function NodeError(arg1, arg2, arg3) {
      return _Base.call(this, getMessage(arg1, arg2, arg3)) || this;
    }

    return NodeError;
  }(Base);

  NodeError.prototype.name = Base.name;
  NodeError.prototype.code = code;
  codes[code] = NodeError;
} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js

function oneOf(expected, thing) {
  if (Array.isArray(expected)) {
    var len = expected.length;
    expected = expected.map(function (i) {
      return String(i);
    });

    if (len > 2) {
      return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1];
    } else if (len === 2) {
      return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]);
    } else {
      return "of ".concat(thing, " ").concat(expected[0]);
    }
  } else {
    return "of ".concat(thing, " ").concat(String(expected));
  }
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith

function startsWith(str, search, pos) {
  return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith

function endsWith(str, search, this_len) {
  if (this_len === undefined || this_len > str.length) {
    this_len = str.length;
  }

  return str.substring(this_len - search.length, this_len) === search;
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes

function includes(str, search, start) {
  if (typeof start !== 'number') {
    start = 0;
  }

  if (start + search.length > str.length) {
    return false;
  } else {
    return str.indexOf(search, start) !== -1;
  }
}

createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
  return 'The value "' + value + '" is invalid for option "' + name + '"';
}, TypeError);
createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
  // determiner: 'must be' or 'must not be'
  var determiner;

  if (typeof expected === 'string' && startsWith(expected, 'not ')) {
    determiner = 'must not be';
    expected = expected.replace(/^not /, '');
  } else {
    determiner = 'must be';
  }

  var msg;

  if (endsWith(name, ' argument')) {
    // For cases like 'first argument'
    msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
  } else {
    var type = includes(name, '.') ? 'property' : 'argument';
    msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
  }

  msg += ". Received type ".concat(typeof actual);
  return msg;
}, TypeError);
createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
  return 'The ' + name + ' method is not implemented';
});
createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
createErrorType('ERR_STREAM_DESTROYED', function (name) {
  return 'Cannot call ' + name + ' after a stream was destroyed';
});
createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
  return 'Unknown encoding: ' + arg;
}, TypeError);
createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
module.exports.codes = codes;


@@ -0,0 +1,116 @@
'use strict';

const codes = {};

function createErrorType(code, message, Base) {
  if (!Base) {
    Base = Error
  }

  function getMessage (arg1, arg2, arg3) {
    if (typeof message === 'string') {
      return message
    } else {
      return message(arg1, arg2, arg3)
    }
  }

  class NodeError extends Base {
    constructor (arg1, arg2, arg3) {
      super(getMessage(arg1, arg2, arg3));
    }
  }

  NodeError.prototype.name = Base.name;
  NodeError.prototype.code = code;

  codes[code] = NodeError;
}

// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
function oneOf(expected, thing) {
  if (Array.isArray(expected)) {
    const len = expected.length;
    expected = expected.map((i) => String(i));
    if (len > 2) {
      return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +
             expected[len - 1];
    } else if (len === 2) {
      return `one of ${thing} ${expected[0]} or ${expected[1]}`;
    } else {
      return `of ${thing} ${expected[0]}`;
    }
  } else {
    return `of ${thing} ${String(expected)}`;
  }
}

// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
function startsWith(str, search, pos) {
  return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
}

// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
function endsWith(str, search, this_len) {
  if (this_len === undefined || this_len > str.length) {
    this_len = str.length;
  }

  return str.substring(this_len - search.length, this_len) === search;
}

// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
function includes(str, search, start) {
  if (typeof start !== 'number') {
    start = 0;
  }

  if (start + search.length > str.length) {
    return false;
  } else {
    return str.indexOf(search, start) !== -1;
  }
}

createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
  return 'The value "' + value + '" is invalid for option "' + name + '"'
}, TypeError);
createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
  // determiner: 'must be' or 'must not be'
  let determiner;
  if (typeof expected === 'string' && startsWith(expected, 'not ')) {
    determiner = 'must not be';
    expected = expected.replace(/^not /, '');
  } else {
    determiner = 'must be';
  }

  let msg;
  if (endsWith(name, ' argument')) {
    // For cases like 'first argument'
    msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;
  } else {
    const type = includes(name, '.') ? 'property' : 'argument';
    msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`;
  }

  msg += `. Received type ${typeof actual}`;
  return msg;
}, TypeError);
createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
  return 'The ' + name + ' method is not implemented'
});
createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
createErrorType('ERR_STREAM_DESTROYED', function (name) {
  return 'Cannot call ' + name + ' after a stream was destroyed';
});
createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
  return 'Unknown encoding: ' + arg
}, TypeError);
createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');

module.exports.codes = codes;


@@ -0,0 +1,17 @@
'use strict'

var experimentalWarnings = new Set();

function emitExperimentalWarning(feature) {
  if (experimentalWarnings.has(feature)) return;
  var msg = feature + ' is an experimental feature. This feature could ' +
            'change at any time';
  experimentalWarnings.add(feature);
  process.emitWarning(msg, 'ExperimentalWarning');
}

function noop() {}

module.exports.emitExperimentalWarning = process.emitWarning
  ? emitExperimentalWarning
  : noop;


@@ -0,0 +1,126 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
'use strict';
/*<replacement>*/

var objectKeys = Object.keys || function (obj) {
  var keys = [];

  for (var key in obj) keys.push(key);

  return keys;
};
/*</replacement>*/

module.exports = Duplex;

var Readable = require('./_stream_readable');

var Writable = require('./_stream_writable');

require('inherits')(Duplex, Readable);

{
  // Allow the keys array to be GC'ed.
  var keys = objectKeys(Writable.prototype);

  for (var v = 0; v < keys.length; v++) {
    var method = keys[v];
    if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
  }
}

function Duplex(options) {
  if (!(this instanceof Duplex)) return new Duplex(options);
  Readable.call(this, options);
  Writable.call(this, options);
  this.allowHalfOpen = true;

  if (options) {
    if (options.readable === false) this.readable = false;
    if (options.writable === false) this.writable = false;

    if (options.allowHalfOpen === false) {
      this.allowHalfOpen = false;
      this.once('end', onend);
    }
  }
}

Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState.highWaterMark;
  }
});
Object.defineProperty(Duplex.prototype, 'writableBuffer', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState && this._writableState.getBuffer();
  }
});
Object.defineProperty(Duplex.prototype, 'writableLength', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    return this._writableState.length;
  }
});

// the no-half-open enforcer
function onend() {
  // If the writable side ended, then we're ok.
  if (this._writableState.ended) return;

  // no more data can be written.
  // But allow more writes to happen in this tick.
  process.nextTick(onEndNT, this);
}

function onEndNT(self) {
  self.end();
}

Object.defineProperty(Duplex.prototype, 'destroyed', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    if (this._readableState === undefined || this._writableState === undefined) {
      return false;
    }

    return this._readableState.destroyed && this._writableState.destroyed;
  },
  set: function set(value) {
    // we ignore the value if the stream
    // has not been initialized yet
    if (this._readableState === undefined || this._writableState === undefined) {
      return;
    }

    // backward compatibility, the user is explicitly
    // managing destroyed
    this._readableState.destroyed = value;
    this._writableState.destroyed = value;
  }
});


@@ -0,0 +1,37 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
'use strict';

module.exports = PassThrough;

var Transform = require('./_stream_transform');

require('inherits')(PassThrough, Transform);

function PassThrough(options) {
  if (!(this instanceof PassThrough)) return new PassThrough(options);
  Transform.call(this, options);
}

PassThrough.prototype._transform = function (chunk, encoding, cb) {
  cb(null, chunk);
};

File diff suppressed because it is too large


@@ -0,0 +1,190 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
'use strict';
module.exports = Transform;
var _require$codes = require('../errors').codes,
ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,
ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;
var Duplex = require('./_stream_duplex');
require('inherits')(Transform, Duplex);
function afterTransform(er, data) {
var ts = this._transformState;
ts.transforming = false;
var cb = ts.writecb;
if (cb === null) {
return this.emit('error', new ERR_MULTIPLE_CALLBACK());
}
ts.writechunk = null;
ts.writecb = null;
if (data != null)
// single equals check for both `null` and `undefined`
this.push(data);
cb(er);
var rs = this._readableState;
rs.reading = false;
if (rs.needReadable || rs.length < rs.highWaterMark) {
this._read(rs.highWaterMark);
}
}
function Transform(options) {
if (!(this instanceof Transform)) return new Transform(options);
Duplex.call(this, options);
this._transformState = {
afterTransform: afterTransform.bind(this),
needTransform: false,
transforming: false,
writecb: null,
writechunk: null,
writeencoding: null
};
// start out asking for a readable event once data is transformed.
this._readableState.needReadable = true;
// we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
if (options) {
if (typeof options.transform === 'function') this._transform = options.transform;
if (typeof options.flush === 'function') this._flush = options.flush;
}
// When the writable side finishes, then flush out anything remaining.
this.on('prefinish', prefinish);
}
function prefinish() {
var _this = this;
if (typeof this._flush === 'function' && !this._readableState.destroyed) {
this._flush(function (er, data) {
done(_this, er, data);
});
} else {
done(this, null, null);
}
}
Transform.prototype.push = function (chunk, encoding) {
this._transformState.needTransform = false;
return Duplex.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function (chunk, encoding, cb) {
cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));
};
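// For example (illustrative only; `jsonEcho` is a hypothetical Transform
// instance), an implementation should report bad input through cb rather
// than throwing:
//
//   jsonEcho._transform = function (chunk, encoding, cb) {
//     try {
//       this.push(JSON.stringify(JSON.parse(chunk)));
//       cb();
//     } catch (err) {
//       cb(err); // passing an error here errors the whole stream
//     }
//   };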
Transform.prototype._write = function (chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function (n) {
var ts = this._transformState;
if (ts.writechunk !== null && !ts.transforming) {
ts.transforming = true;
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
} else {
// mark that we need a transform, so that any data that comes in
// will get processed, now that we've asked for it.
ts.needTransform = true;
}
};
Transform.prototype._destroy = function (err, cb) {
Duplex.prototype._destroy.call(this, err, function (err2) {
cb(err2);
});
};
function done(stream, er, data) {
if (er) return stream.emit('error', er);
if (data != null)
// single equals check for both `null` and `undefined`
stream.push(data);
// TODO(BridgeAR): Write a test for these two error cases
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();
if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
return stream.push(null);
}


@@ -0,0 +1,641 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// A bit simpler than readable streams.
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
// the drain event emission and buffering.
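//
// A minimal illustrative sketch (not part of this module; `sink` and
// `chunks` are hypothetical names): a Writable that collects chunks
// in memory.
//
//   const { Writable } = require('readable-stream');
//   const chunks = [];
//   const sink = new Writable({
//     write(chunk, encoding, cb) {
//       chunks.push(chunk); // keep the chunk around
//       cb();               // ready for the next chunk
//     }
//   });
//   sink.end('hello');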
'use strict';
module.exports = Writable;
/* <replacement> */
function WriteReq(chunk, encoding, cb) {
this.chunk = chunk;
this.encoding = encoding;
this.callback = cb;
this.next = null;
}
// It looks like a linked list, but it is not:
// there will only ever be two of these per stream
function CorkedRequest(state) {
var _this = this;
this.next = null;
this.entry = null;
this.finish = function () {
onCorkedFinish(_this, state);
};
}
/* </replacement> */
/*<replacement>*/
var Duplex;
/*</replacement>*/
Writable.WritableState = WritableState;
/*<replacement>*/
var internalUtil = {
deprecate: require('util-deprecate')
};
/*</replacement>*/
/*<replacement>*/
var Stream = require('./internal/streams/stream');
/*</replacement>*/
var Buffer = require('buffer').Buffer;
var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};
function _uint8ArrayToBuffer(chunk) {
return Buffer.from(chunk);
}
function _isUint8Array(obj) {
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
}
var destroyImpl = require('./internal/streams/destroy');
var _require = require('./internal/streams/state'),
getHighWaterMark = _require.getHighWaterMark;
var _require$codes = require('../errors').codes,
ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;
var errorOrDestroy = destroyImpl.errorOrDestroy;
require('inherits')(Writable, Stream);
function nop() {}
function WritableState(options, stream, isDuplex) {
Duplex = Duplex || require('./_stream_duplex');
options = options || {};
// Duplex streams are both readable and writable, but share
// the same options object.
// However, some cases require setting options to different
// values for the readable and the writable sides of the duplex stream,
// e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;
// object stream flag to indicate whether or not this stream
// contains buffers or objects.
this.objectMode = !!options.objectMode;
if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
// the point at which write() starts returning false
// Note: 0 is a valid value, means that we always return false if
// the entire buffer is not flushed immediately on write()
this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex);
// if _final has been called
this.finalCalled = false;
// drain event flag.
this.needDrain = false;
// at the start of calling end()
this.ending = false;
// when end() has been called, and returned
this.ended = false;
// when 'finish' is emitted
this.finished = false;
// has it been destroyed
this.destroyed = false;
// should we decode strings into buffers before passing to _write?
// this is here so that some node-core streams can optimize string
// handling at a lower level.
var noDecode = options.decodeStrings === false;
this.decodeStrings = !noDecode;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8';
// not an actual buffer we keep track of, but a measurement
// of how much we're waiting to get pushed to some underlying
// socket or file.
this.length = 0;
// a flag to see when we're in the middle of a write.
this.writing = false;
// when true all writes will be buffered until .uncork() call
this.corked = 0;
// a flag to be able to tell if the onwrite cb is called immediately,
// or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
// a flag to know if we're processing previously buffered items, which
// may call the _write() callback in the same tick, so that we don't
// end up in an overlapped onwrite situation.
this.bufferProcessing = false;
// the callback that's passed to _write(chunk,cb)
this.onwrite = function (er) {
onwrite(stream, er);
};
// the callback that the user supplies to write(chunk,encoding,cb)
this.writecb = null;
// the amount that is being written when _write is called.
this.writelen = 0;
this.bufferedRequest = null;
this.lastBufferedRequest = null;
// number of pending user-supplied write callbacks
// this must be 0 before 'finish' can be emitted
this.pendingcb = 0;
// emit prefinish if the only thing we're waiting for is _write cbs
// This is relevant for synchronous Transform streams
this.prefinished = false;
// True if the error was already emitted and should not be thrown again
this.errorEmitted = false;
// Should close be emitted on destroy. Defaults to true.
this.emitClose = options.emitClose !== false;
// Should .destroy() be called after 'finish' (and potentially 'end')
this.autoDestroy = !!options.autoDestroy;
// count buffered requests
this.bufferedRequestCount = 0;
// allocate the first CorkedRequest, there is always
// one allocated and free to use, and we maintain at most two
this.corkedRequestsFree = new CorkedRequest(this);
}
WritableState.prototype.getBuffer = function getBuffer() {
var current = this.bufferedRequest;
var out = [];
while (current) {
out.push(current);
current = current.next;
}
return out;
};
(function () {
try {
Object.defineProperty(WritableState.prototype, 'buffer', {
get: internalUtil.deprecate(function writableStateBufferGetter() {
return this.getBuffer();
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
});
} catch (_) {}
})();
// Test _writableState for inheritance to account for Duplex streams,
// whose prototype chain only points to Readable.
var realHasInstance;
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
realHasInstance = Function.prototype[Symbol.hasInstance];
Object.defineProperty(Writable, Symbol.hasInstance, {
value: function value(object) {
if (realHasInstance.call(this, object)) return true;
if (this !== Writable) return false;
return object && object._writableState instanceof WritableState;
}
});
} else {
realHasInstance = function realHasInstance(object) {
return object instanceof this;
};
}
function Writable(options) {
Duplex = Duplex || require('./_stream_duplex');
// Writable ctor is applied to Duplexes, too.
// `realHasInstance` is necessary because using plain `instanceof`
// would return false, as no `_writableState` property is attached.
// Trying to use the custom `instanceof` for Writable here will also break the
// Node.js LazyTransform implementation, which has a non-trivial getter for
// `_writableState` that would lead to infinite recursion.
// Checking for a Stream.Duplex instance is faster here instead of inside
// the WritableState constructor, at least with V8 6.5
var isDuplex = this instanceof Duplex;
if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);
this._writableState = new WritableState(options, this, isDuplex);
// legacy.
this.writable = true;
if (options) {
if (typeof options.write === 'function') this._write = options.write;
if (typeof options.writev === 'function') this._writev = options.writev;
if (typeof options.destroy === 'function') this._destroy = options.destroy;
if (typeof options.final === 'function') this._final = options.final;
}
Stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
Writable.prototype.pipe = function () {
errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());
};
function writeAfterEnd(stream, cb) {
var er = new ERR_STREAM_WRITE_AFTER_END();
// TODO: defer error events consistently everywhere, not just the cb
errorOrDestroy(stream, er);
process.nextTick(cb, er);
}
// Checks that a user-supplied chunk is valid, especially for the particular
// mode the stream is in. Currently this means that `null` is never accepted
// and undefined/non-string values are only allowed in object mode.
function validChunk(stream, state, chunk, cb) {
var er;
if (chunk === null) {
er = new ERR_STREAM_NULL_VALUES();
} else if (typeof chunk !== 'string' && !state.objectMode) {
er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
}
if (er) {
errorOrDestroy(stream, er);
process.nextTick(cb, er);
return false;
}
return true;
}
Writable.prototype.write = function (chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
var isBuf = !state.objectMode && _isUint8Array(chunk);
if (isBuf && !Buffer.isBuffer(chunk)) {
chunk = _uint8ArrayToBuffer(chunk);
}
if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
if (typeof cb !== 'function') cb = nop;
if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
state.pendingcb++;
ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
}
return ret;
};
Writable.prototype.cork = function () {
this._writableState.corked++;
};
Writable.prototype.uncork = function () {
var state = this._writableState;
if (state.corked) {
state.corked--;
if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
}
};
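// Illustrative usage (not part of this module): corking buffers writes so
// that, when a _writev() is implemented, they can be flushed in one batch.
//
//   stream.cork();
//   stream.write('a');
//   stream.write('b');
//   process.nextTick(function () {
//     stream.uncork(); // both chunks are flushed together
//   });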
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
// node::ParseEncoding() requires lower case.
if (typeof encoding === 'string') encoding = encoding.toLowerCase();
if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);
this._writableState.defaultEncoding = encoding;
return this;
};
Object.defineProperty(Writable.prototype, 'writableBuffer', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function get() {
return this._writableState && this._writableState.getBuffer();
}
});
function decodeChunk(state, chunk, encoding) {
if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
chunk = Buffer.from(chunk, encoding);
}
return chunk;
}
Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function get() {
return this._writableState.highWaterMark;
}
});
// if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write.
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
if (!isBuf) {
var newChunk = decodeChunk(state, chunk, encoding);
if (chunk !== newChunk) {
isBuf = true;
encoding = 'buffer';
chunk = newChunk;
}
}
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
var ret = state.length < state.highWaterMark;
// we must ensure that previous needDrain will not be reset to false.
if (!ret) state.needDrain = true;
if (state.writing || state.corked) {
var last = state.lastBufferedRequest;
state.lastBufferedRequest = {
chunk: chunk,
encoding: encoding,
isBuf: isBuf,
callback: cb,
next: null
};
if (last) {
last.next = state.lastBufferedRequest;
} else {
state.bufferedRequest = state.lastBufferedRequest;
}
state.bufferedRequestCount += 1;
} else {
doWrite(stream, state, false, len, chunk, encoding, cb);
}
return ret;
}
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
state.writelen = len;
state.writecb = cb;
state.writing = true;
state.sync = true;
if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
state.sync = false;
}
function onwriteError(stream, state, sync, er, cb) {
--state.pendingcb;
if (sync) {
// defer the callback if we are being called synchronously
// to avoid piling up things on the stack
process.nextTick(cb, er);
// this can emit finish, and it will always happen
// after error
process.nextTick(finishMaybe, stream, state);
stream._writableState.errorEmitted = true;
errorOrDestroy(stream, er);
} else {
// if the write was async, the caller expects the
// callback to be invoked before the error is emitted
cb(er);
stream._writableState.errorEmitted = true;
errorOrDestroy(stream, er);
// this can emit finish, but finish must
// always follow error
finishMaybe(stream, state);
}
}
function onwriteStateUpdate(state) {
state.writing = false;
state.writecb = null;
state.length -= state.writelen;
state.writelen = 0;
}
function onwrite(stream, er) {
var state = stream._writableState;
var sync = state.sync;
var cb = state.writecb;
if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();
onwriteStateUpdate(state);
if (er) onwriteError(stream, state, sync, er, cb);else {
// Check if we're actually ready to finish, but don't emit yet
var finished = needFinish(state) || stream.destroyed;
if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
clearBuffer(stream, state);
}
if (sync) {
process.nextTick(afterWrite, stream, state, finished, cb);
} else {
afterWrite(stream, state, finished, cb);
}
}
}
function afterWrite(stream, state, finished, cb) {
if (!finished) onwriteDrain(stream, state);
state.pendingcb--;
cb();
finishMaybe(stream, state);
}
// Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.
function onwriteDrain(stream, state) {
if (state.length === 0 && state.needDrain) {
state.needDrain = false;
stream.emit('drain');
}
}
// if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
state.bufferProcessing = true;
var entry = state.bufferedRequest;
if (stream._writev && entry && entry.next) {
// Fast case, write everything using _writev()
var l = state.bufferedRequestCount;
var buffer = new Array(l);
var holder = state.corkedRequestsFree;
holder.entry = entry;
var count = 0;
var allBuffers = true;
while (entry) {
buffer[count] = entry;
if (!entry.isBuf) allBuffers = false;
entry = entry.next;
count += 1;
}
buffer.allBuffers = allBuffers;
doWrite(stream, state, true, state.length, buffer, '', holder.finish);
// doWrite is almost always async, defer these to save a bit of time
// as the hot path ends with doWrite
state.pendingcb++;
state.lastBufferedRequest = null;
if (holder.next) {
state.corkedRequestsFree = holder.next;
holder.next = null;
} else {
state.corkedRequestsFree = new CorkedRequest(state);
}
state.bufferedRequestCount = 0;
} else {
// Slow case, write chunks one-by-one
while (entry) {
var chunk = entry.chunk;
var encoding = entry.encoding;
var cb = entry.callback;
var len = state.objectMode ? 1 : chunk.length;
doWrite(stream, state, false, len, chunk, encoding, cb);
entry = entry.next;
state.bufferedRequestCount--;
// if we didn't call the onwrite immediately, then
// it means that we need to wait until it does.
// also, that means that the chunk and cb are currently
// being processed, so move the buffer counter past them.
if (state.writing) {
break;
}
}
if (entry === null) state.lastBufferedRequest = null;
}
state.bufferedRequest = entry;
state.bufferProcessing = false;
}
Writable.prototype._write = function (chunk, encoding, cb) {
cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));
};
Writable.prototype._writev = null;
Writable.prototype.end = function (chunk, encoding, cb) {
var state = this._writableState;
if (typeof chunk === 'function') {
cb = chunk;
chunk = null;
encoding = null;
} else if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
// .end() fully uncorks
if (state.corked) {
state.corked = 1;
this.uncork();
}
// ignore unnecessary end() calls.
if (!state.ending) endWritable(this, state, cb);
return this;
};
Object.defineProperty(Writable.prototype, 'writableLength', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function get() {
return this._writableState.length;
}
});
function needFinish(state) {
return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
}
function callFinal(stream, state) {
stream._final(function (err) {
state.pendingcb--;
if (err) {
errorOrDestroy(stream, err);
}
state.prefinished = true;
stream.emit('prefinish');
finishMaybe(stream, state);
});
}
function prefinish(stream, state) {
if (!state.prefinished && !state.finalCalled) {
if (typeof stream._final === 'function' && !state.destroyed) {
state.pendingcb++;
state.finalCalled = true;
process.nextTick(callFinal, stream, state);
} else {
state.prefinished = true;
stream.emit('prefinish');
}
}
}
function finishMaybe(stream, state) {
var need = needFinish(state);
if (need) {
prefinish(stream, state);
if (state.pendingcb === 0) {
state.finished = true;
stream.emit('finish');
if (state.autoDestroy) {
// In case of duplex streams we need a way to detect
// if the readable side is ready for autoDestroy as well
var rState = stream._readableState;
if (!rState || rState.autoDestroy && rState.endEmitted) {
stream.destroy();
}
}
}
}
return need;
}
function endWritable(stream, state, cb) {
state.ending = true;
finishMaybe(stream, state);
if (cb) {
if (state.finished) process.nextTick(cb);else stream.once('finish', cb);
}
state.ended = true;
stream.writable = false;
}
function onCorkedFinish(corkReq, state, err) {
var entry = corkReq.entry;
corkReq.entry = null;
while (entry) {
var cb = entry.callback;
state.pendingcb--;
cb(err);
entry = entry.next;
}
// reuse the free corkReq.
state.corkedRequestsFree.next = corkReq;
}
Object.defineProperty(Writable.prototype, 'destroyed', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function get() {
if (this._writableState === undefined) {
return false;
}
return this._writableState.destroyed;
},
set: function set(value) {
// we ignore the value if the stream
// has not been initialized yet
if (!this._writableState) {
return;
}
// backward compatibility, the user is explicitly
// managing destroyed
this._writableState.destroyed = value;
}
});
Writable.prototype.destroy = destroyImpl.destroy;
Writable.prototype._undestroy = destroyImpl.undestroy;
Writable.prototype._destroy = function (err, cb) {
cb(err);
};


@@ -0,0 +1,180 @@
'use strict';
var _Object$setPrototypeO;
function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); }
var finished = require('./end-of-stream');
var kLastResolve = Symbol('lastResolve');
var kLastReject = Symbol('lastReject');
var kError = Symbol('error');
var kEnded = Symbol('ended');
var kLastPromise = Symbol('lastPromise');
var kHandlePromise = Symbol('handlePromise');
var kStream = Symbol('stream');
function createIterResult(value, done) {
return {
value: value,
done: done
};
}
function readAndResolve(iter) {
var resolve = iter[kLastResolve];
if (resolve !== null) {
var data = iter[kStream].read();
// we defer if data is null
// we can be expecting either 'end' or
// 'error'
if (data !== null) {
iter[kLastPromise] = null;
iter[kLastResolve] = null;
iter[kLastReject] = null;
resolve(createIterResult(data, false));
}
}
}
function onReadable(iter) {
// we wait for the next tick, because it might
// emit an error with process.nextTick
process.nextTick(readAndResolve, iter);
}
function wrapForNext(lastPromise, iter) {
return function (resolve, reject) {
lastPromise.then(function () {
if (iter[kEnded]) {
resolve(createIterResult(undefined, true));
return;
}
iter[kHandlePromise](resolve, reject);
}, reject);
};
}
var AsyncIteratorPrototype = Object.getPrototypeOf(function () {});
var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = {
get stream() {
return this[kStream];
},
next: function next() {
var _this = this;
// if we have detected an error in the meanwhile
// reject straight away
var error = this[kError];
if (error !== null) {
return Promise.reject(error);
}
if (this[kEnded]) {
return Promise.resolve(createIterResult(undefined, true));
}
if (this[kStream].destroyed) {
// We need to defer via nextTick because if .destroy(err) is
// called, the error will be emitted via nextTick, and
// we cannot guarantee that there is no error lingering around
// waiting to be emitted.
return new Promise(function (resolve, reject) {
process.nextTick(function () {
if (_this[kError]) {
reject(_this[kError]);
} else {
resolve(createIterResult(undefined, true));
}
});
});
}
// if we have multiple next() calls
// we will wait for the previous Promise to finish
// this logic is optimized to support for await loops,
// where next() is only called once at a time
var lastPromise = this[kLastPromise];
var promise;
if (lastPromise) {
promise = new Promise(wrapForNext(lastPromise, this));
} else {
// fast path needed to support multiple this.push()
// without triggering the next() queue
var data = this[kStream].read();
if (data !== null) {
return Promise.resolve(createIterResult(data, false));
}
promise = new Promise(this[kHandlePromise]);
}
this[kLastPromise] = promise;
return promise;
}
}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () {
return this;
}), _defineProperty(_Object$setPrototypeO, "return", function _return() {
var _this2 = this;
// destroy(err, cb) is a private API
// we can guarantee we have that here, because we control the
// Readable class this is attached to
return new Promise(function (resolve, reject) {
_this2[kStream].destroy(null, function (err) {
if (err) {
reject(err);
return;
}
resolve(createIterResult(undefined, true));
});
});
}), _Object$setPrototypeO), AsyncIteratorPrototype);
var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) {
var _Object$create;
var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, {
value: stream,
writable: true
}), _defineProperty(_Object$create, kLastResolve, {
value: null,
writable: true
}), _defineProperty(_Object$create, kLastReject, {
value: null,
writable: true
}), _defineProperty(_Object$create, kError, {
value: null,
writable: true
}), _defineProperty(_Object$create, kEnded, {
value: stream._readableState.endEmitted,
writable: true
}), _defineProperty(_Object$create, kHandlePromise, {
value: function value(resolve, reject) {
var data = iterator[kStream].read();
if (data) {
iterator[kLastPromise] = null;
iterator[kLastResolve] = null;
iterator[kLastReject] = null;
resolve(createIterResult(data, false));
} else {
iterator[kLastResolve] = resolve;
iterator[kLastReject] = reject;
}
},
writable: true
}), _Object$create));
iterator[kLastPromise] = null;
finished(stream, function (err) {
if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
var reject = iterator[kLastReject];
// reject if we are waiting for data in the Promise
// returned by next() and store the error
if (reject !== null) {
iterator[kLastPromise] = null;
iterator[kLastResolve] = null;
iterator[kLastReject] = null;
reject(err);
}
iterator[kError] = err;
return;
}
var resolve = iterator[kLastResolve];
if (resolve !== null) {
iterator[kLastPromise] = null;
iterator[kLastResolve] = null;
iterator[kLastReject] = null;
resolve(createIterResult(undefined, true));
}
iterator[kEnded] = true;
});
stream.on('readable', onReadable.bind(null, iterator));
return iterator;
};
module.exports = createReadableStreamAsyncIterator;
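// Illustrative usage (names assumed; in the main readable module this is
// what Symbol.asyncIterator on Readable delegates to):
//
//   const iterator = createReadableStreamAsyncIterator(someReadable);
//   for await (const chunk of iterator) {
//     console.log(chunk);
//   }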


@@ -0,0 +1,183 @@
'use strict';
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } }
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; }
function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); }
var _require = require('buffer'),
Buffer = _require.Buffer;
var _require2 = require('util'),
inspect = _require2.inspect;
var custom = inspect && inspect.custom || 'inspect';
function copyBuffer(src, target, offset) {
Buffer.prototype.copy.call(src, target, offset);
}
module.exports = /*#__PURE__*/function () {
function BufferList() {
_classCallCheck(this, BufferList);
this.head = null;
this.tail = null;
this.length = 0;
}
_createClass(BufferList, [{
key: "push",
value: function push(v) {
var entry = {
data: v,
next: null
};
if (this.length > 0) this.tail.next = entry;else this.head = entry;
this.tail = entry;
++this.length;
}
}, {
key: "unshift",
value: function unshift(v) {
var entry = {
data: v,
next: this.head
};
if (this.length === 0) this.tail = entry;
this.head = entry;
++this.length;
}
}, {
key: "shift",
value: function shift() {
if (this.length === 0) return;
var ret = this.head.data;
if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
--this.length;
return ret;
}
}, {
key: "clear",
value: function clear() {
this.head = this.tail = null;
this.length = 0;
}
}, {
key: "join",
value: function join(s) {
if (this.length === 0) return '';
var p = this.head;
var ret = '' + p.data;
while (p = p.next) ret += s + p.data;
return ret;
}
}, {
key: "concat",
value: function concat(n) {
if (this.length === 0) return Buffer.alloc(0);
var ret = Buffer.allocUnsafe(n >>> 0);
var p = this.head;
var i = 0;
while (p) {
copyBuffer(p.data, ret, i);
i += p.data.length;
p = p.next;
}
return ret;
}
// Consumes a specified amount of bytes or characters from the buffered data.
}, {
key: "consume",
value: function consume(n, hasStrings) {
var ret;
if (n < this.head.data.length) {
// `slice` is the same for buffers and strings.
ret = this.head.data.slice(0, n);
this.head.data = this.head.data.slice(n);
} else if (n === this.head.data.length) {
// First chunk is a perfect match.
ret = this.shift();
} else {
// Result spans more than one buffer.
ret = hasStrings ? this._getString(n) : this._getBuffer(n);
}
return ret;
}
}, {
key: "first",
value: function first() {
return this.head.data;
}
// Consumes a specified amount of characters from the buffered data.
}, {
key: "_getString",
value: function _getString(n) {
var p = this.head;
var c = 1;
var ret = p.data;
n -= ret.length;
while (p = p.next) {
var str = p.data;
var nb = n > str.length ? str.length : n;
if (nb === str.length) ret += str;else ret += str.slice(0, n);
n -= nb;
if (n === 0) {
if (nb === str.length) {
++c;
if (p.next) this.head = p.next;else this.head = this.tail = null;
} else {
this.head = p;
p.data = str.slice(nb);
}
break;
}
++c;
}
this.length -= c;
return ret;
}
// Consumes a specified amount of bytes from the buffered data.
}, {
key: "_getBuffer",
value: function _getBuffer(n) {
var ret = Buffer.allocUnsafe(n);
var p = this.head;
var c = 1;
p.data.copy(ret);
n -= p.data.length;
while (p = p.next) {
var buf = p.data;
var nb = n > buf.length ? buf.length : n;
buf.copy(ret, ret.length - n, 0, nb);
n -= nb;
if (n === 0) {
if (nb === buf.length) {
++c;
if (p.next) this.head = p.next;else this.head = this.tail = null;
} else {
this.head = p;
p.data = buf.slice(nb);
}
break;
}
++c;
}
this.length -= c;
return ret;
}
// Make sure the linked list only shows the minimal necessary information.
}, {
key: custom,
value: function value(_, options) {
return inspect(this, _objectSpread(_objectSpread({}, options), {}, {
// Only inspect one level.
depth: 0,
// It should not recurse.
customInspect: false
}));
}
}]);
return BufferList;
}();
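// Illustrative usage of this internal helper (the require path is assumed):
//
//   const BufferList = require('./buffer_list');
//   const list = new BufferList();
//   list.push(Buffer.from('ab'));
//   list.push(Buffer.from('cd'));
//   list.concat(4);         // => <Buffer 61 62 63 64>, list left intact
//   list.consume(3, false); // => <Buffer 61 62 63>, leaving 'd' buffered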


@@ -0,0 +1,96 @@
'use strict';
// undocumented cb() API, needed for core, not for public API
function destroy(err, cb) {
var _this = this;
var readableDestroyed = this._readableState && this._readableState.destroyed;
var writableDestroyed = this._writableState && this._writableState.destroyed;
if (readableDestroyed || writableDestroyed) {
if (cb) {
cb(err);
} else if (err) {
if (!this._writableState) {
process.nextTick(emitErrorNT, this, err);
} else if (!this._writableState.errorEmitted) {
this._writableState.errorEmitted = true;
process.nextTick(emitErrorNT, this, err);
}
}
return this;
}
// we set destroyed to true before firing error callbacks in order
// to make it re-entrance safe in case destroy() is called within callbacks
if (this._readableState) {
this._readableState.destroyed = true;
}
// if this is a duplex stream mark the writable part as destroyed as well
if (this._writableState) {
this._writableState.destroyed = true;
}
this._destroy(err || null, function (err) {
if (!cb && err) {
if (!_this._writableState) {
process.nextTick(emitErrorAndCloseNT, _this, err);
} else if (!_this._writableState.errorEmitted) {
_this._writableState.errorEmitted = true;
process.nextTick(emitErrorAndCloseNT, _this, err);
} else {
process.nextTick(emitCloseNT, _this);
}
} else if (cb) {
process.nextTick(emitCloseNT, _this);
cb(err);
} else {
process.nextTick(emitCloseNT, _this);
}
});
return this;
}
function emitErrorAndCloseNT(self, err) {
emitErrorNT(self, err);
emitCloseNT(self);
}
function emitCloseNT(self) {
if (self._writableState && !self._writableState.emitClose) return;
if (self._readableState && !self._readableState.emitClose) return;
self.emit('close');
}
function undestroy() {
if (this._readableState) {
this._readableState.destroyed = false;
this._readableState.reading = false;
this._readableState.ended = false;
this._readableState.endEmitted = false;
}
if (this._writableState) {
this._writableState.destroyed = false;
this._writableState.ended = false;
this._writableState.ending = false;
this._writableState.finalCalled = false;
this._writableState.prefinished = false;
this._writableState.finished = false;
this._writableState.errorEmitted = false;
}
}
function emitErrorNT(self, err) {
self.emit('error', err);
}
function errorOrDestroy(stream, err) {
// We have tests that rely on errors being emitted
// in the same tick, so changing this is semver major.
// For now when you opt-in to autoDestroy we allow
// the error to be emitted nextTick. In a future
// semver major update we should change the default to this.
var rState = stream._readableState;
var wState = stream._writableState;
if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);
}
module.exports = {
destroy: destroy,
undestroy: undestroy,
errorOrDestroy: errorOrDestroy
};


@@ -0,0 +1,86 @@
// Ported from https://github.com/mafintosh/end-of-stream with
// permission from the author, Mathias Buus (@mafintosh).
'use strict';
var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE;
function once(callback) {
var called = false;
return function () {
if (called) return;
called = true;
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
callback.apply(this, args);
};
}
function noop() {}
function isRequest(stream) {
return stream.setHeader && typeof stream.abort === 'function';
}
function eos(stream, opts, callback) {
if (typeof opts === 'function') return eos(stream, null, opts);
if (!opts) opts = {};
callback = once(callback || noop);
var readable = opts.readable || opts.readable !== false && stream.readable;
var writable = opts.writable || opts.writable !== false && stream.writable;
var onlegacyfinish = function onlegacyfinish() {
if (!stream.writable) onfinish();
};
var writableEnded = stream._writableState && stream._writableState.finished;
var onfinish = function onfinish() {
writable = false;
writableEnded = true;
if (!readable) callback.call(stream);
};
var readableEnded = stream._readableState && stream._readableState.endEmitted;
var onend = function onend() {
readable = false;
readableEnded = true;
if (!writable) callback.call(stream);
};
var onerror = function onerror(err) {
callback.call(stream, err);
};
var onclose = function onclose() {
var err;
if (readable && !readableEnded) {
if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
return callback.call(stream, err);
}
if (writable && !writableEnded) {
if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
return callback.call(stream, err);
}
};
var onrequest = function onrequest() {
stream.req.on('finish', onfinish);
};
if (isRequest(stream)) {
stream.on('complete', onfinish);
stream.on('abort', onclose);
if (stream.req) onrequest();else stream.on('request', onrequest);
} else if (writable && !stream._writableState) {
// legacy streams
stream.on('end', onlegacyfinish);
stream.on('close', onlegacyfinish);
}
stream.on('end', onend);
stream.on('finish', onfinish);
if (opts.error !== false) stream.on('error', onerror);
stream.on('close', onclose);
return function () {
stream.removeListener('complete', onfinish);
stream.removeListener('abort', onclose);
stream.removeListener('request', onrequest);
if (stream.req) stream.req.removeListener('finish', onfinish);
stream.removeListener('end', onlegacyfinish);
stream.removeListener('close', onlegacyfinish);
stream.removeListener('finish', onfinish);
stream.removeListener('end', onend);
stream.removeListener('error', onerror);
stream.removeListener('close', onclose);
};
}
module.exports = eos;
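// Illustrative usage (this module is exposed as `finished` from the
// package root; `someStream` is a placeholder):
//
//   const finished = require('./end-of-stream');
//   const cleanup = finished(someStream, function (err) {
//     if (err) console.error('stream failed:', err);
//     else console.log('stream is done');
//   });
//   // cleanup() detaches the listeners early if they are no longer needed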


@@ -0,0 +1,3 @@
module.exports = function () {
throw new Error('Readable.from is not available in the browser')
};

View File

@@ -0,0 +1,52 @@
'use strict';
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); }
var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE;
function from(Readable, iterable, opts) {
var iterator;
if (iterable && typeof iterable.next === 'function') {
iterator = iterable;
} else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable);
var readable = new Readable(_objectSpread({
objectMode: true
}, opts));
// Reading boolean to protect against _read
// being called before last iteration completion.
var reading = false;
readable._read = function () {
if (!reading) {
reading = true;
next();
}
};
function next() {
return _next2.apply(this, arguments);
}
function _next2() {
_next2 = _asyncToGenerator(function* () {
try {
var _yield$iterator$next = yield iterator.next(),
value = _yield$iterator$next.value,
done = _yield$iterator$next.done;
if (done) {
readable.push(null);
} else if (readable.push(yield value)) {
next();
} else {
reading = false;
}
} catch (err) {
readable.destroy(err);
}
});
return _next2.apply(this, arguments);
}
return readable;
}
module.exports = from;
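// Illustrative usage (this helper backs Readable.from in the main export;
// the async generator below is a hypothetical example):
//
//   const readable = Readable.from(async function* () {
//     yield 'hello';
//     yield 'world';
//   }());
//   readable.on('data', console.log);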


@@ -0,0 +1,86 @@
// Ported from https://github.com/mafintosh/pump with
// permission from the author, Mathias Buus (@mafintosh).
'use strict';
var eos;
function once(callback) {
var called = false;
return function () {
if (called) return;
called = true;
callback.apply(void 0, arguments);
};
}
var _require$codes = require('../../../errors').codes,
ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,
ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;
function noop(err) {
// Rethrow the error if it exists to avoid swallowing it
if (err) throw err;
}
function isRequest(stream) {
return stream.setHeader && typeof stream.abort === 'function';
}
function destroyer(stream, reading, writing, callback) {
callback = once(callback);
var closed = false;
stream.on('close', function () {
closed = true;
});
if (eos === undefined) eos = require('./end-of-stream');
eos(stream, {
readable: reading,
writable: writing
}, function (err) {
if (err) return callback(err);
closed = true;
callback();
});
var destroyed = false;
return function (err) {
if (closed) return;
if (destroyed) return;
destroyed = true;
// request.destroy just does .end; .abort is what we want
if (isRequest(stream)) return stream.abort();
if (typeof stream.destroy === 'function') return stream.destroy();
callback(err || new ERR_STREAM_DESTROYED('pipe'));
};
}
function call(fn) {
fn();
}
function pipe(from, to) {
return from.pipe(to);
}
function popCallback(streams) {
if (!streams.length) return noop;
if (typeof streams[streams.length - 1] !== 'function') return noop;
return streams.pop();
}
function pipeline() {
for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {
streams[_key] = arguments[_key];
}
var callback = popCallback(streams);
if (Array.isArray(streams[0])) streams = streams[0];
if (streams.length < 2) {
throw new ERR_MISSING_ARGS('streams');
}
var error;
var destroys = streams.map(function (stream, i) {
var reading = i < streams.length - 1;
var writing = i > 0;
return destroyer(stream, reading, writing, function (err) {
if (!error) error = err;
if (err) destroys.forEach(call);
if (reading) return;
destroys.forEach(call);
callback(error);
});
});
return streams.reduce(pipe);
}
module.exports = pipeline;
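// Illustrative usage (mirrors stream.pipeline in Node core; the file
// names are placeholders):
//
//   const fs = require('fs');
//   const zlib = require('zlib');
//   pipeline(
//     fs.createReadStream('archive.tar.gz'),
//     zlib.createGunzip(),
//     fs.createWriteStream('archive.tar'),
//     function (err) {
//       if (err) console.error('pipeline failed:', err);
//     }
//   );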


@@ -0,0 +1,22 @@
'use strict';
var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;
function highWaterMarkFrom(options, isDuplex, duplexKey) {
return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;
}
function getHighWaterMark(state, options, duplexKey, isDuplex) {
var hwm = highWaterMarkFrom(options, isDuplex, duplexKey);
if (hwm != null) {
if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {
var name = isDuplex ? duplexKey : 'highWaterMark';
throw new ERR_INVALID_OPT_VALUE(name, hwm);
}
return Math.floor(hwm);
}
// Default value
return state.objectMode ? 16 : 16 * 1024;
}
module.exports = {
getHighWaterMark: getHighWaterMark
};
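// Illustrative calls (literal argument shapes, not real stream state):
//
//   getHighWaterMark({}, { highWaterMark: 8 }, 'readableHighWaterMark', false);  // => 8
//   getHighWaterMark({ objectMode: false }, {}, 'readableHighWaterMark', false); // => 16384
//   getHighWaterMark({ objectMode: true }, {}, 'readableHighWaterMark', false);  // => 16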


@@ -0,0 +1 @@
module.exports = require('events').EventEmitter;


@@ -0,0 +1 @@
module.exports = require('stream');


@@ -0,0 +1,68 @@
{
"name": "readable-stream",
"version": "3.6.2",
"description": "Streams3, a user-land copy of the stream library from Node.js",
"main": "readable.js",
"engines": {
"node": ">= 6"
},
"dependencies": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
},
"devDependencies": {
"@babel/cli": "^7.2.0",
"@babel/core": "^7.2.0",
"@babel/polyfill": "^7.0.0",
"@babel/preset-env": "^7.2.0",
"airtap": "0.0.9",
"assert": "^1.4.0",
"bl": "^2.0.0",
"deep-strict-equal": "^0.2.0",
"events.once": "^2.0.2",
"glob": "^7.1.2",
"gunzip-maybe": "^1.4.1",
"hyperquest": "^2.1.3",
"lolex": "^2.6.0",
"nyc": "^11.0.0",
"pump": "^3.0.0",
"rimraf": "^2.6.2",
"tap": "^12.0.0",
"tape": "^4.9.0",
"tar-fs": "^1.16.2",
"util-promisify": "^2.1.0"
},
"scripts": {
"test": "tap -J --no-esm test/parallel/*.js test/ours/*.js",
"ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap",
"test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js",
"test-browser-local": "airtap --open --local -- test/browser.js",
"cover": "nyc npm test",
"report": "nyc report --reporter=lcov",
"update-browser-errors": "babel -o errors-browser.js errors.js"
},
"repository": {
"type": "git",
"url": "git://github.com/nodejs/readable-stream"
},
"keywords": [
"readable",
"stream",
"pipe"
],
"browser": {
"util": false,
"worker_threads": false,
"./errors": "./errors-browser.js",
"./readable.js": "./readable-browser.js",
"./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js",
"./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js"
},
"nyc": {
"include": [
"lib/**.js"
]
},
"license": "MIT"
}


@@ -0,0 +1,9 @@
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = exports;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
exports.finished = require('./lib/internal/streams/end-of-stream.js');
exports.pipeline = require('./lib/internal/streams/pipeline.js');


@@ -0,0 +1,16 @@
var Stream = require('stream');
if (process.env.READABLE_STREAM === 'disable' && Stream) {
module.exports = Stream.Readable;
Object.assign(module.exports, Stream);
module.exports.Stream = Stream;
} else {
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = Stream || exports;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
exports.finished = require('./lib/internal/streams/end-of-stream.js');
exports.pipeline = require('./lib/internal/streams/pipeline.js');
}


@@ -0,0 +1,6 @@
language: node_js
node_js:
- 8
- 10
- 12
- 14


@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Mathias Buus
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.


@@ -0,0 +1,165 @@
# tar-fs
filesystem bindings for [tar-stream](https://github.com/mafintosh/tar-stream).
```
npm install tar-fs
```
[![build status](https://secure.travis-ci.org/mafintosh/tar-fs.png)](http://travis-ci.org/mafintosh/tar-fs)
## Usage
tar-fs allows you to pack directories into tarballs and extract tarballs into directories.
It doesn't gunzip for you, so if you want to extract a `.tar.gz` with this you'll need to use something like [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in addition to this.
``` js
var tar = require('tar-fs')
var fs = require('fs')
// packing a directory
tar.pack('./my-directory').pipe(fs.createWriteStream('my-tarball.tar'))
// extracting a directory
fs.createReadStream('my-other-tarball.tar').pipe(tar.extract('./my-other-directory'))
```
To ignore various files when packing or extracting, add an ignore function to the options. `ignore`
is also an alias for `filter`. Additionally, when extracting, the ignore function receives a `header`
argument, so you can also filter by metadata.
``` js
var pack = tar.pack('./my-directory', {
ignore: function(name) {
return path.extname(name) === '.bin' // ignore .bin files when packing
}
})
var extract = tar.extract('./my-other-directory', {
ignore: function(name) {
return path.extname(name) === '.bin' // ignore .bin files inside the tarball when extracting
}
})
var extractFilesDirs = tar.extract('./my-other-other-directory', {
ignore: function(_, header) {
// pass files & directories, ignore e.g. symlinks
return header.type !== 'file' && header.type !== 'directory'
}
})
```
You can also specify which entries to pack using the `entries` option
```js
var pack = tar.pack('./my-directory', {
entries: ['file1', 'subdir/file2'] // only the specific entries will be packed
})
```
If you want to modify the headers when packing/extracting add a map function to the options
``` js
var pack = tar.pack('./my-directory', {
map: function(header) {
header.name = 'prefixed/'+header.name
return header
}
})
var extract = tar.extract('./my-directory', {
map: function(header) {
header.name = 'another-prefix/'+header.name
return header
}
})
```
Similarly, you can use `mapStream` in case you want to modify the input/output file streams
``` js
var pack = tar.pack('./my-directory', {
mapStream: function(fileStream, header) {
// NOTE: the returned stream HAS to have the same length as the input stream.
// If not make sure to update the size in the header passed in here.
if (path.extname(header.name) === '.js') {
return fileStream.pipe(someTransform)
}
return fileStream;
}
})
var extract = tar.extract('./my-directory', {
mapStream: function(fileStream, header) {
if (path.extname(header.name) === '.js') {
return fileStream.pipe(someTransform)
}
return fileStream;
}
})
```
Set `options.fmode` and `options.dmode` to ensure that files/directories extracted have the corresponding modes
``` js
var extract = tar.extract('./my-directory', {
dmode: parseInt(555, 8), // all dirs should be readable
fmode: parseInt(444, 8) // all files should be readable
})
```
It can be useful to use `dmode` and `fmode` if you are packing/unpacking tarballs between *nix/windows to ensure that all files/directories unpacked are readable.
Alternatively you can set `options.readable` and/or `options.writable` to set the dmode and fmode to readable/writable.
``` js
var extract = tar.extract('./my-directory', {
readable: true, // all dirs and files should be readable
writable: true, // all dirs and files should be writable
})
```
Set `options.strict` to `false` if you want to ignore errors due to unsupported entry types (like device files).
To dereference symlinks (pack the contents of the symlink instead of the link itself) set `options.dereference` to `true`, as shown in the sketch below.
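A minimal sketch combining both options (the directory name is a placeholder):

``` js
var extract = tar.extract('./my-directory', {
  strict: false // skip unsupported entry types instead of erroring
})

var pack = tar.pack('./my-directory', {
  dereference: true // pack symlink targets instead of the links themselves
})
```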
## Copy a directory
Copying a directory with permissions and mtime intact is as simple as
``` js
tar.pack('source-directory').pipe(tar.extract('dest-directory'))
```
## Interaction with [`tar-stream`](https://github.com/mafintosh/tar-stream)
Use `finalize: false` and the `finish` hook to
leave the pack stream open for further entries (see
[`tar-stream#pack`](https://github.com/mafintosh/tar-stream#packing)),
and use `pack` to pass an existing pack stream.
``` js
var mypack = tar.pack('./my-directory', {
finalize: false,
finish: function(sameAsMypack) {
mypack.entry({name: 'generated-file.txt'}, "hello")
tar.pack('./other-directory', {
pack: sameAsMypack
})
}
})
```
## Performance
Packing and extracting a 6.1 GB directory with 2496 directories and 2398 files yields the following results on my Macbook Air.
[See the benchmark here](https://gist.github.com/mafintosh/8102201)
* tar-fs: 34.261 seconds
* [node-tar](https://github.com/isaacs/node-tar): 366.123 seconds (or 10x slower)
## License
MIT


@@ -0,0 +1,363 @@
var chownr = require('chownr')
var tar = require('tar-stream')
var pump = require('pump')
var mkdirp = require('mkdirp-classic')
var fs = require('fs')
var path = require('path')
var os = require('os')
var win32 = os.platform() === 'win32'
var noop = function () {}
var echo = function (name) {
return name
}
var normalize = !win32 ? echo : function (name) {
return name.replace(/\\/g, '/').replace(/[:?<>|]/g, '_')
}
var statAll = function (fs, stat, cwd, ignore, entries, sort) {
var queue = entries || ['.']
return function loop (callback) {
if (!queue.length) return callback()
var next = queue.shift()
var nextAbs = path.join(cwd, next)
stat.call(fs, nextAbs, function (err, stat) {
if (err) return callback(err)
if (!stat.isDirectory()) return callback(null, next, stat)
fs.readdir(nextAbs, function (err, files) {
if (err) return callback(err)
if (sort) files.sort()
for (var i = 0; i < files.length; i++) {
if (!ignore(path.join(cwd, next, files[i]))) queue.push(path.join(next, files[i]))
}
callback(null, next, stat)
})
})
}
}
var strip = function (map, level) {
return function (header) {
header.name = header.name.split('/').slice(level).join('/')
var linkname = header.linkname
if (linkname && (header.type === 'link' || path.isAbsolute(linkname))) {
header.linkname = linkname.split('/').slice(level).join('/')
}
return map(header)
}
}
exports.pack = function (cwd, opts) {
if (!cwd) cwd = '.'
if (!opts) opts = {}
var xfs = opts.fs || fs
var ignore = opts.ignore || opts.filter || noop
var map = opts.map || noop
var mapStream = opts.mapStream || echo
var statNext = statAll(xfs, opts.dereference ? xfs.stat : xfs.lstat, cwd, ignore, opts.entries, opts.sort)
var strict = opts.strict !== false
var umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask()
var dmode = typeof opts.dmode === 'number' ? opts.dmode : 0
var fmode = typeof opts.fmode === 'number' ? opts.fmode : 0
var pack = opts.pack || tar.pack()
var finish = opts.finish || noop
if (opts.strip) map = strip(map, opts.strip)
if (opts.readable) {
dmode |= parseInt(555, 8)
fmode |= parseInt(444, 8)
}
if (opts.writable) {
dmode |= parseInt(333, 8)
fmode |= parseInt(222, 8)
}
var onsymlink = function (filename, header) {
xfs.readlink(path.join(cwd, filename), function (err, linkname) {
if (err) return pack.destroy(err)
header.linkname = normalize(linkname)
pack.entry(header, onnextentry)
})
}
var onstat = function (err, filename, stat) {
if (err) return pack.destroy(err)
if (!filename) {
if (opts.finalize !== false) pack.finalize()
return finish(pack)
}
if (stat.isSocket()) return onnextentry() // tar does not support sockets...
var header = {
name: normalize(filename),
mode: (stat.mode | (stat.isDirectory() ? dmode : fmode)) & umask,
mtime: stat.mtime,
size: stat.size,
type: 'file',
uid: stat.uid,
gid: stat.gid
}
if (stat.isDirectory()) {
header.size = 0
header.type = 'directory'
header = map(header) || header
return pack.entry(header, onnextentry)
}
if (stat.isSymbolicLink()) {
header.size = 0
header.type = 'symlink'
header = map(header) || header
return onsymlink(filename, header)
}
// TODO: add fifo etc...
header = map(header) || header
if (!stat.isFile()) {
if (strict) return pack.destroy(new Error('unsupported type for ' + filename))
return onnextentry()
}
var entry = pack.entry(header, onnextentry)
if (!entry) return
var rs = mapStream(xfs.createReadStream(path.join(cwd, filename), { start: 0, end: header.size > 0 ? header.size - 1 : header.size }), header)
rs.on('error', function (err) { // always forward errors on destroy
entry.destroy(err)
})
pump(rs, entry)
}
var onnextentry = function (err) {
if (err) return pack.destroy(err)
statNext(onstat)
}
onnextentry()
return pack
}
var head = function (list) {
return list.length ? list[list.length - 1] : null
}
var processGetuid = function () {
return process.getuid ? process.getuid() : -1
}
var processUmask = function () {
return process.umask ? process.umask() : 0
}
exports.extract = function (cwd, opts) {
if (!cwd) cwd = '.'
if (!opts) opts = {}
var xfs = opts.fs || fs
var ignore = opts.ignore || opts.filter || noop
var map = opts.map || noop
var mapStream = opts.mapStream || echo
var own = opts.chown !== false && !win32 && processGetuid() === 0
var extract = opts.extract || tar.extract()
var stack = []
var now = new Date()
var umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask()
var dmode = typeof opts.dmode === 'number' ? opts.dmode : 0
var fmode = typeof opts.fmode === 'number' ? opts.fmode : 0
var strict = opts.strict !== false
if (opts.strip) map = strip(map, opts.strip)
if (opts.readable) {
dmode |= parseInt(555, 8)
fmode |= parseInt(444, 8)
}
if (opts.writable) {
dmode |= parseInt(333, 8)
fmode |= parseInt(222, 8)
}
var utimesParent = function (name, cb) { // we just set the mtime on the parent dir again everytime we write an entry
var top
while ((top = head(stack)) && name.slice(0, top[0].length) !== top[0]) stack.pop()
if (!top) return cb()
xfs.utimes(top[0], now, top[1], cb)
}
var utimes = function (name, header, cb) {
if (opts.utimes === false) return cb()
if (header.type === 'directory') return xfs.utimes(name, now, header.mtime, cb)
if (header.type === 'symlink') return utimesParent(name, cb) // TODO: how to set mtime on link?
xfs.utimes(name, now, header.mtime, function (err) {
if (err) return cb(err)
utimesParent(name, cb)
})
}
var chperm = function (name, header, cb) {
var link = header.type === 'symlink'
/* eslint-disable node/no-deprecated-api */
var chmod = link ? xfs.lchmod : xfs.chmod
var chown = link ? xfs.lchown : xfs.chown
/* eslint-enable node/no-deprecated-api */
if (!chmod) return cb()
var mode = (header.mode | (header.type === 'directory' ? dmode : fmode)) & umask
if (chown && own) chown.call(xfs, name, header.uid, header.gid, onchown)
else onchown(null)
function onchown (err) {
if (err) return cb(err)
if (!chmod) return cb()
chmod.call(xfs, name, mode, cb)
}
}
extract.on('entry', function (header, stream, next) {
header = map(header) || header
header.name = normalize(header.name)
var name = path.join(cwd, path.join('/', header.name))
if (ignore(name, header)) {
stream.resume()
return next()
}
var stat = function (err) {
if (err) return next(err)
utimes(name, header, function (err) {
if (err) return next(err)
if (win32) return next()
chperm(name, header, next)
})
}
var onsymlink = function () {
if (win32) return next() // skip symlinks on win for now before it can be tested
xfs.unlink(name, function () {
var dst = path.resolve(path.dirname(name), header.linkname)
if (!inCwd(dst, cwd)) return next(new Error(name + ' is not a valid symlink'))
xfs.symlink(header.linkname, name, stat)
})
}
var onlink = function () {
if (win32) return next() // skip links on win for now before it can be tested
xfs.unlink(name, function () {
var srcpath = path.join(cwd, path.join('/', header.linkname))
xfs.realpath(srcpath, function (err, dst) {
if (err || !inCwd(dst, cwd)) return next(new Error(name + ' is not a valid hardlink'))
xfs.link(dst, name, function (err) {
if (err && err.code === 'EPERM' && opts.hardlinkAsFilesFallback) {
stream = xfs.createReadStream(srcpath)
return onfile()
}
stat(err)
})
})
})
}
var onfile = function () {
var ws = xfs.createWriteStream(name)
var rs = mapStream(stream, header)
ws.on('error', function (err) { // always forward errors on destroy
rs.destroy(err)
})
pump(rs, ws, function (err) {
if (err) return next(err)
ws.on('close', stat)
})
}
if (header.type === 'directory') {
stack.push([name, header.mtime])
return mkdirfix(name, {
fs: xfs, own: own, uid: header.uid, gid: header.gid
}, stat)
}
var dir = path.dirname(name)
validate(xfs, dir, path.join(cwd, '.'), function (err, valid) {
if (err) return next(err)
if (!valid) return next(new Error(dir + ' is not a valid path'))
mkdirfix(dir, {
fs: xfs, own: own, uid: header.uid, gid: header.gid
}, function (err) {
if (err) return next(err)
switch (header.type) {
case 'file': return onfile()
case 'link': return onlink()
case 'symlink': return onsymlink()
}
if (strict) return next(new Error('unsupported type for ' + name + ' (' + header.type + ')'))
stream.resume()
next()
})
})
})
if (opts.finish) extract.on('finish', opts.finish)
return extract
}
function validate (fs, name, root, cb) {
if (name === root) return cb(null, true)
fs.lstat(name, function (err, st) {
if (err && err.code !== 'ENOENT') return cb(err)
if (err || st.isDirectory()) return validate(fs, path.join(name, '..'), root, cb)
cb(null, false)
})
}
function mkdirfix (name, opts, cb) {
mkdirp(name, { fs: opts.fs }, function (err, made) {
if (!err && made && opts.own) {
chownr(made, opts.uid, opts.gid, cb)
} else {
cb(err)
}
})
}
function inCwd (dst, cwd) {
cwd = path.resolve(cwd)
return cwd === dst || dst.startsWith(cwd + path.sep)
}

View File

@@ -0,0 +1,41 @@
{
"name": "tar-fs",
"version": "2.1.4",
"description": "filesystem bindings for tar-stream",
"dependencies": {
"chownr": "^1.1.1",
"mkdirp-classic": "^0.5.2",
"pump": "^3.0.0",
"tar-stream": "^2.1.4"
},
"keywords": [
"tar",
"fs",
"file",
"tarball",
"directory",
"stream"
],
"devDependencies": {
"rimraf": "^2.6.3",
"standard": "^13.0.1",
"tape": "^4.9.2"
},
"scripts": {
"test": "standard && tape test/index.js"
},
"bugs": {
"url": "https://github.com/mafintosh/tar-fs/issues"
},
"homepage": "https://github.com/mafintosh/tar-fs",
"main": "index.js",
"directories": {
"test": "test"
},
"author": "Mathias Buus",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/mafintosh/tar-fs.git"
}
}

View File

@@ -0,0 +1 @@
hello world

View File

@@ -0,0 +1 @@
test

Binary file not shown.

View File

@@ -0,0 +1,346 @@
var test = require('tape')
var rimraf = require('rimraf')
var tar = require('../index')
var tarStream = require('tar-stream')
var path = require('path')
var fs = require('fs')
var os = require('os')
var win32 = os.platform() === 'win32'
var mtime = function (st) {
return Math.floor(st.mtime.getTime() / 1000)
}
test('copy a -> copy/a', function (t) {
t.plan(5)
var a = path.join(__dirname, 'fixtures', 'a')
var b = path.join(__dirname, 'fixtures', 'copy', 'a')
rimraf.sync(b)
tar.pack(a)
.pipe(tar.extract(b))
.on('finish', function () {
var files = fs.readdirSync(b)
t.same(files.length, 1)
t.same(files[0], 'hello.txt')
var fileB = path.join(b, files[0])
var fileA = path.join(a, files[0])
t.same(fs.readFileSync(fileB, 'utf-8'), fs.readFileSync(fileA, 'utf-8'))
t.same(fs.statSync(fileB).mode, fs.statSync(fileA).mode)
t.same(mtime(fs.statSync(fileB)), mtime(fs.statSync(fileA)))
})
})
test('copy b -> copy/b', function (t) {
t.plan(8)
var a = path.join(__dirname, 'fixtures', 'b')
var b = path.join(__dirname, 'fixtures', 'copy', 'b')
rimraf.sync(b)
tar.pack(a)
.pipe(tar.extract(b))
.on('finish', function () {
var files = fs.readdirSync(b)
t.same(files.length, 1)
t.same(files[0], 'a')
var dirB = path.join(b, files[0])
var dirA = path.join(a, files[0])
t.same(fs.statSync(dirB).mode, fs.statSync(dirA).mode)
t.same(mtime(fs.statSync(dirB)), mtime(fs.statSync(dirA)))
t.ok(fs.statSync(dirB).isDirectory())
var fileB = path.join(dirB, 'test.txt')
var fileA = path.join(dirA, 'test.txt')
t.same(fs.readFileSync(fileB, 'utf-8'), fs.readFileSync(fileA, 'utf-8'))
t.same(fs.statSync(fileB).mode, fs.statSync(fileA).mode)
t.same(mtime(fs.statSync(fileB)), mtime(fs.statSync(fileA)))
})
})
test('symlink', function (t) {
if (win32) { // no symlink support on win32 currently. TODO: test if this can be enabled somehow
t.plan(1)
t.ok(true)
return
}
t.plan(5)
var a = path.join(__dirname, 'fixtures', 'c')
rimraf.sync(path.join(a, 'link'))
fs.symlinkSync('.gitignore', path.join(a, 'link'))
var b = path.join(__dirname, 'fixtures', 'copy', 'c')
rimraf.sync(b)
tar.pack(a)
.pipe(tar.extract(b))
.on('finish', function () {
var files = fs.readdirSync(b).sort()
t.same(files.length, 2)
t.same(files[0], '.gitignore')
t.same(files[1], 'link')
var linkA = path.join(a, 'link')
var linkB = path.join(b, 'link')
t.same(mtime(fs.lstatSync(linkB)), mtime(fs.lstatSync(linkA)))
t.same(fs.readlinkSync(linkB), fs.readlinkSync(linkA))
})
})
test('follow symlinks', function (t) {
if (win32) { // no symlink support on win32 currently. TODO: test if this can be enabled somehow
t.plan(1)
t.ok(true)
return
}
t.plan(5)
var a = path.join(__dirname, 'fixtures', 'c')
rimraf.sync(path.join(a, 'link'))
fs.symlinkSync('.gitignore', path.join(a, 'link'))
var b = path.join(__dirname, 'fixtures', 'copy', 'c-dereference')
rimraf.sync(b)
tar.pack(a, { dereference: true })
.pipe(tar.extract(b))
.on('finish', function () {
var files = fs.readdirSync(b).sort()
t.same(files.length, 2)
t.same(files[0], '.gitignore')
t.same(files[1], 'link')
var file1 = path.join(b, '.gitignore')
var file2 = path.join(b, 'link')
t.same(mtime(fs.lstatSync(file1)), mtime(fs.lstatSync(file2)))
t.same(fs.readFileSync(file1), fs.readFileSync(file2))
})
})
test('strip', function (t) {
t.plan(2)
var a = path.join(__dirname, 'fixtures', 'b')
var b = path.join(__dirname, 'fixtures', 'copy', 'b-strip')
rimraf.sync(b)
tar.pack(a)
.pipe(tar.extract(b, { strip: 1 }))
.on('finish', function () {
var files = fs.readdirSync(b).sort()
t.same(files.length, 1)
t.same(files[0], 'test.txt')
})
})
test('strip + map', function (t) {
t.plan(2)
var a = path.join(__dirname, 'fixtures', 'b')
var b = path.join(__dirname, 'fixtures', 'copy', 'b-strip')
rimraf.sync(b)
var uppercase = function (header) {
header.name = header.name.toUpperCase()
return header
}
tar.pack(a)
.pipe(tar.extract(b, { strip: 1, map: uppercase }))
.on('finish', function () {
var files = fs.readdirSync(b).sort()
t.same(files.length, 1)
t.same(files[0], 'TEST.TXT')
})
})
test('map + dir + permissions', function (t) {
t.plan(win32 ? 1 : 2) // skip chmod test, it's not working like unix
var a = path.join(__dirname, 'fixtures', 'b')
var b = path.join(__dirname, 'fixtures', 'copy', 'a-perms')
rimraf.sync(b)
var aWithMode = function (header) {
if (header.name === 'a') {
header.mode = parseInt(700, 8)
}
return header
}
tar.pack(a)
.pipe(tar.extract(b, { map: aWithMode }))
.on('finish', function () {
var files = fs.readdirSync(b).sort()
var stat = fs.statSync(path.join(b, 'a'))
t.same(files.length, 1)
if (!win32) {
t.same(stat.mode & parseInt(777, 8), parseInt(700, 8))
}
})
})
test('specific entries', function (t) {
t.plan(6)
var a = path.join(__dirname, 'fixtures', 'd')
var b = path.join(__dirname, 'fixtures', 'copy', 'd-entries')
var entries = ['file1', 'sub-files/file3', 'sub-dir']
rimraf.sync(b)
tar.pack(a, { entries: entries })
.pipe(tar.extract(b))
.on('finish', function () {
var files = fs.readdirSync(b)
t.same(files.length, 3)
t.notSame(files.indexOf('file1'), -1)
t.notSame(files.indexOf('sub-files'), -1)
t.notSame(files.indexOf('sub-dir'), -1)
var subFiles = fs.readdirSync(path.join(b, 'sub-files'))
t.same(subFiles, ['file3'])
var subDir = fs.readdirSync(path.join(b, 'sub-dir'))
t.same(subDir, ['file5'])
})
})
test('check type while mapping header on packing', function (t) {
t.plan(3)
var e = path.join(__dirname, 'fixtures', 'e')
var checkHeaderType = function (header) {
if (header.name.indexOf('.') === -1) t.same(header.type, header.name)
}
tar.pack(e, { map: checkHeaderType })
})
test('finish callbacks', function (t) {
t.plan(3)
var a = path.join(__dirname, 'fixtures', 'a')
var b = path.join(__dirname, 'fixtures', 'copy', 'a')
rimraf.sync(b)
var packEntries = 0
var extractEntries = 0
var countPackEntry = function (header) { packEntries++ }
var countExtractEntry = function (header) { extractEntries++ }
var pack
var onPackFinish = function (passedPack) {
t.equal(packEntries, 2, 'All entries have been packed') // 2 entries - the file and base directory
t.equal(passedPack, pack, 'The finish hook passes the pack')
}
var onExtractFinish = function () { t.equal(extractEntries, 2) }
pack = tar.pack(a, { map: countPackEntry, finish: onPackFinish })
pack.pipe(tar.extract(b, { map: countExtractEntry, finish: onExtractFinish }))
.on('finish', function () {
t.end()
})
})
test('not finalizing the pack', function (t) {
t.plan(2)
var a = path.join(__dirname, 'fixtures', 'a')
var b = path.join(__dirname, 'fixtures', 'b')
var out = path.join(__dirname, 'fixtures', 'copy', 'merged-packs')
rimraf.sync(out)
var prefixer = function (prefix) {
return function (header) {
header.name = path.join(prefix, header.name)
return header
}
}
tar.pack(a, {
map: prefixer('a-files'),
finalize: false,
finish: packB
})
function packB (pack) {
tar.pack(b, { pack: pack, map: prefixer('b-files') })
.pipe(tar.extract(out))
.on('finish', assertResults)
}
function assertResults () {
var containers = fs.readdirSync(out)
t.deepEqual(containers, ['a-files', 'b-files'])
var aFiles = fs.readdirSync(path.join(out, 'a-files'))
t.deepEqual(aFiles, ['hello.txt'])
}
})
test('do not extract invalid tar', function (t) {
var a = path.join(__dirname, 'fixtures', 'invalid.tar')
var out = path.join(__dirname, 'fixtures', 'invalid')
rimraf.sync(out)
fs.createReadStream(a)
.pipe(tar.extract(out))
.on('error', function (err) {
t.ok(/is not a valid symlink/i.test(err.message))
fs.stat(path.join(out, '../bar'), function (err) {
t.ok(err)
t.end()
})
})
})
test('no abs hardlink targets', function (t) {
var out = path.join(__dirname, 'fixtures', 'invalid')
var outside = path.join(__dirname, 'fixtures', 'outside')
rimraf.sync(out)
var s = tarStream.pack()
fs.writeFileSync(outside, 'something')
s.entry({
type: 'link',
name: 'link',
linkname: outside
})
s.entry({
name: 'link'
}, 'overwrite')
s.finalize()
s.pipe(tar.extract(out))
.on('error', function (err) {
t.ok(err, 'had error')
fs.readFile(outside, 'utf-8', function (err, str) {
t.error(err, 'no error')
t.same(str, 'something')
t.end()
})
})
})

View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Mathias Buus
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@@ -0,0 +1,168 @@
# tar-stream
tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams, which means you can easily extract/parse tarballs without ever hitting the file system.
Note that you still need to gunzip your data if you have a `.tar.gz`. We recommend using [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in conjunction with this.
```
npm install tar-stream
```
[![build status](https://secure.travis-ci.org/mafintosh/tar-stream.png)](http://travis-ci.org/mafintosh/tar-stream)
[![License](https://img.shields.io/badge/license-MIT-blue.svg)](http://opensource.org/licenses/MIT)
## Usage
tar-stream exposes two streams, [pack](https://github.com/mafintosh/tar-stream#packing) which creates tarballs and [extract](https://github.com/mafintosh/tar-stream#extracting) which extracts tarballs. To [modify an existing tarball](https://github.com/mafintosh/tar-stream#modifying-existing-tarballs) use both.
It implements USTAR with additional support for pax extended headers. It should be compatible with all popular tar distributions out there (gnutar, bsdtar etc.)
## Related
If you want to pack/unpack directories on the file system check out [tar-fs](https://github.com/mafintosh/tar-fs) which provides file system bindings to this module.
## Packing
To create a pack stream use `tar.pack()` and call `pack.entry(header, [callback])` to add tar entries.
``` js
var tar = require('tar-stream')
var pack = tar.pack() // pack is a streams2 stream
// add a file called my-test.txt with the content "Hello World!"
pack.entry({ name: 'my-test.txt' }, 'Hello World!')
// add a file called my-stream-test.txt from a stream
var entry = pack.entry({ name: 'my-stream-test.txt', size: 11 }, function(err) {
// the stream was added
// no more entries
pack.finalize()
})
entry.write('hello')
entry.write(' ')
entry.write('world')
entry.end()
// pipe the pack stream somewhere
pack.pipe(process.stdout)
```
## Extracting
To extract a stream use `tar.extract()` and listen for the `entry` event: `extract.on('entry', function(header, stream, next) {...})`
``` js
var extract = tar.extract()
extract.on('entry', function(header, stream, next) {
// header is the tar header
// stream is the content body (might be an empty stream)
// call next when you are done with this entry
stream.on('end', function() {
next() // ready for next entry
})
stream.resume() // just auto drain the stream
})
extract.on('finish', function() {
// all entries read
})
pack.pipe(extract)
```
The tar archive is streamed sequentially, meaning you **must** drain each entry's stream as you get them or else the main extract stream will receive backpressure and stop reading.
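If the archive may be gzipped (see the note at the top), a minimal sketch assuming the recommended `gunzip-maybe` module and a hypothetical `archive.tar.gz` input:
``` js
var fs = require('fs')
var gunzip = require('gunzip-maybe')
var tar = require('tar-stream')

var extract = tar.extract()
extract.on('entry', function(header, stream, next) {
  console.log(header.name)
  stream.on('end', next) // ready for the next entry once this one is drained
  stream.resume()        // drain the entry so the extract stream keeps flowing
})
fs.createReadStream('archive.tar.gz').pipe(gunzip()).pipe(extract)
```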
## Headers
The header object used in `entry` should contain the following properties.
Most of these values can be found by stat'ing a file.
``` js
{
name: 'path/to/this/entry.txt',
size: 1314, // entry size. defaults to 0
  mode: 0o644, // entry mode. defaults to 0o755 for dirs and 0o644 otherwise
mtime: new Date(), // last modified date for entry. defaults to now.
type: 'file', // type of entry. defaults to file. can be:
// file | link | symlink | directory | block-device
// character-device | fifo | contiguous-file
linkname: 'path', // linked file name
uid: 0, // uid of entry owner. defaults to 0
gid: 0, // gid of entry owner. defaults to 0
uname: 'maf', // uname of entry owner. defaults to null
gname: 'staff', // gname of entry owner. defaults to null
devmajor: 0, // device major version. defaults to 0
devminor: 0 // device minor version. defaults to 0
}
```
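For instance, a minimal sketch filling a header from `fs.statSync` (the path is illustrative and `pack` is the stream from the packing example above; assumes a regular file):
``` js
var fs = require('fs')
var st = fs.statSync('path/to/this/entry.txt')
pack.entry({
  name: 'path/to/this/entry.txt',
  size: st.size,   // overwritten from the buffer body below, shown for completeness
  mode: st.mode,
  mtime: st.mtime,
  uid: st.uid,
  gid: st.gid
}, fs.readFileSync('path/to/this/entry.txt'))
```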
## Modifying existing tarballs
Using tar-stream it is easy to rewrite paths / change modes etc. in an existing tarball.
``` js
var extract = tar.extract()
var pack = tar.pack()
var path = require('path')
extract.on('entry', function(header, stream, callback) {
// let's prefix all names with 'tmp'
header.name = path.join('tmp', header.name)
// write the new entry to the pack stream
stream.pipe(pack.entry(header, callback))
})
extract.on('finish', function() {
  // all entries done - let's finalize it
pack.finalize()
})
// pipe the old tarball to the extractor
oldTarballStream.pipe(extract)
// pipe the new tarball to another stream
pack.pipe(newTarballStream)
```
## Saving tarball to fs
``` js
var fs = require('fs')
var tar = require('tar-stream')
var pack = tar.pack() // pack is a streams2 stream
var path = 'YourTarBall.tar'
var yourTarball = fs.createWriteStream(path)
// add a file called YourFile.txt with the content "Hello World!"
pack.entry({name: 'YourFile.txt'}, 'Hello World!', function (err) {
if (err) throw err
pack.finalize()
})
// pipe the pack stream to your file
pack.pipe(yourTarball)
yourTarball.on('close', function () {
console.log(path + ' has been written')
fs.stat(path, function(err, stats) {
if (err) throw err
console.log(stats)
console.log('Got file info successfully!')
})
})
```
## Performance
[See tar-fs for a performance comparison with node-tar](https://github.com/mafintosh/tar-fs/blob/master/README.md#performance)
# License
MIT

View File

@@ -0,0 +1,257 @@
var util = require('util')
var bl = require('bl')
var headers = require('./headers')
var Writable = require('readable-stream').Writable
var PassThrough = require('readable-stream').PassThrough
var noop = function () {}
var overflow = function (size) {
size &= 511
return size && 512 - size
}
var emptyStream = function (self, offset) {
var s = new Source(self, offset)
s.end()
return s
}
var mixinPax = function (header, pax) {
if (pax.path) header.name = pax.path
if (pax.linkpath) header.linkname = pax.linkpath
if (pax.size) header.size = parseInt(pax.size, 10)
header.pax = pax
return header
}
var Source = function (self, offset) {
this._parent = self
this.offset = offset
PassThrough.call(this, { autoDestroy: false })
}
util.inherits(Source, PassThrough)
Source.prototype.destroy = function (err) {
this._parent.destroy(err)
}
var Extract = function (opts) {
if (!(this instanceof Extract)) return new Extract(opts)
Writable.call(this, opts)
opts = opts || {}
this._offset = 0
this._buffer = bl()
this._missing = 0
this._partial = false
this._onparse = noop
this._header = null
this._stream = null
this._overflow = null
this._cb = null
this._locked = false
this._destroyed = false
this._pax = null
this._paxGlobal = null
this._gnuLongPath = null
this._gnuLongLinkPath = null
var self = this
var b = self._buffer
var oncontinue = function () {
self._continue()
}
var onunlock = function (err) {
self._locked = false
if (err) return self.destroy(err)
if (!self._stream) oncontinue()
}
var onstreamend = function () {
self._stream = null
var drain = overflow(self._header.size)
if (drain) self._parse(drain, ondrain)
else self._parse(512, onheader)
if (!self._locked) oncontinue()
}
var ondrain = function () {
self._buffer.consume(overflow(self._header.size))
self._parse(512, onheader)
oncontinue()
}
var onpaxglobalheader = function () {
var size = self._header.size
self._paxGlobal = headers.decodePax(b.slice(0, size))
b.consume(size)
onstreamend()
}
var onpaxheader = function () {
var size = self._header.size
self._pax = headers.decodePax(b.slice(0, size))
if (self._paxGlobal) self._pax = Object.assign({}, self._paxGlobal, self._pax)
b.consume(size)
onstreamend()
}
var ongnulongpath = function () {
var size = self._header.size
this._gnuLongPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
b.consume(size)
onstreamend()
}
var ongnulonglinkpath = function () {
var size = self._header.size
this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
b.consume(size)
onstreamend()
}
var onheader = function () {
var offset = self._offset
var header
try {
header = self._header = headers.decode(b.slice(0, 512), opts.filenameEncoding, opts.allowUnknownFormat)
} catch (err) {
self.emit('error', err)
}
b.consume(512)
if (!header) {
self._parse(512, onheader)
oncontinue()
return
}
if (header.type === 'gnu-long-path') {
self._parse(header.size, ongnulongpath)
oncontinue()
return
}
if (header.type === 'gnu-long-link-path') {
self._parse(header.size, ongnulonglinkpath)
oncontinue()
return
}
if (header.type === 'pax-global-header') {
self._parse(header.size, onpaxglobalheader)
oncontinue()
return
}
if (header.type === 'pax-header') {
self._parse(header.size, onpaxheader)
oncontinue()
return
}
if (self._gnuLongPath) {
header.name = self._gnuLongPath
self._gnuLongPath = null
}
if (self._gnuLongLinkPath) {
header.linkname = self._gnuLongLinkPath
self._gnuLongLinkPath = null
}
if (self._pax) {
self._header = header = mixinPax(header, self._pax)
self._pax = null
}
self._locked = true
if (!header.size || header.type === 'directory') {
self._parse(512, onheader)
self.emit('entry', header, emptyStream(self, offset), onunlock)
return
}
self._stream = new Source(self, offset)
self.emit('entry', header, self._stream, onunlock)
self._parse(header.size, onstreamend)
oncontinue()
}
this._onheader = onheader
this._parse(512, onheader)
}
util.inherits(Extract, Writable)
Extract.prototype.destroy = function (err) {
if (this._destroyed) return
this._destroyed = true
if (err) this.emit('error', err)
this.emit('close')
if (this._stream) this._stream.emit('close')
}
Extract.prototype._parse = function (size, onparse) {
if (this._destroyed) return
this._offset += size
this._missing = size
if (onparse === this._onheader) this._partial = false
this._onparse = onparse
}
Extract.prototype._continue = function () {
if (this._destroyed) return
var cb = this._cb
this._cb = noop
if (this._overflow) this._write(this._overflow, undefined, cb)
else cb()
}
Extract.prototype._write = function (data, enc, cb) {
if (this._destroyed) return
var s = this._stream
var b = this._buffer
var missing = this._missing
if (data.length) this._partial = true
// we do not reach end-of-chunk now. just forward it
if (data.length < missing) {
this._missing -= data.length
this._overflow = null
if (s) return s.write(data, cb)
b.append(data)
return cb()
}
// end-of-chunk. the parser should call cb.
this._cb = cb
this._missing = 0
var overflow = null
if (data.length > missing) {
overflow = data.slice(missing)
data = data.slice(0, missing)
}
if (s) s.end(data)
else b.append(data)
this._overflow = overflow
this._onparse()
}
Extract.prototype._final = function (cb) {
if (this._partial) return this.destroy(new Error('Unexpected end of data'))
cb()
}
module.exports = Extract

View File

@@ -0,0 +1,295 @@
var alloc = Buffer.alloc
var ZEROS = '0000000000000000000'
var SEVENS = '7777777777777777777'
var ZERO_OFFSET = '0'.charCodeAt(0)
var USTAR_MAGIC = Buffer.from('ustar\x00', 'binary')
var USTAR_VER = Buffer.from('00', 'binary')
var GNU_MAGIC = Buffer.from('ustar\x20', 'binary')
var GNU_VER = Buffer.from('\x20\x00', 'binary')
var MASK = parseInt('7777', 8)
var MAGIC_OFFSET = 257
var VERSION_OFFSET = 263
var clamp = function (index, len, defaultValue) {
if (typeof index !== 'number') return defaultValue
index = ~~index // Coerce to integer.
if (index >= len) return len
if (index >= 0) return index
index += len
if (index >= 0) return index
return 0
}
var toType = function (flag) {
switch (flag) {
case 0:
return 'file'
case 1:
return 'link'
case 2:
return 'symlink'
case 3:
return 'character-device'
case 4:
return 'block-device'
case 5:
return 'directory'
case 6:
return 'fifo'
case 7:
return 'contiguous-file'
case 72:
return 'pax-header'
case 55:
return 'pax-global-header'
case 27:
return 'gnu-long-link-path'
case 28:
case 30:
return 'gnu-long-path'
}
return null
}
var toTypeflag = function (flag) {
switch (flag) {
case 'file':
return 0
case 'link':
return 1
case 'symlink':
return 2
case 'character-device':
return 3
case 'block-device':
return 4
case 'directory':
return 5
case 'fifo':
return 6
case 'contiguous-file':
return 7
case 'pax-header':
return 72
}
return 0
}
var indexOf = function (block, num, offset, end) {
for (; offset < end; offset++) {
if (block[offset] === num) return offset
}
return end
}
var cksum = function (block) {
var sum = 8 * 32
for (var i = 0; i < 148; i++) sum += block[i]
for (var j = 156; j < 512; j++) sum += block[j]
return sum
}
var encodeOct = function (val, n) {
val = val.toString(8)
if (val.length > n) return SEVENS.slice(0, n) + ' '
else return ZEROS.slice(0, n - val.length) + val + ' '
}
/* Copied from the node-tar repo and modified to meet
* tar-stream coding standard.
*
* Source: https://github.com/npm/node-tar/blob/51b6627a1f357d2eb433e7378e5f05e83b7aa6cd/lib/header.js#L349
*/
function parse256 (buf) {
// first byte MUST be either 80 or FF
// 80 for positive, FF for 2's comp
var positive
if (buf[0] === 0x80) positive = true
else if (buf[0] === 0xFF) positive = false
else return null
// build up a base-256 tuple from the least sig to the highest
var tuple = []
for (var i = buf.length - 1; i > 0; i--) {
var byte = buf[i]
if (positive) tuple.push(byte)
else tuple.push(0xFF - byte)
}
var sum = 0
var l = tuple.length
for (i = 0; i < l; i++) {
sum += tuple[i] * Math.pow(256, i)
}
return positive ? sum : -1 * sum
}
var decodeOct = function (val, offset, length) {
val = val.slice(offset, offset + length)
offset = 0
// If prefixed with 0x80 then parse as a base-256 integer
if (val[offset] & 0x80) {
return parse256(val)
} else {
// Older versions of tar can prefix with spaces
while (offset < val.length && val[offset] === 32) offset++
var end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length)
while (offset < end && val[offset] === 0) offset++
if (end === offset) return 0
return parseInt(val.slice(offset, end).toString(), 8)
}
}
var decodeStr = function (val, offset, length, encoding) {
return val.slice(offset, indexOf(val, 0, offset, offset + length)).toString(encoding)
}
var addLength = function (str) {
var len = Buffer.byteLength(str)
var digits = Math.floor(Math.log(len) / Math.log(10)) + 1
if (len + digits >= Math.pow(10, digits)) digits++
return (len + digits) + str
}
exports.decodeLongPath = function (buf, encoding) {
return decodeStr(buf, 0, buf.length, encoding)
}
exports.encodePax = function (opts) { // TODO: encode more stuff in pax
var result = ''
if (opts.name) result += addLength(' path=' + opts.name + '\n')
if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
var pax = opts.pax
if (pax) {
for (var key in pax) {
result += addLength(' ' + key + '=' + pax[key] + '\n')
}
}
return Buffer.from(result)
}
exports.decodePax = function (buf) {
var result = {}
while (buf.length) {
var i = 0
while (i < buf.length && buf[i] !== 32) i++
var len = parseInt(buf.slice(0, i).toString(), 10)
if (!len) return result
var b = buf.slice(i + 1, len - 1).toString()
var keyIndex = b.indexOf('=')
if (keyIndex === -1) return result
result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)
buf = buf.slice(len)
}
return result
}
exports.encode = function (opts) {
var buf = alloc(512)
var name = opts.name
var prefix = ''
if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
if (Buffer.byteLength(name) !== name.length) return null // utf-8
while (Buffer.byteLength(name) > 100) {
var i = name.indexOf('/')
if (i === -1) return null
prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
name = name.slice(i + 1)
}
if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null
if (opts.linkname && Buffer.byteLength(opts.linkname) > 100) return null
buf.write(name)
buf.write(encodeOct(opts.mode & MASK, 6), 100)
buf.write(encodeOct(opts.uid, 6), 108)
buf.write(encodeOct(opts.gid, 6), 116)
buf.write(encodeOct(opts.size, 11), 124)
buf.write(encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136)
buf[156] = ZERO_OFFSET + toTypeflag(opts.type)
if (opts.linkname) buf.write(opts.linkname, 157)
USTAR_MAGIC.copy(buf, MAGIC_OFFSET)
USTAR_VER.copy(buf, VERSION_OFFSET)
if (opts.uname) buf.write(opts.uname, 265)
if (opts.gname) buf.write(opts.gname, 297)
buf.write(encodeOct(opts.devmajor || 0, 6), 329)
buf.write(encodeOct(opts.devminor || 0, 6), 337)
if (prefix) buf.write(prefix, 345)
buf.write(encodeOct(cksum(buf), 6), 148)
return buf
}
exports.decode = function (buf, filenameEncoding, allowUnknownFormat) {
var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
var name = decodeStr(buf, 0, 100, filenameEncoding)
var mode = decodeOct(buf, 100, 8)
var uid = decodeOct(buf, 108, 8)
var gid = decodeOct(buf, 116, 8)
var size = decodeOct(buf, 124, 12)
var mtime = decodeOct(buf, 136, 12)
var type = toType(typeflag)
var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100, filenameEncoding)
var uname = decodeStr(buf, 265, 32)
var gname = decodeStr(buf, 297, 32)
var devmajor = decodeOct(buf, 329, 8)
var devminor = decodeOct(buf, 337, 8)
var c = cksum(buf)
// checksum is still initial value if header was null.
if (c === 8 * 32) return null
// valid checksum
if (c !== decodeOct(buf, 148, 8)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')
if (USTAR_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0) {
// ustar (posix) format.
// prepend prefix, if present.
if (buf[345]) name = decodeStr(buf, 345, 155, filenameEncoding) + '/' + name
} else if (GNU_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0 &&
GNU_VER.compare(buf, VERSION_OFFSET, VERSION_OFFSET + 2) === 0) {
// 'gnu'/'oldgnu' format. Similar to ustar, but has support for incremental and
// multi-volume tarballs.
} else {
if (!allowUnknownFormat) {
throw new Error('Invalid tar header: unknown format.')
}
}
// to support old tar versions that use trailing / to indicate dirs
if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
return {
name,
mode,
uid,
gid,
size,
mtime: new Date(1000 * mtime),
type,
linkname,
uname,
gname,
devmajor,
devminor
}
}

View File

@@ -0,0 +1,2 @@
exports.extract = require('./extract')
exports.pack = require('./pack')

View File

@@ -0,0 +1,255 @@
var constants = require('fs-constants')
var eos = require('end-of-stream')
var inherits = require('inherits')
var alloc = Buffer.alloc
var Readable = require('readable-stream').Readable
var Writable = require('readable-stream').Writable
var StringDecoder = require('string_decoder').StringDecoder
var headers = require('./headers')
var DMODE = parseInt('755', 8)
var FMODE = parseInt('644', 8)
var END_OF_TAR = alloc(1024)
var noop = function () {}
var overflow = function (self, size) {
size &= 511
if (size) self.push(END_OF_TAR.slice(0, 512 - size))
}
function modeToType (mode) {
switch (mode & constants.S_IFMT) {
case constants.S_IFBLK: return 'block-device'
case constants.S_IFCHR: return 'character-device'
case constants.S_IFDIR: return 'directory'
case constants.S_IFIFO: return 'fifo'
case constants.S_IFLNK: return 'symlink'
}
return 'file'
}
var Sink = function (to) {
Writable.call(this)
this.written = 0
this._to = to
this._destroyed = false
}
inherits(Sink, Writable)
Sink.prototype._write = function (data, enc, cb) {
this.written += data.length
if (this._to.push(data)) return cb()
this._to._drain = cb
}
Sink.prototype.destroy = function () {
if (this._destroyed) return
this._destroyed = true
this.emit('close')
}
var LinkSink = function () {
Writable.call(this)
this.linkname = ''
this._decoder = new StringDecoder('utf-8')
this._destroyed = false
}
inherits(LinkSink, Writable)
LinkSink.prototype._write = function (data, enc, cb) {
this.linkname += this._decoder.write(data)
cb()
}
LinkSink.prototype.destroy = function () {
if (this._destroyed) return
this._destroyed = true
this.emit('close')
}
var Void = function () {
Writable.call(this)
this._destroyed = false
}
inherits(Void, Writable)
Void.prototype._write = function (data, enc, cb) {
cb(new Error('No body allowed for this entry'))
}
Void.prototype.destroy = function () {
if (this._destroyed) return
this._destroyed = true
this.emit('close')
}
var Pack = function (opts) {
if (!(this instanceof Pack)) return new Pack(opts)
Readable.call(this, opts)
this._drain = noop
this._finalized = false
this._finalizing = false
this._destroyed = false
this._stream = null
}
inherits(Pack, Readable)
Pack.prototype.entry = function (header, buffer, callback) {
if (this._stream) throw new Error('already piping an entry')
if (this._finalized || this._destroyed) return
if (typeof buffer === 'function') {
callback = buffer
buffer = null
}
if (!callback) callback = noop
var self = this
if (!header.size || header.type === 'symlink') header.size = 0
if (!header.type) header.type = modeToType(header.mode)
if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE
if (!header.uid) header.uid = 0
if (!header.gid) header.gid = 0
if (!header.mtime) header.mtime = new Date()
if (typeof buffer === 'string') buffer = Buffer.from(buffer)
if (Buffer.isBuffer(buffer)) {
header.size = buffer.length
this._encode(header)
var ok = this.push(buffer)
overflow(self, header.size)
if (ok) process.nextTick(callback)
else this._drain = callback
return new Void()
}
if (header.type === 'symlink' && !header.linkname) {
var linkSink = new LinkSink()
eos(linkSink, function (err) {
if (err) { // stream was closed
self.destroy()
return callback(err)
}
header.linkname = linkSink.linkname
self._encode(header)
callback()
})
return linkSink
}
this._encode(header)
if (header.type !== 'file' && header.type !== 'contiguous-file') {
process.nextTick(callback)
return new Void()
}
var sink = new Sink(this)
this._stream = sink
eos(sink, function (err) {
self._stream = null
if (err) { // stream was closed
self.destroy()
return callback(err)
}
if (sink.written !== header.size) { // corrupting tar
self.destroy()
return callback(new Error('size mismatch'))
}
overflow(self, header.size)
if (self._finalizing) self.finalize()
callback()
})
return sink
}
Pack.prototype.finalize = function () {
if (this._stream) {
this._finalizing = true
return
}
if (this._finalized) return
this._finalized = true
this.push(END_OF_TAR)
this.push(null)
}
Pack.prototype.destroy = function (err) {
if (this._destroyed) return
this._destroyed = true
if (err) this.emit('error', err)
this.emit('close')
if (this._stream && this._stream.destroy) this._stream.destroy()
}
Pack.prototype._encode = function (header) {
if (!header.pax) {
var buf = headers.encode(header)
if (buf) {
this.push(buf)
return
}
}
this._encodePax(header)
}
Pack.prototype._encodePax = function (header) {
var paxHeader = headers.encodePax({
name: header.name,
linkname: header.linkname,
pax: header.pax
})
var newHeader = {
name: 'PaxHeader',
mode: header.mode,
uid: header.uid,
gid: header.gid,
size: paxHeader.length,
mtime: header.mtime,
type: 'pax-header',
linkname: header.linkname && 'PaxHeader',
uname: header.uname,
gname: header.gname,
devmajor: header.devmajor,
devminor: header.devminor
}
this.push(headers.encode(newHeader))
this.push(paxHeader)
overflow(this, paxHeader.length)
newHeader.size = header.size
newHeader.type = header.type
this.push(headers.encode(newHeader))
}
Pack.prototype._read = function (n) {
var drain = this._drain
this._drain = noop
drain()
}
module.exports = Pack

View File

@@ -0,0 +1,58 @@
{
"name": "tar-stream",
"version": "2.2.0",
"description": "tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.",
"author": "Mathias Buus <mathiasbuus@gmail.com>",
"dependencies": {
"bl": "^4.0.3",
"end-of-stream": "^1.4.1",
"fs-constants": "^1.0.0",
"inherits": "^2.0.3",
"readable-stream": "^3.1.1"
},
"devDependencies": {
"concat-stream": "^2.0.0",
"standard": "^12.0.1",
"tape": "^4.9.2"
},
"scripts": {
"test": "standard && tape test/extract.js test/pack.js",
"test-all": "standard && tape test/*.js"
},
"keywords": [
"tar",
"tarball",
"parse",
"parser",
"generate",
"generator",
"stream",
"stream2",
"streams",
"streams2",
"streaming",
"pack",
"extract",
"modify"
],
"bugs": {
"url": "https://github.com/mafintosh/tar-stream/issues"
},
"homepage": "https://github.com/mafintosh/tar-stream",
"main": "index.js",
"files": [
"*.js",
"LICENSE"
],
"directories": {
"test": "test"
},
"license": "MIT",
"repository": {
"type": "git",
"url": "git+https://github.com/mafintosh/tar-stream.git"
},
"engines": {
"node": ">=6"
}
}

View File

@@ -0,0 +1,11 @@
const tar = require('tar-stream')
const fs = require('fs')
const path = require('path')
const pipeline = require('pump') // or require('stream').pipeline
fs.createReadStream('test.tar')
.pipe(tar.extract())
.on('entry', function (header, stream, done) {
console.log(header.name)
pipeline(stream, fs.createWriteStream(path.join('/tmp', header.name)), done)
})

67
node_modules/prebuild-install/package.json generated vendored Normal file
View File

@@ -0,0 +1,67 @@
{
"name": "prebuild-install",
"version": "7.1.3",
"description": "A command line tool to easily install prebuilt binaries for multiple version of node/iojs on a specific platform",
"scripts": {
"test": "standard && hallmark && tape test/*-test.js",
"hallmark": "hallmark --fix"
},
"keywords": [
"prebuilt",
"binaries",
"native",
"addon",
"module",
"c",
"c++",
"bindings",
"devops",
"napi"
],
"dependencies": {
"detect-libc": "^2.0.0",
"expand-template": "^2.0.3",
"github-from-package": "0.0.0",
"minimist": "^1.2.3",
"mkdirp-classic": "^0.5.3",
"napi-build-utils": "^2.0.0",
"node-abi": "^3.3.0",
"pump": "^3.0.0",
"rc": "^1.2.7",
"simple-get": "^4.0.0",
"tar-fs": "^2.0.0",
"tunnel-agent": "^0.6.0"
},
"devDependencies": {
"a-native-module": "^1.0.0",
"hallmark": "^4.0.0",
"nock": "^10.0.6",
"rimraf": "^2.5.2",
"standard": "^16.0.4",
"tape": "^5.3.1",
"tempy": "0.2.1"
},
"bin": "./bin.js",
"repository": {
"type": "git",
"url": "https://github.com/prebuild/prebuild-install.git"
},
"author": "Mathias Buus (@mafintosh)",
"contributors": [
"Julian Gruber <julian@juliangruber.com> (https://github.com/juliangruber)",
"Brett Lawson <brett19@gmail.com> (https://github.com/brett19)",
"Pieter Hintjens <ph@imatix.com> (https://github.com/hintjens)",
"Lars-Magnus Skog <ralphtheninja@riseup.net> (https://github.com/ralphtheninja)",
"Jesús Leganés Combarro <piranna@gmail.com> (https://github.com/piranna)",
"Mathias Küsel <mathiask@hotmail.de> (https://github.com/mathiask88)",
"Lukas Geiger <lukas.geiger94@gmail.com> (https://github.com/lgeiger)"
],
"license": "MIT",
"bugs": {
"url": "https://github.com/prebuild/prebuild-install/issues"
},
"homepage": "https://github.com/prebuild/prebuild-install",
"engines": {
"node": ">=10"
}
}

35
node_modules/prebuild-install/proxy.js generated vendored Normal file
View File

@@ -0,0 +1,35 @@
const url = require('url')
const tunnel = require('tunnel-agent')
const util = require('./util')
function applyProxy (reqOpts, opts) {
const log = opts.log || util.noopLogger
const proxy = opts['https-proxy'] || opts.proxy
if (proxy) {
// eslint-disable-next-line node/no-deprecated-api
const parsedDownloadUrl = url.parse(reqOpts.url)
// eslint-disable-next-line node/no-deprecated-api
const parsedProxy = url.parse(proxy)
const uriProtocol = (parsedDownloadUrl.protocol === 'https:' ? 'https' : 'http')
const proxyProtocol = (parsedProxy.protocol === 'https:' ? 'Https' : 'Http')
const tunnelFnName = [uriProtocol, proxyProtocol].join('Over')
reqOpts.agent = tunnel[tunnelFnName]({
proxy: {
host: parsedProxy.hostname,
port: +parsedProxy.port,
proxyAuth: parsedProxy.auth
}
})
log.http('request', 'Proxy setup detected (Host: ' +
parsedProxy.hostname + ', Port: ' +
parsedProxy.port + ', Authentication: ' +
(parsedProxy.auth ? 'Yes' : 'No') + ')' +
' Tunneling with ' + tunnelFnName)
}
return reqOpts
}
module.exports = applyProxy

64
node_modules/prebuild-install/rc.js generated vendored Normal file
View File

@@ -0,0 +1,64 @@
const path = require('path')
const minimist = require('minimist')
const getAbi = require('node-abi').getAbi
const detectLibc = require('detect-libc')
const napi = require('napi-build-utils')
const env = process.env
const libc = env.LIBC || process.env.npm_config_libc ||
(detectLibc.isNonGlibcLinuxSync() && detectLibc.familySync()) || ''
// Get the configuration
module.exports = function (pkg) {
const pkgConf = pkg.config || {}
const buildFromSource = env.npm_config_build_from_source
const rc = require('rc')('prebuild-install', {
target: pkgConf.target || env.npm_config_target || process.versions.node,
runtime: pkgConf.runtime || env.npm_config_runtime || 'node',
arch: pkgConf.arch || env.npm_config_arch || process.arch,
libc: libc,
platform: env.npm_config_platform || process.platform,
debug: env.npm_config_debug === 'true',
force: false,
verbose: env.npm_config_verbose === 'true',
buildFromSource: buildFromSource === pkg.name || buildFromSource === 'true',
path: '.',
proxy: env.npm_config_proxy || env.http_proxy || env.HTTP_PROXY,
'https-proxy': env.npm_config_https_proxy || env.https_proxy || env.HTTPS_PROXY,
'local-address': env.npm_config_local_address,
'local-prebuilds': 'prebuilds',
'tag-prefix': 'v',
download: env.npm_config_download
}, minimist(process.argv, {
alias: {
target: 't',
runtime: 'r',
help: 'h',
arch: 'a',
path: 'p',
version: 'v',
download: 'd',
buildFromSource: 'build-from-source',
token: 'T'
}
}))
rc.path = path.resolve(rc.path === true ? '.' : rc.path || '.')
if (napi.isNapiRuntime(rc.runtime) && rc.target === process.versions.node) {
rc.target = napi.getBestNapiBuildVersion()
}
rc.abi = napi.isNapiRuntime(rc.runtime) ? rc.target : getAbi(rc.target, rc.runtime)
rc.libc = rc.platform !== 'linux' || rc.libc === detectLibc.GLIBC ? '' : rc.libc
return rc
}
// Print the configuration values when executed standalone for testing purposes
if (!module.parent) {
console.log(JSON.stringify(module.exports({}), null, 2))
}

143
node_modules/prebuild-install/util.js generated vendored Normal file
View File

@@ -0,0 +1,143 @@
const path = require('path')
const github = require('github-from-package')
const home = require('os').homedir
const crypto = require('crypto')
const expandTemplate = require('expand-template')()
function getDownloadUrl (opts) {
const pkgName = opts.pkg.name.replace(/^@[a-zA-Z0-9_\-.~]+\//, '')
return expandTemplate(urlTemplate(opts), {
name: pkgName,
package_name: pkgName,
version: opts.pkg.version,
major: opts.pkg.version.split('.')[0],
minor: opts.pkg.version.split('.')[1],
patch: opts.pkg.version.split('.')[2],
prerelease: opts.pkg.version.split('-')[1],
build: opts.pkg.version.split('+')[1],
abi: opts.abi || process.versions.modules,
node_abi: process.versions.modules,
runtime: opts.runtime || 'node',
platform: opts.platform,
arch: opts.arch,
libc: opts.libc || '',
configuration: (opts.debug ? 'Debug' : 'Release'),
module_name: opts.pkg.binary && opts.pkg.binary.module_name,
tag_prefix: opts['tag-prefix']
})
}
function getApiUrl (opts) {
return github(opts.pkg).replace('github.com', 'api.github.com/repos') + '/releases'
}
function getAssetUrl (opts, assetId) {
return getApiUrl(opts) + '/assets/' + assetId
}
function urlTemplate (opts) {
if (typeof opts.download === 'string') {
return opts.download
}
const packageName = '{name}-v{version}-{runtime}-v{abi}-{platform}{libc}-{arch}.tar.gz'
const hostMirrorUrl = getHostMirrorUrl(opts)
if (hostMirrorUrl) {
return hostMirrorUrl + '/{tag_prefix}{version}/' + packageName
}
if (opts.pkg.binary && opts.pkg.binary.host) {
return [
opts.pkg.binary.host,
opts.pkg.binary.remote_path,
opts.pkg.binary.package_name || packageName
].map(function (path) {
return trimSlashes(path)
}).filter(Boolean).join('/')
}
return github(opts.pkg) + '/releases/download/{tag_prefix}{version}/' + packageName
}
function getEnvPrefix (pkgName) {
return 'npm_config_' + (pkgName || '').replace(/[^a-zA-Z0-9]/g, '_').replace(/^_/, '')
}
function getHostMirrorUrl (opts) {
const propName = getEnvPrefix(opts.pkg.name) + '_binary_host'
return process.env[propName] || process.env[propName + '_mirror']
}
function trimSlashes (str) {
if (str) return str.replace(/^\.\/|^\/|\/$/g, '')
}
function cachedPrebuild (url) {
const digest = crypto.createHash('sha512').update(url).digest('hex').slice(0, 6)
return path.join(prebuildCache(), digest + '-' + path.basename(url).replace(/[^a-zA-Z0-9.]+/g, '-'))
}
function npmCache () {
const env = process.env
return env.npm_config_cache || (env.APPDATA ? path.join(env.APPDATA, 'npm-cache') : path.join(home(), '.npm'))
}
function prebuildCache () {
return path.join(npmCache(), '_prebuilds')
}
function tempFile (cached) {
return cached + '.' + process.pid + '-' + Math.random().toString(16).slice(2) + '.tmp'
}
function packageOrigin (env, pkg) {
// npm <= 6: metadata is stored on disk in node_modules
if (pkg._from) {
return pkg._from
}
// npm 7: metadata is exposed to environment by arborist
if (env.npm_package_from) {
// NOTE: seems undefined atm (npm 7.0.2)
return env.npm_package_from
}
if (env.npm_package_resolved) {
// NOTE: not sure about the difference with _from, but it's all we have
return env.npm_package_resolved
}
}
function localPrebuild (url, opts) {
const propName = getEnvPrefix(opts.pkg.name) + '_local_prebuilds'
const prefix = process.env[propName] || opts['local-prebuilds'] || 'prebuilds'
return path.join(prefix, path.basename(url))
}
const noopLogger = {
http: function () {},
silly: function () {},
debug: function () {},
info: function () {},
warn: function () {},
error: function () {},
critical: function () {},
alert: function () {},
emergency: function () {},
notice: function () {},
verbose: function () {},
fatal: function () {}
}
exports.getDownloadUrl = getDownloadUrl
exports.getApiUrl = getApiUrl
exports.getAssetUrl = getAssetUrl
exports.urlTemplate = urlTemplate
exports.cachedPrebuild = cachedPrebuild
exports.localPrebuild = localPrebuild
exports.prebuildCache = prebuildCache
exports.npmCache = npmCache
exports.tempFile = tempFile
exports.packageOrigin = packageOrigin
exports.noopLogger = noopLogger