master^2
ashok 1 year ago
commit e2ddf08b3a

node_modules/.bin/color-support generated vendored

@@ -1 +0,0 @@
../color-support/bin.js

node_modules/.bin/node-pre-gyp generated vendored

@@ -1 +0,0 @@
../@mapbox/node-pre-gyp/bin/node-pre-gyp

node_modules/.bin/nopt generated vendored

@@ -1 +0,0 @@
../nopt/bin/nopt.js

node_modules/.package-lock.json generated vendored

File diff suppressed because it is too large

@@ -1,510 +0,0 @@
# node-pre-gyp changelog
## 1.0.11
- Fixes dependabot alert [CVE-2021-44906](https://nvd.nist.gov/vuln/detail/CVE-2021-44906)
## 1.0.10
- Upgraded minimist to 1.2.6 to address dependabot alert [CVE-2021-44906](https://nvd.nist.gov/vuln/detail/CVE-2021-44906)
## 1.0.9
- Upgraded node-fetch to 2.6.7 to address [CVE-2022-0235](https://www.cve.org/CVERecord?id=CVE-2022-0235)
- Upgraded detect-libc to 2.0.0 to use the non-blocking Node.js (>=12) Report API
## 1.0.8
- Downgraded npmlog to maintain node v10 and v8 support (https://github.com/mapbox/node-pre-gyp/pull/624)
## 1.0.7
- Upgraded nyc and npmlog to address https://github.com/advisories/GHSA-93q8-gq69-wqmw
## 1.0.6
- Added node v17 to the internal node releases listing
- Upgraded various dependencies declared in package.json to latest major versions (node-fetch from 2.6.1 to 2.6.5, npmlog from 4.1.2 to 5.0.1, semver from 7.3.4 to 7.3.5, and tar from 6.1.0 to 6.1.11)
- Fixed bug in `staging_host` parameter (https://github.com/mapbox/node-pre-gyp/pull/590)
## 1.0.5
- Fix circular reference warning with node >= v14
## 1.0.4
- Added node v16 to the internal node releases listing
## 1.0.3
- Improved support configuring s3 uploads (solves https://github.com/mapbox/node-pre-gyp/issues/571)
- New options added in https://github.com/mapbox/node-pre-gyp/pull/576: 'bucket', 'region', and `s3ForcePathStyle`
## 1.0.2
- Fixed regression in proxy support (https://github.com/mapbox/node-pre-gyp/issues/572)
## 1.0.1
- Switched from mkdirp@1.0.4 to make-dir@3.1.0 to avoid this bug: https://github.com/isaacs/node-mkdirp/issues/31
## 1.0.0
- Module is now name-spaced at `@mapbox/node-pre-gyp` and the original `node-pre-gyp` is deprecated.
- New: support for staging and production s3 targets (see README.md)
- BREAKING: no longer supporting `node_pre_gyp_accessKeyId` & `node_pre_gyp_secretAccessKey`, use `AWS_ACCESS_KEY_ID` & `AWS_SECRET_ACCESS_KEY` instead to authenticate against s3 for `info`, `publish`, and `unpublish` commands.
- Dropped node v6 support, added node v14 support
- Switched tests to use mapbox-owned bucket for testing
- Added coverage tracking and linting with eslint
- Added back support for symlinks inside the tarball
- Upgraded all test apps to N-API/node-addon-api
- Added `node_pre_gyp_s3_host` env var which has priority over the `--s3_host` option or default.
- Replaced needle with node-fetch
- Added proxy support for node-fetch
- Upgraded to mkdirp@1.x
## 0.17.0
- Got travis + appveyor green again
- Added support for more node versions
## 0.16.0
- Added Node 15 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/520)
## 0.15.0
- Bump dependency on `mkdirp` from `^0.5.1` to `^0.5.3` (https://github.com/mapbox/node-pre-gyp/pull/492)
- Bump dependency on `needle` from `^2.2.1` to `^2.5.0` (https://github.com/mapbox/node-pre-gyp/pull/502)
- Added Node 14 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/501)
## 0.14.0
- Defer modules requires in napi.js (https://github.com/mapbox/node-pre-gyp/pull/434)
- Bump dependency on `tar` from `^4` to `^4.4.2` (https://github.com/mapbox/node-pre-gyp/pull/454)
- Support extracting compiled binary from local offline mirror (https://github.com/mapbox/node-pre-gyp/pull/459)
- Added Node 13 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/483)
## 0.13.0
- Added Node 12 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/449)
## 0.12.0
- Fixed double-build problem with node v10 (https://github.com/mapbox/node-pre-gyp/pull/428)
- Added node 11 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/422)
## 0.11.0
- Fixed double-install problem with node v10
- Significant N-API improvements (https://github.com/mapbox/node-pre-gyp/pull/405)
## 0.10.3
- Now will use `request` over `needle` if request is installed. By default `needle` is used for `https`. This should unbreak proxy support that regressed in v0.9.0
## 0.10.2
- Fixed rc/deep-extend security vulnerability
- Fixed broken reinstall script due to incorrectly named get_best_napi_version
## 0.10.1
- Fix needle error event (@medns)
## 0.10.0
- Allow for a single-level module path when packing @allenluce (https://github.com/mapbox/node-pre-gyp/pull/371)
- Log warnings instead of errors when falling back @xzyfer (https://github.com/mapbox/node-pre-gyp/pull/366)
- Add Node.js v10 support to tests (https://github.com/mapbox/node-pre-gyp/pull/372)
- Remove retire.js from CI (https://github.com/mapbox/node-pre-gyp/pull/372)
- Remove support for Node.js v4 due to [EOL on April 30th, 2018](https://github.com/nodejs/Release/blob/7dd52354049cae99eed0e9fe01345b0722a86fde/schedule.json#L14)
- Update appveyor tests to install default NPM version instead of NPM v2.x for all Windows builds (https://github.com/mapbox/node-pre-gyp/pull/375)
## 0.9.1
- Fixed regression (in v0.9.0) with support for http redirects @allenluce (https://github.com/mapbox/node-pre-gyp/pull/361)
## 0.9.0
- Switched from using `request` to `needle` to reduce size of module deps (https://github.com/mapbox/node-pre-gyp/pull/350)
## 0.8.0
- N-API support (@inspiredware)
## 0.7.1
- Upgraded to tar v4.x
## 0.7.0
- Updated request and hawk (#347)
- Dropped node v0.10.x support
## 0.6.40
- Improved error reporting if an install fails
## 0.6.39
- Support for node v9
- Support for versioning on `{libc}` to allow binaries to work on non-glibc linux systems like alpine linux
## 0.6.38
- Maintaining compatibility (for v0.6.x series) with node v0.10.x
## 0.6.37
- Solved one part of #276: we now deduce the node ABI from the major version for node >= 2 even when not stored in the abi_crosswalk.json
- Fixed docs to avoid mentioning the deprecated and dangerous `prepublish` in package.json (#291)
- Add new node versions to crosswalk
- Ported tests to use tape instead of mocha
- Got appveyor tests passing by downgrading npm and node-gyp
## 0.6.36
- Removed the running of `testbinary` during install. Because this was regressed for so long, it is too dangerous to re-enable by default. Developers needing validation can call `node-pre-gyp testbinary` directly.
- Fixed regression in v0.6.35 for electron installs (now skipping binary validation which is not yet supported for electron)
## 0.6.35
- No longer recommending `npm ls` in `prepublish` (#291)
- Fixed testbinary command (#283) @szdavid92
## 0.6.34
- Added new node versions to crosswalk, including v8
- Upgraded deps to latest versions, started using `^` instead of `~` for all deps.
## 0.6.33
- Improved support for yarn
## 0.6.32
- Honor npm configuration for CA bundles (@heikkipora)
- Add node-pre-gyp and npm versions to user agent (@addaleax)
- Updated various deps
- Add known node version for v7.x
## 0.6.31
- Updated various deps
## 0.6.30
- Update to npmlog@4.x and semver@5.3.x
- Add known node version for v6.5.0
## 0.6.29
- Add known node versions for v0.10.45, v0.12.14, v4.4.4, v5.11.1, and v6.1.0
## 0.6.28
- Now more verbose when remote binaries are not available. This is needed since npm is increasingly more quiet by default
and users need to know why builds are falling back to source compiles that might then error out.
## 0.6.27
- Add known node version for node v6
- Stopped bundling dependencies
- Documented method for module authors to avoid bundling node-pre-gyp
- See https://github.com/mapbox/node-pre-gyp/tree/master#configuring for details
## 0.6.26
- Skip validation for nw runtime (https://github.com/mapbox/node-pre-gyp/pull/181) via @fleg
## 0.6.25
- Improved support for auto-detection of electron runtime in `node-pre-gyp.find()`
- Pull request from @enlight - https://github.com/mapbox/node-pre-gyp/pull/187
- Add known node version for 4.4.1 and 5.9.1
## 0.6.24
- Add known node version for 5.8.0, 5.9.0, and 4.4.0.
## 0.6.23
- Add known node version for 0.10.43, 0.12.11, 4.3.2, and 5.7.1.
## 0.6.22
- Add known node version for 4.3.1, and 5.7.0.
## 0.6.21
- Add known node version for 0.10.42, 0.12.10, 4.3.0, and 5.6.0.
## 0.6.20
- Add known node version for 4.2.5, 4.2.6, 5.4.0, 5.4.1,and 5.5.0.
## 0.6.19
- Add known node version for 4.2.4
## 0.6.18
- Add new known node versions for 0.10.x, 0.12.x, 4.x, and 5.x
## 0.6.17
- Re-tagged to fix packaging problem of `Error: Cannot find module 'isarray'`
## 0.6.16
- Added known version in crosswalk for 5.1.0.
## 0.6.15
- Upgraded tar-pack (https://github.com/mapbox/node-pre-gyp/issues/182)
- Support custom binary hosting mirror (https://github.com/mapbox/node-pre-gyp/pull/170)
- Added known version in crosswalk for 4.2.2.
## 0.6.14
- Added node 5.x version
## 0.6.13
- Added more known node 4.x versions
## 0.6.12
- Added support for [Electron](http://electron.atom.io/). Just pass the `--runtime=electron` flag when building/installing. Thanks @zcbenz
## 0.6.11
- Added known node and io.js versions including more 3.x and 4.x versions
## 0.6.10
- Added known node and io.js versions including 3.x and 4.x versions
- Upgraded `tar` dep
## 0.6.9
- Upgraded `rc` dep
- Updated known io.js version: v2.4.0
## 0.6.8
- Upgraded `semver` and `rimraf` deps
- Updated known node and io.js versions
## 0.6.7
- Fixed `node_abi` versions for io.js 1.1.x -> 1.8.x (should be 43, but was stored as 42) (refs https://github.com/iojs/build/issues/94)
## 0.6.6
- Updated with known io.js 2.0.0 version
## 0.6.5
- Now respecting `npm_config_node_gyp` (https://github.com/npm/npm/pull/4887)
- Updated to semver@4.3.2
- Updated known node v0.12.x versions and io.js 1.x versions.
## 0.6.4
- Improved support for `io.js` (@fengmk2)
- Test coverage improvements (@mikemorris)
- Fixed support for `--dist-url` that regressed in 0.6.3
## 0.6.3
- Added support for passing raw options to node-gyp using `--` separator. Flags passed after
the `--` to `node-pre-gyp configure` will be passed directly to gyp, while flags passed
after the `--` to `node-pre-gyp build` will be passed directly to make/visual studio.
- Added `node-pre-gyp configure` command to be able to call `node-gyp configure` directly
- Fix issue with require validation not working on windows 7 (@edgarsilva)
## 0.6.2
- Support for io.js >= v1.0.2
- Deferred require of `request` and `tar` to help speed up command line usage of `node-pre-gyp`.
## 0.6.1
- Fixed bundled `tar` version
## 0.6.0
- BREAKING: node odd releases like v0.11.x now use `major.minor.patch` for `{node_abi}` instead of `NODE_MODULE_VERSION` (#124)
- Added support for `toolset` option in versioning. By default is an empty string but `--toolset` can be passed to publish or install to select alternative binaries that target a custom toolset like C++11. For example to target Visual Studio 2014 modules like node-sqlite3 use `--toolset=v140`.
- Added support for `--no-rollback` option to request that a failed binary test does not remove the binary module but instead leaves it in place.
- Added support for `--update-binary` option to request an existing binary be re-installed and the check for a valid local module be skipped.
- Added support for passing build options from `npm` through `node-pre-gyp` to `node-gyp`: `--nodedir`, `--disturl`, `--python`, and `--msvs_version`
## 0.5.31
- Added support for deducing node_abi for node.js runtime from previous release if the series is even
- Added support for --target=0.10.33
## 0.5.30
- Repackaged with latest bundled deps
## 0.5.29
- Added support for semver `build`.
- Fixed support for downloading from urls that include `+`.
## 0.5.28
- Now reporting unix style paths only in reveal command
## 0.5.27
- Fixed support for auto-detecting s3 bucket name when it contains `.` - @taavo
- Fixed support for installing when path contains a `'` - @halfdan
- Ported tests to mocha
## 0.5.26
- Fix node-webkit support when `--target` option is not provided
## 0.5.25
- Fix bundling of deps
## 0.5.24
- Updated ABI crosswalk to include node v0.10.30 and v0.10.31
## 0.5.23
- Added `reveal` command. Pass no options to get all versioning data as json. Pass a second arg to grab a single versioned property value
- Added support for `--silent` (shortcut for `--loglevel=silent`)
## 0.5.22
- Fixed node-webkit versioning name (NOTE: node-webkit support still experimental)
## 0.5.21
- New package to fix `shasum check failed` error with v0.5.20
## 0.5.20
- Now versioning node-webkit binaries based on major.minor.patch - assuming no compatible ABI across versions (#90)
## 0.5.19
- Updated to know about more node-webkit releases
## 0.5.18
- Updated to know about more node-webkit releases
## 0.5.17
- Updated to know about node v0.10.29 release
## 0.5.16
- Now supporting all aws-sdk configuration parameters (http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-configuring.html) (#86)
## 0.5.15
- Fixed installation of windows packages sub directories on unix systems (#84)
## 0.5.14
- Finished support for cross building using `--target_platform` option (#82)
- Now skipping binary validation on install if target arch/platform do not match the host.
- Removed multi-arch validation for OS X since it required a FAT node.js binary
## 0.5.13
- Fix problem in 0.5.12 whereby the wrong versions of mkdirp and semver were bundled.
## 0.5.12
- Improved support for node-webkit (@Mithgol)
## 0.5.11
- Updated target versions listing
## 0.5.10
- Fixed handling of `-debug` flag passed directly to node-pre-gyp (#72)
- Added optional second arg to `node_pre_gyp.find` to customize the default versioning options used to locate the runtime binary
- Failed install due to `testbinary` check failure no longer leaves behind binary (#70)
## 0.5.9
- Fixed regression in `testbinary` command causing installs to fail on windows with 0.5.7 (#60)
## 0.5.8
- Started bundling deps
## 0.5.7
- Fixed the `testbinary` check, which is used to determine whether to re-download or source compile, to work even in complex dependency situations (#63)
- Exposed the internal `testbinary` command in node-pre-gyp command line tool
- Fixed minor bug so that `fallback_to_build` option is always respected
## 0.5.6
- Added support for versioning on the `name` value in `package.json` (#57).
- Moved to using streams for reading tarball when publishing (#52)
## 0.5.5
- Improved binary validation that also now works with node-webkit (@Mithgol)
- Upgraded test apps to work with node v0.11.x
- Improved test coverage
## 0.5.4
- No longer depends on external install of node-gyp for compiling builds.
## 0.5.3
- Reverted fix for debian/nodejs since it broke windows (#45)
## 0.5.2
- Support for debian systems where the node binary is named `nodejs` (#45)
- Added `bin/node-pre-gyp.cmd` to be able to run command on windows locally (npm creates a `.cmd` automatically when globally installed)
- Updated abi-crosswalk with node v0.10.26 entry.
## 0.5.1
- Various minor bug fixes, several improving windows support for publishing.
## 0.5.0
- Changed property names in `binary` object: now required are `module_name`, `module_path`, and `host`.
- Now `module_path` supports versioning, which allows developers to opt-in to using a versioned install path (#18).
- Added `remote_path` which also supports versioning.
- Changed `remote_uri` to `host`.
## 0.4.2
- Added support for `--target` flag to request cross-compile against a specific node/node-webkit version.
- Added preliminary support for node-webkit
- Fixed support for `--target_arch` option being respected in all cases.
## 0.4.1
- Fixed exception when only stderr is available in binary test (@bendi / #31)
## 0.4.0
- Enforce only `https:` based remote publishing access.
- Added `node-pre-gyp info` command to display listing of published binaries
- Added support for changing the directory node-pre-gyp should build in with the `-C/--directory` option.
- Added support for S3 prefixes.
## 0.3.1
- Added `unpublish` command.
- Fixed module path construction in tests.
- Added ability to disable falling back to build behavior via `npm install --fallback-to-build=false` which overrides the setting in a dependency's package.json `install` target.
## 0.3.0
- Support for packaging all files in `module_path` directory - see `app4` for example
- Added `testpackage` command.
- Changed `clean` command to only delete `.node` not entire `build` directory since node-gyp will handle that.
- `.node` modules must be in a folder of their own since tar-pack will remove everything when it unpacks.

@@ -1 +0,0 @@
../semver/bin/semver.js

@@ -1,38 +0,0 @@
{
  "name": "semver",
  "version": "6.3.1",
  "description": "The semantic version parser used by npm.",
  "main": "semver.js",
  "scripts": {
    "test": "tap test/ --100 --timeout=30",
    "lint": "echo linting disabled",
    "postlint": "template-oss-check",
    "template-oss-apply": "template-oss-apply --force",
    "lintfix": "npm run lint -- --fix",
    "snap": "tap test/ --100 --timeout=30",
    "posttest": "npm run lint"
  },
  "devDependencies": {
    "@npmcli/template-oss": "4.17.0",
    "tap": "^12.7.0"
  },
  "license": "ISC",
  "repository": {
    "type": "git",
    "url": "https://github.com/npm/node-semver.git"
  },
  "bin": {
    "semver": "./bin/semver.js"
  },
  "files": [
    "bin",
    "range.bnf",
    "semver.js"
  ],
  "author": "GitHub Inc.",
  "templateOSS": {
    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
    "content": "./scripts/template-oss",
    "version": "4.17.0"
  }
}

File diff suppressed because it is too large

@@ -1,654 +0,0 @@
semver(1) -- The semantic versioner for npm
===========================================
## Install
```bash
npm install semver
```
## Usage
As a node module:
```js
const semver = require('semver')
semver.valid('1.2.3') // '1.2.3'
semver.valid('a.b.c') // null
semver.clean(' =v1.2.3 ') // '1.2.3'
semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
semver.gt('1.2.3', '9.8.7') // false
semver.lt('1.2.3', '9.8.7') // true
semver.minVersion('>=1.0.0') // '1.0.0'
semver.valid(semver.coerce('v2')) // '2.0.0'
semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7'
```
You can also just load the module for the function that you care about if
you'd like to minimize your footprint.
```js
// load the whole API at once in a single object
const semver = require('semver')
// or just load the bits you need
// all of them listed here, just pick and choose what you want
// classes
const SemVer = require('semver/classes/semver')
const Comparator = require('semver/classes/comparator')
const Range = require('semver/classes/range')
// functions for working with versions
const semverParse = require('semver/functions/parse')
const semverValid = require('semver/functions/valid')
const semverClean = require('semver/functions/clean')
const semverInc = require('semver/functions/inc')
const semverDiff = require('semver/functions/diff')
const semverMajor = require('semver/functions/major')
const semverMinor = require('semver/functions/minor')
const semverPatch = require('semver/functions/patch')
const semverPrerelease = require('semver/functions/prerelease')
const semverCompare = require('semver/functions/compare')
const semverRcompare = require('semver/functions/rcompare')
const semverCompareLoose = require('semver/functions/compare-loose')
const semverCompareBuild = require('semver/functions/compare-build')
const semverSort = require('semver/functions/sort')
const semverRsort = require('semver/functions/rsort')
// low-level comparators between versions
const semverGt = require('semver/functions/gt')
const semverLt = require('semver/functions/lt')
const semverEq = require('semver/functions/eq')
const semverNeq = require('semver/functions/neq')
const semverGte = require('semver/functions/gte')
const semverLte = require('semver/functions/lte')
const semverCmp = require('semver/functions/cmp')
const semverCoerce = require('semver/functions/coerce')
// working with ranges
const semverSatisfies = require('semver/functions/satisfies')
const semverMaxSatisfying = require('semver/ranges/max-satisfying')
const semverMinSatisfying = require('semver/ranges/min-satisfying')
const semverToComparators = require('semver/ranges/to-comparators')
const semverMinVersion = require('semver/ranges/min-version')
const semverValidRange = require('semver/ranges/valid')
const semverOutside = require('semver/ranges/outside')
const semverGtr = require('semver/ranges/gtr')
const semverLtr = require('semver/ranges/ltr')
const semverIntersects = require('semver/ranges/intersects')
const semverSimplifyRange = require('semver/ranges/simplify')
const semverRangeSubset = require('semver/ranges/subset')
```
As a command-line utility:
```
$ semver -h

A JavaScript implementation of the https://semver.org/ specification
Copyright Isaac Z. Schlueter

Usage: semver [options] <version> [<version> [...]]
Prints valid versions sorted by SemVer precedence

Options:
-r --range <range>
        Print versions that match the specified range.

-i --increment [<level>]
        Increment a version by the specified level. Level can
        be one of: major, minor, patch, premajor, preminor,
        prepatch, or prerelease. Default level is 'patch'.
        Only one version may be specified.

--preid <identifier>
        Identifier to be used to prefix premajor, preminor,
        prepatch or prerelease version increments.

-l --loose
        Interpret versions and ranges loosely

-n <0|1>
        This is the base to be used for the prerelease identifier.

-p --include-prerelease
        Always include prerelease versions in range matching

-c --coerce
        Coerce a string into SemVer if possible
        (does not imply --loose)

--rtl
        Coerce version strings right to left

--ltr
        Coerce version strings left to right (default)

Program exits successfully if any valid version satisfies
all supplied ranges, and prints all satisfying versions.

If no satisfying versions are found, then exits failure.

Versions are printed in ascending order, so supplying
multiple versions to the utility will just sort them.
```
## Versions
A "version" is described by the `v2.0.0` specification found at
<https://semver.org/>.
A leading `"="` or `"v"` character is stripped off and ignored.
## Ranges
A `version range` is a set of `comparators` that specify versions
that satisfy the range.
A `comparator` is composed of an `operator` and a `version`. The set
of primitive `operators` is:
* `<` Less than
* `<=` Less than or equal to
* `>` Greater than
* `>=` Greater than or equal to
* `=` Equal. If no operator is specified, then equality is assumed,
so this operator is optional but MAY be included.
For example, the comparator `>=1.2.7` would match the versions
`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
or `1.1.0`. The comparator `>1` is equivalent to `>=2.0.0` and
would match the versions `2.0.0` and `3.1.0`, but not the versions
`1.0.1` or `1.1.0`.
Comparators can be joined by whitespace to form a `comparator set`,
which is satisfied by the **intersection** of all of the comparators
it includes.
A range is composed of one or more comparator sets, joined by `||`. A
version matches a range if and only if every comparator in at least
one of the `||`-separated comparator sets is satisfied by the version.
For example, the range `>=1.2.7 <1.3.0` would match the versions
`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`,
or `1.1.0`.
The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`,
`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`.
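For example, checking those same ranges with `satisfies`:
```js
const semver = require('semver')
semver.satisfies('1.2.8', '>=1.2.7 <1.3.0')          // true
semver.satisfies('1.3.0', '>=1.2.7 <1.3.0')          // false
semver.satisfies('1.4.6', '1.2.7 || >=1.2.9 <2.0.0') // true
semver.satisfies('1.2.8', '1.2.7 || >=1.2.9 <2.0.0') // false
```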
### Prerelease Tags
If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then
it will only be allowed to satisfy comparator sets if at least one
comparator with the same `[major, minor, patch]` tuple also has a
prerelease tag.
For example, the range `>1.2.3-alpha.3` would be allowed to match the
version `1.2.3-alpha.7`, but it would *not* be satisfied by
`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater
than" `1.2.3-alpha.3` according to the SemVer sort rules. The version
range only accepts prerelease tags on the `1.2.3` version.
Version `3.4.5` *would* satisfy the range because it does not have a
prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.
The purpose of this behavior is twofold. First, prerelease versions
frequently are updated very quickly, and contain many breaking changes
that are (by the author's design) not yet fit for public consumption.
Therefore, by default, they are excluded from range-matching
semantics.
Second, a user who has opted into using a prerelease version has
indicated the intent to use *that specific* set of
alpha/beta/rc versions. By including a prerelease tag in the range,
the user is indicating that they are aware of the risk. However, it
is still not appropriate to assume that they have opted into taking a
similar risk on the *next* set of prerelease versions.
Note that this behavior can be suppressed (treating all prerelease
versions as if they were normal versions, for range-matching)
by setting the `includePrerelease` flag on the options
object to any
[functions](https://github.com/npm/node-semver#functions) that do
range matching.
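For example, the default prerelease exclusion and the `includePrerelease` opt-out, using the versions discussed above:
```js
const semver = require('semver')
// same [major, minor, patch] tuple as the range's comparator, so allowed
semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3') // true
// a different tuple, so excluded by default...
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3') // false
// ...unless prerelease matching is explicitly opted into
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3', { includePrerelease: true }) // true
```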
#### Prerelease Identifiers
The method `.inc` takes an additional `identifier` string argument that
will append the value of the string as a prerelease identifier:
```javascript
semver.inc('1.2.3', 'prerelease', 'beta')
// '1.2.4-beta.0'
```
command-line example:
```bash
$ semver 1.2.3 -i prerelease --preid beta
1.2.4-beta.0
```
Which then can be used to increment further:
```bash
$ semver 1.2.4-beta.0 -i prerelease
1.2.4-beta.1
```
#### Prerelease Identifier Base
The method `.inc` takes an optional `identifierBase` string parameter
that lets you choose whether your prerelease number is zero-based or one-based.
Set to `false` to omit the prerelease number altogether.
If you do not specify this parameter, it will default to zero-based.
```javascript
semver.inc('1.2.3', 'prerelease', 'beta', '1')
// '1.2.4-beta.1'
```
```javascript
semver.inc('1.2.3', 'prerelease', 'beta', false)
// '1.2.4-beta'
```
command-line example:
```bash
$ semver 1.2.3 -i prerelease --preid beta -n 1
1.2.4-beta.1
```
```bash
$ semver 1.2.3 -i prerelease --preid beta -n false
1.2.4-beta
```
### Advanced Range Syntax
Advanced range syntax desugars to primitive comparators in
deterministic ways.
Advanced ranges may be combined in the same way as primitive
comparators using white space or `||`.
#### Hyphen Ranges `X.Y.Z - A.B.C`
Specifies an inclusive set.
* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
If a partial version is provided as the first version in the inclusive
range, then the missing pieces are replaced with zeroes.
* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4`
If a partial version is provided as the second version in the
inclusive range, then all versions that start with the supplied parts
of the tuple are accepted, but nothing that would be greater than the
provided tuple parts.
* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0-0`
* `1.2.3 - 2` := `>=1.2.3 <3.0.0-0`
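These desugarings can be verified with `validRange`, which returns the normalized range:
```js
const semver = require('semver')
semver.validRange('1.2.3 - 2.3.4') // '>=1.2.3 <=2.3.4'
semver.validRange('1.2 - 2.3.4')   // '>=1.2.0 <=2.3.4'
semver.validRange('1.2.3 - 2.3')   // '>=1.2.3 <2.4.0-0'
```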
#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`
Any of `X`, `x`, or `*` may be used to "stand in" for one of the
numeric values in the `[major, minor, patch]` tuple.
* `*` := `>=0.0.0` (Any non-prerelease version satisfies, unless
`includePrerelease` is specified, in which case any version at all
satisfies)
* `1.x` := `>=1.0.0 <2.0.0-0` (Matching major version)
* `1.2.x` := `>=1.2.0 <1.3.0-0` (Matching major and minor versions)
A partial version range is treated as an X-Range, so the special
character is in fact optional.
* `""` (empty string) := `*` := `>=0.0.0`
* `1` := `1.x.x` := `>=1.0.0 <2.0.0-0`
* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0-0`
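Again via `validRange`:
```js
const semver = require('semver')
semver.validRange('1.2.x') // '>=1.2.0 <1.3.0-0'
semver.validRange('1')     // '>=1.0.0 <2.0.0-0'
semver.validRange('*')     // '*'
```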
#### Tilde Ranges `~1.2.3` `~1.2` `~1`
Allows patch-level changes if a minor version is specified on the
comparator. Allows minor-level changes if not.
* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0-0`
* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0-0` (Same as `1.2.x`)
* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0-0` (Same as `1.x`)
* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0-0`
* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0-0` (Same as `0.2.x`)
* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0-0` (Same as `0.x`)
* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0-0` Note that prereleases in
the `1.2.3` version will be allowed, if they are greater than or
equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
`1.2.4-beta.2` would not, because it is a prerelease of a
different `[major, minor, patch]` tuple.
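For example:
```js
const semver = require('semver')
semver.validRange('~1.2.3')         // '>=1.2.3 <1.3.0-0'
semver.satisfies('1.2.9', '~1.2.3') // true: patch-level change
semver.satisfies('1.3.0', '~1.2.3') // false: minor-level change
```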
#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4`
Allows changes that do not modify the left-most non-zero element in the
`[major, minor, patch]` tuple. In other words, this allows patch and
minor updates for versions `1.0.0` and above, patch updates for
versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`.
Many authors treat a `0.x` version as if the `x` were the major
"breaking-change" indicator.
Caret ranges are ideal when an author may make breaking changes
between `0.2.4` and `0.3.0` releases, which is a common practice.
However, it presumes that there will *not* be breaking changes between
`0.2.4` and `0.2.5`. It allows for changes that are presumed to be
additive (but non-breaking), according to commonly observed practices.
* `^1.2.3` := `>=1.2.3 <2.0.0-0`
* `^0.2.3` := `>=0.2.3 <0.3.0-0`
* `^0.0.3` := `>=0.0.3 <0.0.4-0`
* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0-0` Note that prereleases in
the `1.2.3` version will be allowed, if they are greater than or
equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
`1.2.4-beta.2` would not, because it is a prerelease of a
different `[major, minor, patch]` tuple.
* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4-0` Note that prereleases in the
`0.0.3` version *only* will be allowed, if they are greater than or
equal to `beta`. So, `0.0.3-pr.2` would be allowed.
When parsing caret ranges, a missing `patch` value desugars to the
number `0`, but will allow flexibility within that value, even if the
major and minor versions are both `0`.
* `^1.2.x` := `>=1.2.0 <2.0.0-0`
* `^0.0.x` := `>=0.0.0 <0.1.0-0`
* `^0.0` := `>=0.0.0 <0.1.0-0`
Missing `minor` and `patch` values will desugar to zero, but also
allow flexibility within those values, even if the major version is
zero.
* `^1.x` := `>=1.0.0 <2.0.0-0`
* `^0.x` := `>=0.0.0 <1.0.0-0`
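For example:
```js
const semver = require('semver')
semver.validRange('^1.2.3') // '>=1.2.3 <2.0.0-0'
semver.validRange('^0.2.3') // '>=0.2.3 <0.3.0-0'
semver.validRange('^0.0.3') // '>=0.0.3 <0.0.4-0'
```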
### Range Grammar
Putting all this together, here is a Backus-Naur grammar for ranges,
for the benefit of parser authors:
```bnf
range-set ::= range ( logical-or range ) *
logical-or ::= ( ' ' ) * '||' ( ' ' ) *
range ::= hyphen | simple ( ' ' simple ) * | ''
hyphen ::= partial ' - ' partial
simple ::= primitive | partial | tilde | caret
primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
xr ::= 'x' | 'X' | '*' | nr
nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
tilde ::= '~' partial
caret ::= '^' partial
qualifier ::= ( '-' pre )? ( '+' build )?
pre ::= parts
build ::= parts
parts ::= part ( '.' part ) *
part ::= nr | [-0-9A-Za-z]+
```
## Functions
All methods and classes take a final `options` object argument. All
options in this object are `false` by default. The options supported
are:
- `loose`: Be more forgiving about not-quite-valid semver strings.
(Any resulting output will always be 100% strict compliant, of
course.) For backwards compatibility reasons, if the `options`
argument is a boolean value instead of an object, it is interpreted
to be the `loose` param.
- `includePrerelease`: Set to suppress the [default
behavior](https://github.com/npm/node-semver#prerelease-tags) of
excluding prerelease tagged versions from ranges unless they are
explicitly opted into.
Strict-mode Comparators and Ranges will be strict about the SemVer
strings that they parse.
* `valid(v)`: Return the parsed version, or null if it's not valid.
* `inc(v, release, options, identifier, identifierBase)`:
Return the version incremented by the release
type (`major`, `premajor`, `minor`, `preminor`, `patch`,
`prepatch`, or `prerelease`), or null if it's not valid
* `premajor` in one call will bump the version up to the next major
version and down to a prerelease of that major version.
`preminor` and `prepatch` work the same way.
* If called from a non-prerelease version, `prerelease` will work the
same as `prepatch`. It increments the patch version and then makes a
prerelease. If the input version is already a prerelease it simply
increments it.
* `identifier` can be used to prefix `premajor`, `preminor`,
`prepatch`, or `prerelease` version increments. `identifierBase`
is the base to be used for the `prerelease` identifier.
* `prerelease(v)`: Returns an array of prerelease components, or null
if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]`
* `major(v)`: Return the major version number.
* `minor(v)`: Return the minor version number.
* `patch(v)`: Return the patch version number.
* `intersects(r1, r2, loose)`: Return true if the two supplied ranges
or comparators intersect.
* `parse(v)`: Attempt to parse a string as a semantic version, returning either
a `SemVer` object or `null`.
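For example, the `inc` release types in action (results follow the rules above):
```js
const semver = require('semver')
semver.inc('1.2.3', 'minor')        // '1.3.0'
semver.inc('1.2.3', 'premajor')     // '2.0.0-0'
semver.inc('1.2.3', 'prerelease')   // '1.2.4-0' (same as 'prepatch' here)
semver.inc('1.2.4-0', 'prerelease') // '1.2.4-1' (already a prerelease, so just incremented)
```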
### Comparison
* `gt(v1, v2)`: `v1 > v2`
* `gte(v1, v2)`: `v1 >= v2`
* `lt(v1, v2)`: `v1 < v2`
* `lte(v1, v2)`: `v1 <= v2`
* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,
even if they're not the same string. You already know how to
compare strings.
* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.
* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call
the corresponding function above. `"==="` and `"!=="` do simple
string comparison, but are included for completeness. Throws if an
invalid comparison string is provided.
* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if
`v2` is greater. Sorts in ascending order if passed to `Array.sort()`.
* `rcompare(v1, v2)`: The reverse of `compare`. Sorts an array of versions
in descending order when passed to `Array.sort()`.
* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions
are equal. Sorts in ascending order if passed to `Array.sort()`.
* `compareLoose(v1, v2)`: Short for `compare(v1, v2, { loose: true })`.
* `diff(v1, v2)`: Returns the difference between two versions by the release type
(`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
or null if the versions are the same.
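For example:
```js
const semver = require('semver')
semver.compare('1.2.3', '2.0.0')  // -1
semver.cmp('1.2.3', '<', '2.0.0') // true
semver.diff('1.2.3', '1.3.0')     // 'minor'
semver.diff('1.2.3', '1.2.3')     // null
```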
### Sorting
* `sort(versions)`: Returns a sorted array of versions based on the `compareBuild`
function.
* `rsort(versions)`: The reverse of `sort`. Returns an array of versions based on
the `compareBuild` function in descending order.
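Note that sorting is numeric per component, not lexical:
```js
const semver = require('semver')
semver.sort(['1.10.0', '1.2.0', '1.9.1'])  // ['1.2.0', '1.9.1', '1.10.0']
semver.rsort(['1.10.0', '1.2.0', '1.9.1']) // ['1.10.0', '1.9.1', '1.2.0']
```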
### Comparators
* `intersects(comparator)`: Return true if the comparators intersect
### Ranges
* `validRange(range)`: Return the valid range or null if it's not valid
* `satisfies(version, range)`: Return true if the version satisfies the
range.
* `maxSatisfying(versions, range)`: Return the highest version in the list
that satisfies the range, or `null` if none of them do.
* `minSatisfying(versions, range)`: Return the lowest version in the list
that satisfies the range, or `null` if none of them do.
* `minVersion(range)`: Return the lowest version that can match
the given range.
* `gtr(version, range)`: Return `true` if the version is greater than all the
versions possible in the range.
* `ltr(version, range)`: Return `true` if the version is less than all the
versions possible in the range.
* `outside(version, range, hilo)`: Return true if the version is outside
the bounds of the range in either the high or low direction. The
`hilo` argument must be either the string `'>'` or `'<'`. (This is
the function called by `gtr` and `ltr`.)
* `intersects(range)`: Return true if any of the range comparators intersect.
* `simplifyRange(versions, range)`: Return a "simplified" range that
matches the same items in the `versions` list as the range specified. Note
that it does *not* guarantee that it would match the same versions in all
cases, only for the set of versions provided. This is useful when
generating ranges by joining together multiple versions with `||`
programmatically, to provide the user with something a bit more
ergonomic. If the provided range is shorter in string-length than the
generated range, then that is returned.
* `subset(subRange, superRange)`: Return `true` if the `subRange` range is
entirely contained by the `superRange` range.
Note that, since ranges may be non-contiguous, a version might not be
greater than a range, less than a range, *or* satisfy a range! For
example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`
until `2.0.0`, so version `1.2.10` would not be greater than the
range (because `2.0.1` satisfies, which is higher), nor less than the
range (since `1.2.8` satisfies, which is lower), and it also does not
satisfy the range.
If you want to know if a version satisfies or does not satisfy a
range, use the `satisfies(version, range)` function.
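For example, the non-contiguous range discussed above:
```js
const semver = require('semver')
const range = '1.2 <1.2.9 || >2.0.0'
semver.satisfies('1.2.10', range) // false: it sits in the hole
semver.gtr('1.2.10', range)       // false: versions above 2.0.0 still satisfy
semver.ltr('1.2.10', range)       // false: versions below 1.2.9 still satisfy
```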
### Coercion
* `coerce(version, options)`: Coerces a string to semver if possible
This aims to provide a very forgiving translation of a non-semver string to
semver. It looks for the first digit in a string and consumes all
remaining characters which satisfy at least a partial semver (e.g., `1`,
`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer
versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All
surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes
`3.4.0`). Only text which lacks digits will fail coercion (`version one`
is not valid). The maximum length for any semver component considered for
coercion is 16 characters; longer components will be ignored
(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any
semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value
components are invalid (`9999999999999999.4.7.4` is likely invalid).
If the `options.rtl` flag is set, then `coerce` will return the right-most
coercible tuple that does not share an ending index with a longer coercible
tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not
`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of
any other overlapping SemVer tuple.
If the `options.includePrerelease` flag is set, then the `coerce` result will contain
prerelease and build parts of a version. For example, `1.2.3.4-rc.1+rev.2`
will preserve prerelease `rc.1` and build `rev.2` in the result.
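For example:
```js
const semver = require('semver')
semver.coerce('v2').version                     // '2.0.0'
semver.coerce('42.6.7.9.3-alpha').version       // '42.6.7'
semver.coerce('1.2.3.4', { rtl: true }).version // '2.3.4'
semver.coerce('version one')                    // null
```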
### Clean
* `clean(version)`: Clean a string to be a valid semver if possible
This will return a cleaned and trimmed semver version. If the provided
version is not valid a null will be returned. This does not work for
ranges.
ex.
* `s.clean(' = v 2.1.5foo')`: `null`
* `s.clean(' = v 2.1.5foo', { loose: true })`: `'2.1.5-foo'`
* `s.clean(' = v 2.1.5-foo')`: `null`
* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'`
* `s.clean('=v2.1.5')`: `'2.1.5'`
* `s.clean(' =v2.1.5')`: `'2.1.5'`
* `s.clean(' 2.1.5 ')`: `'2.1.5'`
* `s.clean('~1.0.0')`: `null`
## Constants
As a convenience, helper constants are exported to provide information about what `node-semver` supports:
### `RELEASE_TYPES`
- major
- premajor
- minor
- preminor
- patch
- prepatch
- prerelease
```
const semver = require('semver');
if (semver.RELEASE_TYPES.includes(arbitraryUserInput)) {
console.log('This is a valid release type!');
} else {
console.warn('This is NOT a valid release type!');
}
```
### `SEMVER_SPEC_VERSION`
2.0.0
```
const semver = require('semver');
console.log('We are currently using the semver specification version:', semver.SEMVER_SPEC_VERSION);
```
## Exported Modules
<!--
TODO: Make sure that all of these items are documented (classes aren't,
eg), and then pull the module name into the documentation for that specific
thing.
-->
You may pull in just the part of this semver utility that you need if you
are sensitive to packing and tree-shaking concerns. The main
`require('semver')` export uses getter functions to lazily load the parts
of the API that are used.
The following modules are available:
* `require('semver')`
* `require('semver/classes')`
* `require('semver/classes/comparator')`
* `require('semver/classes/range')`
* `require('semver/classes/semver')`
* `require('semver/functions/clean')`
* `require('semver/functions/cmp')`
* `require('semver/functions/coerce')`
* `require('semver/functions/compare')`
* `require('semver/functions/compare-build')`
* `require('semver/functions/compare-loose')`
* `require('semver/functions/diff')`
* `require('semver/functions/eq')`
* `require('semver/functions/gt')`
* `require('semver/functions/gte')`
* `require('semver/functions/inc')`
* `require('semver/functions/lt')`
* `require('semver/functions/lte')`
* `require('semver/functions/major')`
* `require('semver/functions/minor')`
* `require('semver/functions/neq')`
* `require('semver/functions/parse')`
* `require('semver/functions/patch')`
* `require('semver/functions/prerelease')`
* `require('semver/functions/rcompare')`
* `require('semver/functions/rsort')`
* `require('semver/functions/satisfies')`
* `require('semver/functions/sort')`
* `require('semver/functions/valid')`
* `require('semver/ranges/gtr')`
* `require('semver/ranges/intersects')`
* `require('semver/ranges/ltr')`
* `require('semver/ranges/max-satisfying')`
* `require('semver/ranges/min-satisfying')`
* `require('semver/ranges/min-version')`
* `require('semver/ranges/outside')`
* `require('semver/ranges/simplify')`
* `require('semver/ranges/subset')`
* `require('semver/ranges/to-comparators')`
* `require('semver/ranges/valid')`
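For example, a footprint-sensitive consumer can load a single function:
```js
// pulls in only satisfies() and its internal dependencies
const satisfies = require('semver/functions/satisfies')
satisfies('1.2.3', '^1.0.0') // true
```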

@@ -1,188 +0,0 @@
#!/usr/bin/env node
// Standalone semver comparison program.
// Exits successfully and prints matching version(s) if
// any supplied version is valid and passes all tests.
const argv = process.argv.slice(2)
let versions = []
const range = []
let inc = null
const version = require('../package.json').version
let loose = false
let includePrerelease = false
let coerce = false
let rtl = false
let identifier
let identifierBase
const semver = require('../')
const parseOptions = require('../internal/parse-options')
let reverse = false
let options = {}

const main = () => {
  if (!argv.length) {
    return help()
  }
  while (argv.length) {
    let a = argv.shift()
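    // support '--flag=value' style arguments: split at the first '=' and
    // push the value back onto argv so the switch below sees it next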
    const indexOfEqualSign = a.indexOf('=')
    if (indexOfEqualSign !== -1) {
      const value = a.slice(indexOfEqualSign + 1)
      a = a.slice(0, indexOfEqualSign)
      argv.unshift(value)
    }
    switch (a) {
      case '-rv': case '-rev': case '--rev': case '--reverse':
        reverse = true
        break
      case '-l': case '--loose':
        loose = true
        break
      case '-p': case '--include-prerelease':
        includePrerelease = true
        break
      case '-v': case '--version':
        versions.push(argv.shift())
        break
      case '-i': case '--inc': case '--increment':
        switch (argv[0]) {
          case 'major': case 'minor': case 'patch': case 'prerelease':
          case 'premajor': case 'preminor': case 'prepatch':
            inc = argv.shift()
            break
          default:
            inc = 'patch'
            break
        }
        break
      case '--preid':
        identifier = argv.shift()
        break
      case '-r': case '--range':
        range.push(argv.shift())
        break
      case '-n':
        identifierBase = argv.shift()
        if (identifierBase === 'false') {
          identifierBase = false
        }
        break
      case '-c': case '--coerce':
        coerce = true
        break
      case '--rtl':
        rtl = true
        break
      case '--ltr':
        rtl = false
        break
      case '-h': case '--help': case '-?':
        return help()
      default:
        versions.push(a)
        break
    }
  }

  options = parseOptions({ loose, includePrerelease, rtl })

  versions = versions.map((v) => {
    return coerce ? (semver.coerce(v, options) || { version: v }).version : v
  }).filter((v) => {
    return semver.valid(v)
  })
  if (!versions.length) {
    return fail()
  }

  if (inc && (versions.length !== 1 || range.length)) {
    return failInc()
  }

  for (let i = 0, l = range.length; i < l; i++) {
    versions = versions.filter((v) => {
      return semver.satisfies(v, range[i], options)
    })
    if (!versions.length) {
      return fail()
    }
  }
  versions
    .sort((a, b) => semver[reverse ? 'rcompare' : 'compare'](a, b, options))
    .map(v => semver.clean(v, options))
    .map(v => inc ? semver.inc(v, inc, options, identifier, identifierBase) : v)
    .forEach(v => console.log(v))
}

const failInc = () => {
  console.error('--inc can only be used on a single version with no range')
  fail()
}

const fail = () => process.exit(1)

const help = () => console.log(
`SemVer ${version}

A JavaScript implementation of the https://semver.org/ specification
Copyright Isaac Z. Schlueter

Usage: semver [options] <version> [<version> [...]]
Prints valid versions sorted by SemVer precedence

Options:
-r --range <range>
        Print versions that match the specified range.

-i --increment [<level>]
        Increment a version by the specified level. Level can
        be one of: major, minor, patch, premajor, preminor,
        prepatch, or prerelease. Default level is 'patch'.
        Only one version may be specified.

--preid <identifier>
        Identifier to be used to prefix premajor, preminor,
        prepatch or prerelease version increments.

-l --loose
        Interpret versions and ranges loosely

-p --include-prerelease
        Always include prerelease versions in range matching

-c --coerce
        Coerce a string into SemVer if possible
        (does not imply --loose)

--rtl
        Coerce version strings right to left

--ltr
        Coerce version strings left to right (default)

-n <base>
        Base number to be used for the prerelease identifier.
        Can be either 0 or 1, or false to omit the number altogether.
        Defaults to 0.

Program exits successfully if any valid version satisfies
all supplied ranges, and prints all satisfying versions.

If no satisfying versions are found, then exits failure.

Versions are printed in ascending order, so supplying
multiple versions to the utility will just sort them.`)

main()

@@ -1,141 +0,0 @@
const ANY = Symbol('SemVer ANY')
// hoisted class for cyclic dependency
class Comparator {
  static get ANY () {
    return ANY
  }

  constructor (comp, options) {
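    // reuse an existing Comparator when its looseness matches;
    // otherwise fall through and re-parse its string value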
    options = parseOptions(options)

    if (comp instanceof Comparator) {
      if (comp.loose === !!options.loose) {
        return comp
      } else {
        comp = comp.value
      }
    }

    comp = comp.trim().split(/\s+/).join(' ')
    debug('comparator', comp, options)
    this.options = options
    this.loose = !!options.loose
    this.parse(comp)

    if (this.semver === ANY) {
      this.value = ''
    } else {
      this.value = this.operator + this.semver.version
    }

    debug('comp', this)
  }

  parse (comp) {
    const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
    const m = comp.match(r)

    if (!m) {
      throw new TypeError(`Invalid comparator: ${comp}`)
    }

    this.operator = m[1] !== undefined ? m[1] : ''
    if (this.operator === '=') {
      this.operator = ''
    }

    // if it literally is just '>' or '' then allow anything.
    if (!m[2]) {
      this.semver = ANY
    } else {
      this.semver = new SemVer(m[2], this.options.loose)
    }
  }

  toString () {
    return this.value
  }

  test (version) {
    debug('Comparator.test', version, this.options.loose)

    if (this.semver === ANY || version === ANY) {
      return true
    }

    if (typeof version === 'string') {
      try {
        version = new SemVer(version, this.options)
      } catch (er) {
        return false
      }
    }

    return cmp(version, this.operator, this.semver, this.options)
  }

  intersects (comp, options) {
    if (!(comp instanceof Comparator)) {
      throw new TypeError('a Comparator is required')
    }

    if (this.operator === '') {
      if (this.value === '') {
        return true
      }
      return new Range(comp.value, options).test(this.value)
    } else if (comp.operator === '') {
      if (comp.value === '') {
        return true
      }
      return new Range(this.value, options).test(comp.semver)
    }

    options = parseOptions(options)

    // Special cases where nothing can possibly be lower
    if (options.includePrerelease &&
      (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) {
      return false
    }
    if (!options.includePrerelease &&
      (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) {
      return false
    }

    // Same direction increasing (> or >=)
    if (this.operator.startsWith('>') && comp.operator.startsWith('>')) {
      return true
    }
    // Same direction decreasing (< or <=)
    if (this.operator.startsWith('<') && comp.operator.startsWith('<')) {
      return true
    }
    // same SemVer and both sides are inclusive (<= or >=)
    if (
      (this.semver.version === comp.semver.version) &&
      this.operator.includes('=') && comp.operator.includes('=')) {
      return true
    }
    // opposite directions less than
    if (cmp(this.semver, '<', comp.semver, options) &&
      this.operator.startsWith('>') && comp.operator.startsWith('<')) {
      return true
    }
    // opposite directions greater than
    if (cmp(this.semver, '>', comp.semver, options) &&
      this.operator.startsWith('<') && comp.operator.startsWith('>')) {
      return true
    }
    return false
  }
}

module.exports = Comparator

const parseOptions = require('../internal/parse-options')
const { safeRe: re, t } = require('../internal/re')
const cmp = require('../functions/cmp')
const debug = require('../internal/debug')
const SemVer = require('./semver')
const Range = require('./range')

@@ -1,554 +0,0 @@
const SPACE_CHARACTERS = /\s+/g

// hoisted class for cyclic dependency
class Range {
  constructor (range, options) {
    options = parseOptions(options)

    if (range instanceof Range) {
      if (
        range.loose === !!options.loose &&
        range.includePrerelease === !!options.includePrerelease
      ) {
        return range
      } else {
        return new Range(range.raw, options)
      }
    }

    if (range instanceof Comparator) {
      // just put it in the set and return
      this.raw = range.value
      this.set = [[range]]
      this.formatted = undefined
      return this
    }

    this.options = options
    this.loose = !!options.loose
    this.includePrerelease = !!options.includePrerelease

    // First reduce all whitespace as much as possible so we do not have to rely
    // on potentially slow regexes like \s*. This is then stored and used for
    // future error messages as well.
    this.raw = range.trim().replace(SPACE_CHARACTERS, ' ')

    // First, split on ||
    this.set = this.raw
      .split('||')
      // map the range to a 2d array of comparators
      .map(r => this.parseRange(r.trim()))
      // throw out any comparator lists that are empty
      // this generally means that it was not a valid range, which is allowed
      // in loose mode, but will still throw if the WHOLE range is invalid.
      .filter(c => c.length)

    if (!this.set.length) {
      throw new TypeError(`Invalid SemVer Range: ${this.raw}`)
    }

    // if we have any that are not the null set, throw out null sets.
    if (this.set.length > 1) {
      // keep the first one, in case they're all null sets
      const first = this.set[0]
      this.set = this.set.filter(c => !isNullSet(c[0]))
      if (this.set.length === 0) {
        this.set = [first]
      } else if (this.set.length > 1) {
        // if we have any that are *, then the range is just *
        for (const c of this.set) {
          if (c.length === 1 && isAny(c[0])) {
            this.set = [c]
            break
          }
        }
      }
    }

    this.formatted = undefined
  }

  get range () {
    if (this.formatted === undefined) {
      this.formatted = ''
      for (let i = 0; i < this.set.length; i++) {
        if (i > 0) {
          this.formatted += '||'
        }
        const comps = this.set[i]
        for (let k = 0; k < comps.length; k++) {
          if (k > 0) {
            this.formatted += ' '
          }
          this.formatted += comps[k].toString().trim()
        }
      }
    }
    return this.formatted
  }

  format () {
    return this.range
  }

  toString () {
    return this.range
  }

  parseRange (range) {
    // memoize range parsing for performance.
    // this is a very hot path, and fully deterministic.
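    // the cache key pairs the option bit flags with the raw range text so
    // parses done with different options never share a cache entry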
    const memoOpts =
      (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) |
      (this.options.loose && FLAG_LOOSE)
    const memoKey = memoOpts + ':' + range
    const cached = cache.get(memoKey)
    if (cached) {
      return cached
    }

    const loose = this.options.loose
    // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
    const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
    range = range.replace(hr, hyphenReplace(this.options.includePrerelease))
    debug('hyphen replace', range)

    // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
    range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
    debug('comparator trim', range)

    // `~ 1.2.3` => `~1.2.3`
    range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
    debug('tilde trim', range)

    // `^ 1.2.3` => `^1.2.3`
    range = range.replace(re[t.CARETTRIM], caretTrimReplace)
    debug('caret trim', range)

    // At this point, the range is completely trimmed and
    // ready to be split into comparators.
    let rangeList = range
      .split(' ')
      .map(comp => parseComparator(comp, this.options))
      .join(' ')
      .split(/\s+/)
      // >=0.0.0 is equivalent to *
      .map(comp => replaceGTE0(comp, this.options))

    if (loose) {
      // in loose mode, throw out any that are not valid comparators
      rangeList = rangeList.filter(comp => {
        debug('loose invalid filter', comp, this.options)
        return !!comp.match(re[t.COMPARATORLOOSE])
      })
    }
    debug('range list', rangeList)

    // if any comparators are the null set, then replace with JUST null set
    // if more than one comparator, remove any * comparators
    // also, don't include the same comparator more than once
    const rangeMap = new Map()
    const comparators = rangeList.map(comp => new Comparator(comp, this.options))
    for (const comp of comparators) {
      if (isNullSet(comp)) {
        return [comp]
      }
      rangeMap.set(comp.value, comp)
    }
    if (rangeMap.size > 1 && rangeMap.has('')) {
      rangeMap.delete('')
    }

    const result = [...rangeMap.values()]
    cache.set(memoKey, result)
    return result
  }

  intersects (range, options) {
    if (!(range instanceof Range)) {
      throw new TypeError('a Range is required')
    }

    return this.set.some((thisComparators) => {
      return (
        isSatisfiable(thisComparators, options) &&
        range.set.some((rangeComparators) => {
          return (
            isSatisfiable(rangeComparators, options) &&
            thisComparators.every((thisComparator) => {
              return rangeComparators.every((rangeComparator) => {
                return thisComparator.intersects(rangeComparator, options)
              })
            })
          )
        })
      )
    })
  }

  // if ANY of the sets match ALL of its comparators, then pass
  test (version) {
    if (!version) {
      return false
    }

    if (typeof version === 'string') {
      try {
        version = new SemVer(version, this.options)
      } catch (er) {
        return false
      }
    }

    for (let i = 0; i < this.set.length; i++) {
      if (testSet(this.set[i], version, this.options)) {
        return true
      }
    }

    return false
  }
}

module.exports = Range

const LRU = require('../internal/lrucache')
const cache = new LRU()

const parseOptions = require('../internal/parse-options')
const Comparator = require('./comparator')
const debug = require('../internal/debug')
const SemVer = require('./semver')
const {
  safeRe: re,
  t,
  comparatorTrimReplace,
  tildeTrimReplace,
  caretTrimReplace,
} = require('../internal/re')
const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require('../internal/constants')

const isNullSet = c => c.value === '<0.0.0-0'
const isAny = c => c.value === ''

// take a set of comparators and determine whether there
// exists a version which can satisfy it
const isSatisfiable = (comparators, options) => {
  let result = true
  const remainingComparators = comparators.slice()
  let testComparator = remainingComparators.pop()

  while (result && remainingComparators.length) {
    result = remainingComparators.every((otherComparator) => {
      return testComparator.intersects(otherComparator, options)
    })
    testComparator = remainingComparators.pop()
  }

  return result
}

// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
const parseComparator = (comp, options) => {
  debug('comp', comp, options)
  comp = replaceCarets(comp, options)
  debug('caret', comp)
  comp = replaceTildes(comp, options)
  debug('tildes', comp)
  comp = replaceXRanges(comp, options)
  debug('xrange', comp)
  comp = replaceStars(comp, options)
  debug('stars', comp)
  return comp
}

const isX = id => !id || id.toLowerCase() === 'x' || id === '*'

// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0
// ~0.0.1 --> >=0.0.1 <0.1.0-0
const replaceTildes = (comp, options) => {
  return comp
    .trim()
    .split(/\s+/)
    .map((c) => replaceTilde(c, options))
    .join(' ')
}

const replaceTilde = (comp, options) => {
  const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
  return comp.replace(r, (_, M, m, p, pr) => {
    debug('tilde', comp, _, M, m, p, pr)
    let ret

    if (isX(M)) {
      ret = ''
    } else if (isX(m)) {
      ret = `>=${M}.0.0 <${+M + 1}.0.0-0`
    } else if (isX(p)) {
      // ~1.2 == >=1.2.0 <1.3.0-0
      ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0`
    } else if (pr) {
      debug('replaceTilde pr', pr)
      ret = `>=${M}.${m}.${p}-${pr
      } <${M}.${+m + 1}.0-0`
    } else {
      // ~1.2.3 == >=1.2.3 <1.3.0-0
      ret = `>=${M}.${m}.${p
      } <${M}.${+m + 1}.0-0`
    }

    debug('tilde return', ret)
    return ret
  })
}

// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0
// ^1.2.3 --> >=1.2.3 <2.0.0-0
// ^1.2.0 --> >=1.2.0 <2.0.0-0
// ^0.0.1 --> >=0.0.1 <0.0.2-0
// ^0.1.0 --> >=0.1.0 <0.2.0-0
const replaceCarets = (comp, options) => {
  return comp
    .trim()
    .split(/\s+/)
    .map((c) => replaceCaret(c, options))
    .join(' ')
}

const replaceCaret = (comp, options) => {
  debug('caret', comp, options)
  const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
  const z = options.includePrerelease ? '-0' : ''
  return comp.replace(r, (_, M, m, p, pr) => {
    debug('caret', comp, _, M, m, p, pr)
    let ret

    if (isX(M)) {
      ret = ''
    } else if (isX(m)) {
      ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0`
    } else if (isX(p)) {
      if (M === '0') {
        ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0`
      } else {
        ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0`
      }
    } else if (pr) {
      debug('replaceCaret pr', pr)
      if (M === '0') {
        if (m === '0') {
          ret = `>=${M}.${m}.${p}-${pr
          } <${M}.${m}.${+p + 1}-0`
        } else {
          ret = `>=${M}.${m}.${p}-${pr
          } <${M}.${+m + 1}.0-0`
        }
      } else {
        ret = `>=${M}.${m}.${p}-${pr
        } <${+M + 1}.0.0-0`
      }
    } else {
      debug('no pr')
      if (M === '0') {
        if (m === '0') {
          ret = `>=${M}.${m}.${p
          }${z} <${M}.${m}.${+p + 1}-0`
        } else {
          ret = `>=${M}.${m}.${p
          }${z} <${M}.${+m + 1}.0-0`
        }
      } else {
        ret = `>=${M}.${m}.${p
        } <${+M + 1}.0.0-0`
      }
    }

    debug('caret return', ret)
    return ret
  })
}

const replaceXRanges = (comp, options) => {
  debug('replaceXRanges', comp, options)
  return comp
    .split(/\s+/)
    .map((c) => replaceXRange(c, options))
    .join(' ')
}

const replaceXRange = (comp, options) => {
  comp = comp.trim()
  const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
  return comp.replace(r, (ret, gtlt, M, m, p, pr) => {
    debug('xRange', comp, ret, gtlt, M, m, p, pr)
    const xM = isX(M)
    const xm = xM || isX(m)
    const xp = xm || isX(p)
    const anyX = xp

    if (gtlt === '=' && anyX) {
      gtlt = ''
    }

    // if we're including prereleases in the match, then we need
    // to fix this to -0, the lowest possible prerelease value
    pr = options.includePrerelease ? '-0' : ''

    if (xM) {
      if (gtlt === '>' || gtlt === '<') {
        // nothing is allowed
        ret = '<0.0.0-0'
      } else {
        // nothing is forbidden
        ret = '*'
      }
    } else if (gtlt && anyX) {
      // we know patch is an x, because we have any x at all.
      // replace X with 0
      if (xm) {
        m = 0
      }
      p = 0

      if (gtlt === '>') {
        // >1 => >=2.0.0
        // >1.2 => >=1.3.0
        gtlt = '>='
        if (xm) {
          M = +M + 1
          m = 0
          p = 0
        } else {
          m = +m + 1
          p = 0
        }
      } else if (gtlt === '<=') {
        // <=0.7.x is actually <0.8.0, since any 0.7.x should
        // pass. Similarly, <=7.x is actually <8.0.0, etc.
        gtlt = '<'
        if (xm) {
          M = +M + 1
        } else {
          m = +m + 1
        }
      }

      if (gtlt === '<') {
        pr = '-0'
      }

      ret = `${gtlt + M}.${m}.${p}${pr}`
    } else if (xm) {
      ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0`
    } else if (xp) {
      ret = `>=${M}.${m}.0${pr
      } <${M}.${+m + 1}.0-0`
    }

    debug('xRange return', ret)
    return ret
  })
}

// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
const replaceStars = (comp, options) => {
  debug('replaceStars', comp, options)
  // Looseness is ignored here. star is always as loose as it gets!
  return comp
    .trim()
    .replace(re[t.STAR], '')
}

const replaceGTE0 = (comp, options) => {
  debug('replaceGTE0', comp, options)
  return comp
    .trim()
    .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '')
}

// This function is passed to string.replace(re[t.HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0-0
// TODO build?
const hyphenReplace = incPr => ($0,
  from, fM, fm, fp, fpr, fb,
  to, tM, tm, tp, tpr) => {
  if (isX(fM)) {
    from = ''
  } else if (isX(fm)) {
    from = `>=${fM}.0.0${incPr ? '-0' : ''}`
  } else if (isX(fp)) {
    from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}`
  } else if (fpr) {
    from = `>=${from}`
  } else {
    from = `>=${from}${incPr ? '-0' : ''}`
  }

  if (isX(tM)) {
    to = ''
  } else if (isX(tm)) {
    to = `<${+tM + 1}.0.0-0`
  } else if (isX(tp)) {
to = `<${tM}.${+tm + 1}.0-0`
} else if (tpr) {
to = `<=${tM}.${tm}.${tp}-${tpr}`
} else if (incPr) {
to = `<${tM}.${tm}.${+tp + 1}-0`
} else {
to = `<=${to}`
}
return `${from} ${to}`.trim()
}
const testSet = (set, version, options) => {
for (let i = 0; i < set.length; i++) {
if (!set[i].test(version)) {
return false
}
}
if (version.prerelease.length && !options.includePrerelease) {
// Find the set of versions that are allowed to have prereleases
// For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
// That should allow `1.2.3-pr.2` to pass.
// However, `1.2.4-alpha.notready` should NOT be allowed,
// even though it's within the range set by the comparators.
for (let i = 0; i < set.length; i++) {
debug(set[i].semver)
if (set[i].semver === Comparator.ANY) {
continue
}
if (set[i].semver.prerelease.length > 0) {
const allowed = set[i].semver
if (allowed.major === version.major &&
allowed.minor === version.minor &&
allowed.patch === version.patch) {
return true
}
}
}
// Version has a -pre, but it's not one of the ones we like.
return false
}
return true
}
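// Usage sketch for the Range class defined above (the require path assumes
// semver's published classes/ layout):
//   const Range = require('semver/classes/range')
//   const r = new Range('>=1.2.7 <1.3.0')
//   r.test('1.2.8') // => true
//   r.test('1.3.9') // => false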

@ -1,302 +0,0 @@
const debug = require('../internal/debug')
const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants')
const { safeRe: re, t } = require('../internal/re')
const parseOptions = require('../internal/parse-options')
const { compareIdentifiers } = require('../internal/identifiers')
class SemVer {
constructor (version, options) {
options = parseOptions(options)
if (version instanceof SemVer) {
if (version.loose === !!options.loose &&
version.includePrerelease === !!options.includePrerelease) {
return version
} else {
version = version.version
}
} else if (typeof version !== 'string') {
throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`)
}
if (version.length > MAX_LENGTH) {
throw new TypeError(
`version is longer than ${MAX_LENGTH} characters`
)
}
debug('SemVer', version, options)
this.options = options
this.loose = !!options.loose
// this isn't actually relevant for versions, but keep it so that we
// don't run into trouble passing this.options around.
this.includePrerelease = !!options.includePrerelease
const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
if (!m) {
throw new TypeError(`Invalid Version: ${version}`)
}
this.raw = version
// these are actually numbers
this.major = +m[1]
this.minor = +m[2]
this.patch = +m[3]
if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
throw new TypeError('Invalid major version')
}
if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
throw new TypeError('Invalid minor version')
}
if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
throw new TypeError('Invalid patch version')
}
// numberify any prerelease numeric ids
if (!m[4]) {
this.prerelease = []
} else {
this.prerelease = m[4].split('.').map((id) => {
if (/^[0-9]+$/.test(id)) {
const num = +id
if (num >= 0 && num < MAX_SAFE_INTEGER) {
return num
}
}
return id
})
}
this.build = m[5] ? m[5].split('.') : []
this.format()
}
format () {
this.version = `${this.major}.${this.minor}.${this.patch}`
if (this.prerelease.length) {
this.version += `-${this.prerelease.join('.')}`
}
return this.version
}
toString () {
return this.version
}
compare (other) {
debug('SemVer.compare', this.version, this.options, other)
if (!(other instanceof SemVer)) {
if (typeof other === 'string' && other === this.version) {
return 0
}
other = new SemVer(other, this.options)
}
if (other.version === this.version) {
return 0
}
return this.compareMain(other) || this.comparePre(other)
}
compareMain (other) {
if (!(other instanceof SemVer)) {
other = new SemVer(other, this.options)
}
return (
compareIdentifiers(this.major, other.major) ||
compareIdentifiers(this.minor, other.minor) ||
compareIdentifiers(this.patch, other.patch)
)
}
comparePre (other) {
if (!(other instanceof SemVer)) {
other = new SemVer(other, this.options)
}
// NOT having a prerelease is > having one
if (this.prerelease.length && !other.prerelease.length) {
return -1
} else if (!this.prerelease.length && other.prerelease.length) {
return 1
} else if (!this.prerelease.length && !other.prerelease.length) {
return 0
}
let i = 0
do {
const a = this.prerelease[i]
const b = other.prerelease[i]
debug('prerelease compare', i, a, b)
if (a === undefined && b === undefined) {
return 0
} else if (b === undefined) {
return 1
} else if (a === undefined) {
return -1
} else if (a === b) {
continue
} else {
return compareIdentifiers(a, b)
}
} while (++i)
}
compareBuild (other) {
if (!(other instanceof SemVer)) {
other = new SemVer(other, this.options)
}
let i = 0
do {
const a = this.build[i]
const b = other.build[i]
debug('build compare', i, a, b)
if (a === undefined && b === undefined) {
return 0
} else if (b === undefined) {
return 1
} else if (a === undefined) {
return -1
} else if (a === b) {
continue
} else {
return compareIdentifiers(a, b)
}
} while (++i)
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc (release, identifier, identifierBase) {
switch (release) {
case 'premajor':
this.prerelease.length = 0
this.patch = 0
this.minor = 0
this.major++
this.inc('pre', identifier, identifierBase)
break
case 'preminor':
this.prerelease.length = 0
this.patch = 0
this.minor++
this.inc('pre', identifier, identifierBase)
break
case 'prepatch':
// If this is already a prerelease, it will bump to the next version
// drop any prereleases that might already exist, since they are not
// relevant at this point.
this.prerelease.length = 0
this.inc('patch', identifier, identifierBase)
this.inc('pre', identifier, identifierBase)
break
// If the input is a non-prerelease version, this acts the same as
// prepatch.
case 'prerelease':
if (this.prerelease.length === 0) {
this.inc('patch', identifier, identifierBase)
}
this.inc('pre', identifier, identifierBase)
break
case 'major':
// If this is a pre-major version, bump up to the same major version.
// Otherwise increment major.
// 1.0.0-5 bumps to 1.0.0
// 1.1.0 bumps to 2.0.0
if (
this.minor !== 0 ||
this.patch !== 0 ||
this.prerelease.length === 0
) {
this.major++
}
this.minor = 0
this.patch = 0
this.prerelease = []
break
case 'minor':
// If this is a pre-minor version, bump up to the same minor version.
// Otherwise increment minor.
// 1.2.0-5 bumps to 1.2.0
// 1.2.1 bumps to 1.3.0
if (this.patch !== 0 || this.prerelease.length === 0) {
this.minor++
}
this.patch = 0
this.prerelease = []
break
case 'patch':
// If this is not a pre-release version, it will increment the patch.
// If it is a pre-release it will bump up to the same patch version.
// 1.2.0-5 patches to 1.2.0
// 1.2.0 patches to 1.2.1
if (this.prerelease.length === 0) {
this.patch++
}
this.prerelease = []
break
// This probably shouldn't be used publicly.
// 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction.
case 'pre': {
const base = Number(identifierBase) ? 1 : 0
if (!identifier && identifierBase === false) {
throw new Error('invalid increment argument: identifier is empty')
}
if (this.prerelease.length === 0) {
this.prerelease = [base]
} else {
let i = this.prerelease.length
while (--i >= 0) {
if (typeof this.prerelease[i] === 'number') {
this.prerelease[i]++
i = -2
}
}
if (i === -1) {
// didn't increment anything
if (identifier === this.prerelease.join('.') && identifierBase === false) {
throw new Error('invalid increment argument: identifier already exists')
}
this.prerelease.push(base)
}
}
if (identifier) {
// 1.2.0-beta.1 bumps to 1.2.0-beta.2,
// 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
let prerelease = [identifier, base]
if (identifierBase === false) {
prerelease = [identifier]
}
if (compareIdentifiers(this.prerelease[0], identifier) === 0) {
if (isNaN(this.prerelease[1])) {
this.prerelease = prerelease
}
} else {
this.prerelease = prerelease
}
}
break
}
default:
throw new Error(`invalid increment argument: ${release}`)
}
this.raw = this.format()
if (this.build.length) {
this.raw += `+${this.build.join('.')}`
}
return this
}
}
module.exports = SemVer
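// Usage sketch for the SemVer class above:
//   const v = new SemVer('1.2.3-alpha.1')
//   v.compare('1.2.3')          // => -1 (a prerelease sorts before its release)
//   v.inc('prerelease').version // => '1.2.3-alpha.2'
//   new SemVer('1.2.3').inc('minor').version // => '1.3.0'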

@ -1,60 +0,0 @@
const SemVer = require('../classes/semver')
const parse = require('./parse')
const { safeRe: re, t } = require('../internal/re')
const coerce = (version, options) => {
if (version instanceof SemVer) {
return version
}
if (typeof version === 'number') {
version = String(version)
}
if (typeof version !== 'string') {
return null
}
options = options || {}
let match = null
if (!options.rtl) {
match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE])
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
// Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
// With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4'
//
// Walk through the string checking with a /g regexp
// Manually set the index so as to pick up overlapping matches.
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL]
let next
while ((next = coerceRtlRegex.exec(version)) &&
(!match || match.index + match[0].length !== version.length)
) {
if (!match ||
next.index + next[0].length !== match.index + match[0].length) {
match = next
}
coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length
}
// leave it in a clean state
coerceRtlRegex.lastIndex = -1
}
if (match === null) {
return null
}
const major = match[2]
const minor = match[3] || '0'
const patch = match[4] || '0'
const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : ''
const build = options.includePrerelease && match[6] ? `+${match[6]}` : ''
return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options)
}
module.exports = coerce
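// Usage sketch:
//   coerce('v2')                     // => SemVer '2.0.0'
//   coerce('1.2.3.4')                // => SemVer '1.2.3' (left-most match)
//   coerce('1.2.3.4', { rtl: true }) // => SemVer '2.3.4' (right-most match)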

@ -1,65 +0,0 @@
const parse = require('./parse.js')
const diff = (version1, version2) => {
const v1 = parse(version1, null, true)
const v2 = parse(version2, null, true)
const comparison = v1.compare(v2)
if (comparison === 0) {
return null
}
const v1Higher = comparison > 0
const highVersion = v1Higher ? v1 : v2
const lowVersion = v1Higher ? v2 : v1
const highHasPre = !!highVersion.prerelease.length
const lowHasPre = !!lowVersion.prerelease.length
if (lowHasPre && !highHasPre) {
// Going from prerelease -> no prerelease requires some special casing
// If the low version has only a major, then it will always be a major
// Some examples:
// 1.0.0-1 -> 1.0.0
// 1.0.0-1 -> 1.1.1
// 1.0.0-1 -> 2.0.0
if (!lowVersion.patch && !lowVersion.minor) {
return 'major'
}
// Otherwise it can be determined by checking the high version
if (highVersion.patch) {
// anything higher than a patch bump would result in the wrong version
return 'patch'
}
if (highVersion.minor) {
// anything higher than a minor bump would result in the wrong version
return 'minor'
}
// bumping major/minor/patch all have same result
return 'major'
}
// add the `pre` prefix if we are going to a prerelease version
const prefix = highHasPre ? 'pre' : ''
if (v1.major !== v2.major) {
return prefix + 'major'
}
if (v1.minor !== v2.minor) {
return prefix + 'minor'
}
if (v1.patch !== v2.patch) {
return prefix + 'patch'
}
// high and low are prereleases
return 'prerelease'
}
module.exports = diff
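// Usage sketch, reusing the cases discussed in the comments above:
//   diff('1.0.0', '1.1.0')   // => 'minor'
//   diff('1.0.0-1', '2.0.0') // => 'major' (prerelease special case)
//   diff('1.2.3', '1.2.3')   // => null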

@ -1,19 +0,0 @@
const SemVer = require('../classes/semver')
const inc = (version, release, options, identifier, identifierBase) => {
if (typeof (options) === 'string') {
identifierBase = identifier
identifier = options
options = undefined
}
try {
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release, identifier, identifierBase).version
} catch (er) {
return null
}
}
module.exports = inc
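// Usage sketch:
//   inc('1.2.3', 'minor')              // => '1.3.0'
//   inc('1.2.3', 'prerelease', 'beta') // => '1.2.4-beta.0'
//   inc('not a version', 'patch')      // => null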

@ -1,16 +0,0 @@
const SemVer = require('../classes/semver')
const parse = (version, options, throwErrors = false) => {
if (version instanceof SemVer) {
return version
}
try {
return new SemVer(version, options)
} catch (er) {
if (!throwErrors) {
return null
}
throw er
}
}
module.exports = parse
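// Usage sketch:
//   parse('1.2.3')                   // => SemVer instance
//   parse('not a version')           // => null
//   parse('not a version', {}, true) // throws TypeError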

@ -1,89 +0,0 @@
// just pre-load all the stuff that index.js lazily exports
const internalRe = require('./internal/re')
const constants = require('./internal/constants')
const SemVer = require('./classes/semver')
const identifiers = require('./internal/identifiers')
const parse = require('./functions/parse')
const valid = require('./functions/valid')
const clean = require('./functions/clean')
const inc = require('./functions/inc')
const diff = require('./functions/diff')
const major = require('./functions/major')
const minor = require('./functions/minor')
const patch = require('./functions/patch')
const prerelease = require('./functions/prerelease')
const compare = require('./functions/compare')
const rcompare = require('./functions/rcompare')
const compareLoose = require('./functions/compare-loose')
const compareBuild = require('./functions/compare-build')
const sort = require('./functions/sort')
const rsort = require('./functions/rsort')
const gt = require('./functions/gt')
const lt = require('./functions/lt')
const eq = require('./functions/eq')
const neq = require('./functions/neq')
const gte = require('./functions/gte')
const lte = require('./functions/lte')
const cmp = require('./functions/cmp')
const coerce = require('./functions/coerce')
const Comparator = require('./classes/comparator')
const Range = require('./classes/range')
const satisfies = require('./functions/satisfies')
const toComparators = require('./ranges/to-comparators')
const maxSatisfying = require('./ranges/max-satisfying')
const minSatisfying = require('./ranges/min-satisfying')
const minVersion = require('./ranges/min-version')
const validRange = require('./ranges/valid')
const outside = require('./ranges/outside')
const gtr = require('./ranges/gtr')
const ltr = require('./ranges/ltr')
const intersects = require('./ranges/intersects')
const simplifyRange = require('./ranges/simplify')
const subset = require('./ranges/subset')
module.exports = {
parse,
valid,
clean,
inc,
diff,
major,
minor,
patch,
prerelease,
compare,
rcompare,
compareLoose,
compareBuild,
sort,
rsort,
gt,
lt,
eq,
neq,
gte,
lte,
cmp,
coerce,
Comparator,
Range,
satisfies,
toComparators,
maxSatisfying,
minSatisfying,
minVersion,
validRange,
outside,
gtr,
ltr,
intersects,
simplifyRange,
subset,
SemVer,
re: internalRe.re,
src: internalRe.src,
tokens: internalRe.t,
SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION,
RELEASE_TYPES: constants.RELEASE_TYPES,
compareIdentifiers: identifiers.compareIdentifiers,
rcompareIdentifiers: identifiers.rcompareIdentifiers,
}
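// Usage sketch for the assembled exports (assuming the package resolves as
// `semver` on the require path):
//   const semver = require('semver')
//   semver.satisfies('1.2.3', '^1.0.0')                      // => true
//   semver.maxSatisfying(['1.1.0', '1.4.2', '2.0.0'], '1.x') // => '1.4.2'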

@ -1,35 +0,0 @@
// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
const SEMVER_SPEC_VERSION = '2.0.0'
const MAX_LENGTH = 256
const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
/* istanbul ignore next */ 9007199254740991
// Max safe segment length for coercion.
const MAX_SAFE_COMPONENT_LENGTH = 16
// Max safe length for a build identifier. The max length minus 6 characters for
// the shortest version with a build 0.0.0+BUILD.
const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6
const RELEASE_TYPES = [
'major',
'premajor',
'minor',
'preminor',
'patch',
'prepatch',
'prerelease',
]
module.exports = {
MAX_LENGTH,
MAX_SAFE_COMPONENT_LENGTH,
MAX_SAFE_BUILD_LENGTH,
MAX_SAFE_INTEGER,
RELEASE_TYPES,
SEMVER_SPEC_VERSION,
FLAG_INCLUDE_PRERELEASE: 0b001,
FLAG_LOOSE: 0b010,
}

@ -1,15 +0,0 @@
// parse out just the options we care about
const looseOption = Object.freeze({ loose: true })
const emptyOpts = Object.freeze({ })
const parseOptions = options => {
if (!options) {
return emptyOpts
}
if (typeof options !== 'object') {
return looseOption
}
return options
}
module.exports = parseOptions
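// Usage sketch:
//   parseOptions(null)            // => {} (shared frozen object)
//   parseOptions(true)            // => { loose: true } (legacy boolean form)
//   parseOptions({ loose: true }) // => the same object, passed through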

@ -1,217 +0,0 @@
const {
MAX_SAFE_COMPONENT_LENGTH,
MAX_SAFE_BUILD_LENGTH,
MAX_LENGTH,
} = require('./constants')
const debug = require('./debug')
exports = module.exports = {}
// The actual regexps go on exports.re
const re = exports.re = []
const safeRe = exports.safeRe = []
const src = exports.src = []
const t = exports.t = {}
let R = 0
const LETTERDASHNUMBER = '[a-zA-Z0-9-]'
// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
const safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]
const makeSafeRegex = (value) => {
for (const [token, max] of safeRegexReplacements) {
value = value
.split(`${token}*`).join(`${token}{0,${max}}`)
.split(`${token}+`).join(`${token}{1,${max}}`)
}
return value
}
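// For instance (sketch): with MAX_LENGTH = 256,
//   makeSafeRegex('\\d+') // => '\\d{1,256}' (bounded, so no unbounded backtracking)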
const createToken = (name, value, isGlobal) => {
const safe = makeSafeRegex(value)
const index = R++
debug(name, index, value)
t[name] = index
src[index] = value
re[index] = new RegExp(value, isGlobal ? 'g' : undefined)
safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined)
}
// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.
// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.
createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*')
createToken('NUMERICIDENTIFIERLOOSE', '\\d+')
// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.
createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`)
// ## Main Version
// Three dot-separated numeric identifiers.
createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` +
`(${src[t.NUMERICIDENTIFIER]})\\.` +
`(${src[t.NUMERICIDENTIFIER]})`)
createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` +
`(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` +
`(${src[t.NUMERICIDENTIFIERLOOSE]})`)
// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.
createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER]
}|${src[t.NONNUMERICIDENTIFIER]})`)
createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE]
}|${src[t.NONNUMERICIDENTIFIER]})`)
// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.
createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER]
}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`)
createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE]
}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`)
// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.
createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`)
// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.
createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER]
}(?:\\.${src[t.BUILDIDENTIFIER]})*))`)
// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.
// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.
createToken('FULLPLAIN', `v?${src[t.MAINVERSION]
}${src[t.PRERELEASE]}?${
src[t.BUILD]}?`)
createToken('FULL', `^${src[t.FULLPLAIN]}$`)
// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE]
}${src[t.PRERELEASELOOSE]}?${
src[t.BUILD]}?`)
createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`)
createToken('GTLT', '((?:<|>)?=?)')
// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`)
createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`)
createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` +
`(?:\\.(${src[t.XRANGEIDENTIFIER]})` +
`(?:\\.(${src[t.XRANGEIDENTIFIER]})` +
`(?:${src[t.PRERELEASE]})?${
src[t.BUILD]}?` +
`)?)?`)
createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` +
`(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` +
`(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` +
`(?:${src[t.PRERELEASELOOSE]})?${
src[t.BUILD]}?` +
`)?)?`)
createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`)
createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`)
// Coercion.
// Extract anything that could conceivably be a part of a valid semver
createToken('COERCEPLAIN', `${'(^|[^\\d])' +
'(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` +
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`)
createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`)
createToken('COERCEFULL', src[t.COERCEPLAIN] +
`(?:${src[t.PRERELEASE]})?` +
`(?:${src[t.BUILD]})?` +
`(?:$|[^\\d])`)
createToken('COERCERTL', src[t.COERCE], true)
createToken('COERCERTLFULL', src[t.COERCEFULL], true)
// Tilde ranges.
// Meaning is "reasonably at or greater than"
createToken('LONETILDE', '(?:~>?)')
createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true)
exports.tildeTrimReplace = '$1~'
createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`)
createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`)
// Caret ranges.
// Meaning is "at least and backwards compatible with"
createToken('LONECARET', '(?:\\^)')
createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true)
exports.caretTrimReplace = '$1^'
createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`)
createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`)
// A simple gt/lt/eq thing, or just "" to indicate "any version"
createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`)
createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`)
// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT]
}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true)
exports.comparatorTrimReplace = '$1$2$3'
// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` +
`\\s+-\\s+` +
`(${src[t.XRANGEPLAIN]})` +
`\\s*$`)
createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` +
`\\s+-\\s+` +
`(${src[t.XRANGEPLAINLOOSE]})` +
`\\s*$`)
// Star ranges basically just allow anything at all.
createToken('STAR', '(<|>)?=?\\s*\\*')
// >=0.0.0 is like a star
createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$')
createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$')
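// Usage sketch: the exported tables line up by index, so tokens can be
// addressed by name:
//   re[t.FULL].test('1.2.3')     // => true
//   safeRe[t.FULL].test('1.2.3') // => true (same pattern, bounded quantifiers)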

@ -1,77 +0,0 @@
{
"name": "semver",
"version": "7.6.3",
"description": "The semantic version parser used by npm.",
"main": "index.js",
"scripts": {
"test": "tap",
"snap": "tap",
"lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"postlint": "template-oss-check",
"lintfix": "npm run lint -- --fix",
"posttest": "npm run lint",
"template-oss-apply": "template-oss-apply --force"
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.22.0",
"benchmark": "^2.1.4",
"tap": "^16.0.0"
},
"license": "ISC",
"repository": {
"type": "git",
"url": "git+https://github.com/npm/node-semver.git"
},
"bin": {
"semver": "bin/semver.js"
},
"files": [
"bin/",
"lib/",
"classes/",
"functions/",
"internal/",
"ranges/",
"index.js",
"preload.js",
"range.bnf"
],
"tap": {
"timeout": 30,
"coverage-map": "map.js",
"nyc-arg": [
"--exclude",
"tap-snapshots/**"
]
},
"engines": {
"node": ">=10"
},
"author": "GitHub Inc.",
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.22.0",
"engines": ">=10",
"distPaths": [
"classes/",
"functions/",
"internal/",
"ranges/",
"index.js",
"preload.js",
"range.bnf"
],
"allowPaths": [
"/classes/",
"/functions/",
"/internal/",
"/ranges/",
"/index.js",
"/preload.js",
"/range.bnf",
"/benchmarks"
],
"publish": "true"
}
}

@ -1,7 +0,0 @@
const Range = require('../classes/range')
const intersects = (r1, r2, options) => {
r1 = new Range(r1, options)
r2 = new Range(r2, options)
return r1.intersects(r2, options)
}
module.exports = intersects
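// Usage sketch:
//   intersects('>1.0.0', '<2.0.0') // => true  (e.g. 1.5.0 is in both)
//   intersects('<1.0.0', '>2.0.0') // => false (no version is in both)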

@ -1,247 +0,0 @@
const Range = require('../classes/range.js')
const Comparator = require('../classes/comparator.js')
const { ANY } = Comparator
const satisfies = require('../functions/satisfies.js')
const compare = require('../functions/compare.js')
// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff:
// - Every simple range `r1, r2, ...` is a null set, OR
// - Every simple range `r1, r2, ...` which is not a null set is a subset of
// some `R1, R2, ...`
//
// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff:
// - If c is only the ANY comparator
// - If C is only the ANY comparator, return true
// - Else if in prerelease mode, replace c with `[>=0.0.0-0]`
// - Else replace c with `[>=0.0.0]`
// - If C is only the ANY comparator
// - if in prerelease mode, return true
// - else replace C with `[>=0.0.0]`
// - Let EQ be the set of = comparators in c
// - If EQ is more than one, return true (null set)
// - Let GT be the highest > or >= comparator in c
// - Let LT be the lowest < or <= comparator in c
// - If GT and LT, and GT.semver > LT.semver, return true (null set)
// - If any C is a = range, and GT or LT are set, return false
// - If EQ
// - If GT, and EQ does not satisfy GT, return true (null set)
// - If LT, and EQ does not satisfy LT, return true (null set)
// - If EQ satisfies every C, return true
// - Else return false
// - If GT
// - If GT.semver is lower than any > or >= comp in C, return false
// - If GT is >=, and GT.semver does not satisfy every C, return false
// - If GT.semver has a prerelease, and not in prerelease mode
// - If no C has a prerelease and the GT.semver tuple, return false
// - If LT
// - If LT.semver is greater than any < or <= comp in C, return false
// - If LT is <=, and LT.semver does not satisfy every C, return false
// - If GT.semver has a prerelease, and not in prerelease mode
// - If no C has a prerelease and the LT.semver tuple, return false
// - Else return true
const subset = (sub, dom, options = {}) => {
if (sub === dom) {
return true
}
sub = new Range(sub, options)
dom = new Range(dom, options)
let sawNonNull = false
OUTER: for (const simpleSub of sub.set) {
for (const simpleDom of dom.set) {
const isSub = simpleSubset(simpleSub, simpleDom, options)
sawNonNull = sawNonNull || isSub !== null
if (isSub) {
continue OUTER
}
}
// the null set is a subset of everything, but null simple ranges in
// a complex range should be ignored. so if we saw a non-null range,
// then we know this isn't a subset, but if EVERY simple range was null,
// then it is a subset.
if (sawNonNull) {
return false
}
}
return true
}
const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')]
const minimumVersion = [new Comparator('>=0.0.0')]
const simpleSubset = (sub, dom, options) => {
if (sub === dom) {
return true
}
if (sub.length === 1 && sub[0].semver === ANY) {
if (dom.length === 1 && dom[0].semver === ANY) {
return true
} else if (options.includePrerelease) {
sub = minimumVersionWithPreRelease
} else {
sub = minimumVersion
}
}
if (dom.length === 1 && dom[0].semver === ANY) {
if (options.includePrerelease) {
return true
} else {
dom = minimumVersion
}
}
const eqSet = new Set()
let gt, lt
for (const c of sub) {
if (c.operator === '>' || c.operator === '>=') {
gt = higherGT(gt, c, options)
} else if (c.operator === '<' || c.operator === '<=') {
lt = lowerLT(lt, c, options)
} else {
eqSet.add(c.semver)
}
}
if (eqSet.size > 1) {
return null
}
let gtltComp
if (gt && lt) {
gtltComp = compare(gt.semver, lt.semver, options)
if (gtltComp > 0) {
return null
} else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) {
return null
}
}
// will iterate one or zero times
for (const eq of eqSet) {
if (gt && !satisfies(eq, String(gt), options)) {
return null
}
if (lt && !satisfies(eq, String(lt), options)) {
return null
}
for (const c of dom) {
if (!satisfies(eq, String(c), options)) {
return false
}
}
return true
}
let higher, lower
let hasDomLT, hasDomGT
// if the subset has a prerelease, we need a comparator in the superset
// with the same tuple and a prerelease, or it's not a subset
let needDomLTPre = lt &&
!options.includePrerelease &&
lt.semver.prerelease.length ? lt.semver : false
let needDomGTPre = gt &&
!options.includePrerelease &&
gt.semver.prerelease.length ? gt.semver : false
// exception: <1.2.3-0 is the same as <1.2.3
if (needDomLTPre && needDomLTPre.prerelease.length === 1 &&
lt.operator === '<' && needDomLTPre.prerelease[0] === 0) {
needDomLTPre = false
}
for (const c of dom) {
hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>='
hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<='
if (gt) {
if (needDomGTPre) {
if (c.semver.prerelease && c.semver.prerelease.length &&
c.semver.major === needDomGTPre.major &&
c.semver.minor === needDomGTPre.minor &&
c.semver.patch === needDomGTPre.patch) {
needDomGTPre = false
}
}
if (c.operator === '>' || c.operator === '>=') {
higher = higherGT(gt, c, options)
if (higher === c && higher !== gt) {
return false
}
} else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) {
return false
}
}
if (lt) {
if (needDomLTPre) {
if (c.semver.prerelease && c.semver.prerelease.length &&
c.semver.major === needDomLTPre.major &&
c.semver.minor === needDomLTPre.minor &&
c.semver.patch === needDomLTPre.patch) {
needDomLTPre = false
}
}
if (c.operator === '<' || c.operator === '<=') {
lower = lowerLT(lt, c, options)
if (lower === c && lower !== lt) {
return false
}
} else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) {
return false
}
}
if (!c.operator && (lt || gt) && gtltComp !== 0) {
return false
}
}
// if there was a < or >, and nothing in the dom, then must be false
// UNLESS it was limited by another range in the other direction.
// Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0
if (gt && hasDomLT && !lt && gtltComp !== 0) {
return false
}
if (lt && hasDomGT && !gt && gtltComp !== 0) {
return false
}
// we needed a prerelease range in a specific tuple, but didn't get one
// then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0,
// because it includes prereleases in the 1.2.3 tuple
if (needDomGTPre || needDomLTPre) {
return false
}
return true
}
// >=1.2.3 is lower than >1.2.3
const higherGT = (a, b, options) => {
if (!a) {
return b
}
const comp = compare(a.semver, b.semver, options)
return comp > 0 ? a
: comp < 0 ? b
: b.operator === '>' && a.operator === '>=' ? b
: a
}
// <=1.2.3 is higher than <1.2.3
const lowerLT = (a, b, options) => {
if (!a) {
return b
}
const comp = compare(a.semver, b.semver, options)
return comp < 0 ? a
: comp > 0 ? b
: b.operator === '<' && a.operator === '<=' ? b
: a
}
module.exports = subset
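// Usage sketch, reusing the examples from the comments above:
//   subset('>1.0.0 <1.0.1', '<2.0.0') // => true
//   subset('>=1.2.3-pre', '>=1.0.0')  // => false (prerelease tuple rule)
//   subset('^1.2.3', '^1.0.0')        // => true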

@ -1,62 +0,0 @@
{
"name": "@mapbox/node-pre-gyp",
"description": "Node.js native addon binary install tool",
"version": "1.0.11",
"keywords": [
"native",
"addon",
"module",
"c",
"c++",
"bindings",
"binary"
],
"license": "BSD-3-Clause",
"author": "Dane Springmeyer <dane@mapbox.com>",
"repository": {
"type": "git",
"url": "git://github.com/mapbox/node-pre-gyp.git"
},
"bin": "./bin/node-pre-gyp",
"main": "./lib/node-pre-gyp.js",
"dependencies": {
"detect-libc": "^2.0.0",
"https-proxy-agent": "^5.0.0",
"make-dir": "^3.1.0",
"node-fetch": "^2.6.7",
"nopt": "^5.0.0",
"npmlog": "^5.0.1",
"rimraf": "^3.0.2",
"semver": "^7.3.5",
"tar": "^6.1.11"
},
"devDependencies": {
"@mapbox/cloudfriend": "^5.1.0",
"@mapbox/eslint-config-mapbox": "^3.0.0",
"aws-sdk": "^2.1087.0",
"codecov": "^3.8.3",
"eslint": "^7.32.0",
"eslint-plugin-node": "^11.1.0",
"mock-aws-s3": "^4.0.2",
"nock": "^12.0.3",
"node-addon-api": "^4.3.0",
"nyc": "^15.1.0",
"tape": "^5.5.2",
"tar-fs": "^2.1.1"
},
"nyc": {
"all": true,
"skip-full": false,
"exclude": [
"test/**"
]
},
"scripts": {
"coverage": "nyc --all --include index.js --include lib/ npm test",
"upload-coverage": "nyc report --reporter json && codecov --clear --flags=unit --file=./coverage/coverage-final.json",
"lint": "eslint bin/node-pre-gyp lib/*js lib/util/*js test/*js scripts/*js",
"fix": "npm run lint -- --fix",
"update-crosswalk": "node scripts/abi_crosswalk.js",
"test": "tape test/*test.js"
}
}

@ -1,15 +0,0 @@
# Installation
> `npm install --save @types/node`
# Summary
This package contains type definitions for node (https://nodejs.org/).
# Details
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node.
### Additional Details
* Last updated: Wed, 21 Aug 2024 01:34:14 GMT
* Dependencies: [undici-types](https://npmjs.com/package/undici-types)
# Credits
These definitions were written by [Microsoft TypeScript](https://github.com/Microsoft), [Alberto Schiabel](https://github.com/jkomyno), [Alvis HT Tang](https://github.com/alvis), [Andrew Makarov](https://github.com/r3nya), [Benjamin Toueg](https://github.com/btoueg), [Chigozirim C.](https://github.com/smac89), [David Junger](https://github.com/touffy), [Deividas Bakanas](https://github.com/DeividasBakanas), [Eugene Y. Q. Shen](https://github.com/eyqs), [Hannes Magnusson](https://github.com/Hannes-Magnusson-CK), [Huw](https://github.com/hoo29), [Kelvin Jin](https://github.com/kjin), [Klaus Meinhardt](https://github.com/ajafff), [Lishude](https://github.com/islishude), [Mariusz Wiktorczyk](https://github.com/mwiktorczyk), [Mohsen Azimi](https://github.com/mohsen1), [Nikita Galkin](https://github.com/galkin), [Parambir Singh](https://github.com/parambirs), [Sebastian Silbermann](https://github.com/eps1lon), [Thomas den Hollander](https://github.com/ThomasdenH), [Wilco Bakker](https://github.com/WilcoBakker), [wwwy3y3](https://github.com/wwwy3y3), [Samuel Ainsworth](https://github.com/samuela), [Kyle Uehlein](https://github.com/kuehlein), [Thanik Bhongbhibhat](https://github.com/bhongy), [Marcin Kopacz](https://github.com/chyzwar), [Trivikram Kamat](https://github.com/trivikr), [Junxiao Shi](https://github.com/yoursunny), [Ilia Baryshnikov](https://github.com/qwelias), [ExE Boss](https://github.com/ExE-Boss), [Piotr Błażejewicz](https://github.com/peterblazejewicz), [Anna Henningsen](https://github.com/addaleax), [Victor Perin](https://github.com/victorperin), [Yongsheng Zhang](https://github.com/ZYSzys), [NodeJS Contributors](https://github.com/NodeJS), [Linus Unnebäck](https://github.com/LinusU), [wafuwafu13](https://github.com/wafuwafu13), [Matteo Collina](https://github.com/mcollina), and [Dmitry Semigradsky](https://github.com/Semigradsky).

1908
node_modules/@types/node/http.d.ts generated vendored

File diff suppressed because it is too large

@ -1,89 +0,0 @@
/**
* License for programmatically and manually incorporated
* documentation aka. `JSDoc` from https://github.com/nodejs/node/tree/master/doc
*
* Copyright Node.js contributors. All rights reserved.
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
// NOTE: These definitions support NodeJS and TypeScript 4.9+.
// Reference required types from the default lib:
/// <reference lib="es2020" />
/// <reference lib="esnext.asynciterable" />
/// <reference lib="esnext.intl" />
/// <reference lib="esnext.bigint" />
// Base definitions for all NodeJS modules that are not specific to any version of TypeScript:
/// <reference path="assert.d.ts" />
/// <reference path="assert/strict.d.ts" />
/// <reference path="globals.d.ts" />
/// <reference path="async_hooks.d.ts" />
/// <reference path="buffer.d.ts" />
/// <reference path="child_process.d.ts" />
/// <reference path="cluster.d.ts" />
/// <reference path="console.d.ts" />
/// <reference path="constants.d.ts" />
/// <reference path="crypto.d.ts" />
/// <reference path="dgram.d.ts" />
/// <reference path="diagnostics_channel.d.ts" />
/// <reference path="dns.d.ts" />
/// <reference path="dns/promises.d.ts" />
/// <reference path="dns/promises.d.ts" />
/// <reference path="domain.d.ts" />
/// <reference path="dom-events.d.ts" />
/// <reference path="events.d.ts" />
/// <reference path="fs.d.ts" />
/// <reference path="fs/promises.d.ts" />
/// <reference path="http.d.ts" />
/// <reference path="http2.d.ts" />
/// <reference path="https.d.ts" />
/// <reference path="inspector.d.ts" />
/// <reference path="module.d.ts" />
/// <reference path="net.d.ts" />
/// <reference path="os.d.ts" />
/// <reference path="path.d.ts" />
/// <reference path="perf_hooks.d.ts" />
/// <reference path="process.d.ts" />
/// <reference path="punycode.d.ts" />
/// <reference path="querystring.d.ts" />
/// <reference path="readline.d.ts" />
/// <reference path="readline/promises.d.ts" />
/// <reference path="repl.d.ts" />
/// <reference path="sea.d.ts" />
/// <reference path="stream.d.ts" />
/// <reference path="stream/promises.d.ts" />
/// <reference path="stream/consumers.d.ts" />
/// <reference path="stream/web.d.ts" />
/// <reference path="string_decoder.d.ts" />
/// <reference path="test.d.ts" />
/// <reference path="timers.d.ts" />
/// <reference path="timers/promises.d.ts" />
/// <reference path="tls.d.ts" />
/// <reference path="trace_events.d.ts" />
/// <reference path="tty.d.ts" />
/// <reference path="url.d.ts" />
/// <reference path="util.d.ts" />
/// <reference path="v8.d.ts" />
/// <reference path="vm.d.ts" />
/// <reference path="wasi.d.ts" />
/// <reference path="worker_threads.d.ts" />
/// <reference path="zlib.d.ts" />
/// <reference path="globals.global.d.ts" />

@ -1,217 +0,0 @@
{
"name": "@types/node",
"version": "22.4.2",
"description": "TypeScript definitions for node",
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node",
"license": "MIT",
"contributors": [
{
"name": "Microsoft TypeScript",
"githubUsername": "Microsoft",
"url": "https://github.com/Microsoft"
},
{
"name": "Alberto Schiabel",
"githubUsername": "jkomyno",
"url": "https://github.com/jkomyno"
},
{
"name": "Alvis HT Tang",
"githubUsername": "alvis",
"url": "https://github.com/alvis"
},
{
"name": "Andrew Makarov",
"githubUsername": "r3nya",
"url": "https://github.com/r3nya"
},
{
"name": "Benjamin Toueg",
"githubUsername": "btoueg",
"url": "https://github.com/btoueg"
},
{
"name": "Chigozirim C.",
"githubUsername": "smac89",
"url": "https://github.com/smac89"
},
{
"name": "David Junger",
"githubUsername": "touffy",
"url": "https://github.com/touffy"
},
{
"name": "Deividas Bakanas",
"githubUsername": "DeividasBakanas",
"url": "https://github.com/DeividasBakanas"
},
{
"name": "Eugene Y. Q. Shen",
"githubUsername": "eyqs",
"url": "https://github.com/eyqs"
},
{
"name": "Hannes Magnusson",
"githubUsername": "Hannes-Magnusson-CK",
"url": "https://github.com/Hannes-Magnusson-CK"
},
{
"name": "Huw",
"githubUsername": "hoo29",
"url": "https://github.com/hoo29"
},
{
"name": "Kelvin Jin",
"githubUsername": "kjin",
"url": "https://github.com/kjin"
},
{
"name": "Klaus Meinhardt",
"githubUsername": "ajafff",
"url": "https://github.com/ajafff"
},
{
"name": "Lishude",
"githubUsername": "islishude",
"url": "https://github.com/islishude"
},
{
"name": "Mariusz Wiktorczyk",
"githubUsername": "mwiktorczyk",
"url": "https://github.com/mwiktorczyk"
},
{
"name": "Mohsen Azimi",
"githubUsername": "mohsen1",
"url": "https://github.com/mohsen1"
},
{
"name": "Nikita Galkin",
"githubUsername": "galkin",
"url": "https://github.com/galkin"
},
{
"name": "Parambir Singh",
"githubUsername": "parambirs",
"url": "https://github.com/parambirs"
},
{
"name": "Sebastian Silbermann",
"githubUsername": "eps1lon",
"url": "https://github.com/eps1lon"
},
{
"name": "Thomas den Hollander",
"githubUsername": "ThomasdenH",
"url": "https://github.com/ThomasdenH"
},
{
"name": "Wilco Bakker",
"githubUsername": "WilcoBakker",
"url": "https://github.com/WilcoBakker"
},
{
"name": "wwwy3y3",
"githubUsername": "wwwy3y3",
"url": "https://github.com/wwwy3y3"
},
{
"name": "Samuel Ainsworth",
"githubUsername": "samuela",
"url": "https://github.com/samuela"
},
{
"name": "Kyle Uehlein",
"githubUsername": "kuehlein",
"url": "https://github.com/kuehlein"
},
{
"name": "Thanik Bhongbhibhat",
"githubUsername": "bhongy",
"url": "https://github.com/bhongy"
},
{
"name": "Marcin Kopacz",
"githubUsername": "chyzwar",
"url": "https://github.com/chyzwar"
},
{
"name": "Trivikram Kamat",
"githubUsername": "trivikr",
"url": "https://github.com/trivikr"
},
{
"name": "Junxiao Shi",
"githubUsername": "yoursunny",
"url": "https://github.com/yoursunny"
},
{
"name": "Ilia Baryshnikov",
"githubUsername": "qwelias",
"url": "https://github.com/qwelias"
},
{
"name": "ExE Boss",
"githubUsername": "ExE-Boss",
"url": "https://github.com/ExE-Boss"
},
{
"name": "Piotr Błażejewicz",
"githubUsername": "peterblazejewicz",
"url": "https://github.com/peterblazejewicz"
},
{
"name": "Anna Henningsen",
"githubUsername": "addaleax",
"url": "https://github.com/addaleax"
},
{
"name": "Victor Perin",
"githubUsername": "victorperin",
"url": "https://github.com/victorperin"
},
{
"name": "Yongsheng Zhang",
"githubUsername": "ZYSzys",
"url": "https://github.com/ZYSzys"
},
{
"name": "NodeJS Contributors",
"githubUsername": "NodeJS",
"url": "https://github.com/NodeJS"
},
{
"name": "Linus Unnebäck",
"githubUsername": "LinusU",
"url": "https://github.com/LinusU"
},
{
"name": "wafuwafu13",
"githubUsername": "wafuwafu13",
"url": "https://github.com/wafuwafu13"
},
{
"name": "Matteo Collina",
"githubUsername": "mcollina",
"url": "https://github.com/mcollina"
},
{
"name": "Dmitry Semigradsky",
"githubUsername": "Semigradsky",
"url": "https://github.com/Semigradsky"
}
],
"main": "",
"types": "index.d.ts",
"repository": {
"type": "git",
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
"directory": "types/node"
},
"scripts": {},
"dependencies": {
"undici-types": "~6.19.2"
},
"typesPublisherContentHash": "1661354099f570fd618ceda1a789867f3c8f3e5a51051679453227abe7d5b726",
"typeScriptVersion": "4.8"
}

191
node_modules/@types/node/path.d.ts generated vendored

@ -1,191 +0,0 @@
declare module "path/posix" {
import path = require("path");
export = path;
}
declare module "path/win32" {
import path = require("path");
export = path;
}
/**
* The `node:path` module provides utilities for working with file and directory
* paths. It can be accessed using:
*
* ```js
* const path = require('node:path');
* ```
* @see [source](https://github.com/nodejs/node/blob/v22.x/lib/path.js)
*/
declare module "path" {
namespace path {
/**
* A parsed path object generated by path.parse() or consumed by path.format().
*/
interface ParsedPath {
/**
* The root of the path such as '/' or 'c:\'
*/
root: string;
/**
* The full directory path such as '/home/user/dir' or 'c:\path\dir'
*/
dir: string;
/**
* The file name including extension (if any) such as 'index.html'
*/
base: string;
/**
* The file extension (if any) such as '.html'
*/
ext: string;
/**
* The file name without extension (if any) such as 'index'
*/
name: string;
}
interface FormatInputPathObject {
/**
* The root of the path such as '/' or 'c:\'
*/
root?: string | undefined;
/**
* The full directory path such as '/home/user/dir' or 'c:\path\dir'
*/
dir?: string | undefined;
/**
* The file name including extension (if any) such as 'index.html'
*/
base?: string | undefined;
/**
* The file extension (if any) such as '.html'
*/
ext?: string | undefined;
/**
* The file name without extension (if any) such as 'index'
*/
name?: string | undefined;
}
interface PlatformPath {
/**
* Normalize a string path, reducing '..' and '.' parts.
* When multiple slashes are found, they're replaced by a single one; when the path contains a trailing slash, it is preserved. On Windows backslashes are used.
*
* @param path string path to normalize.
* @throws {TypeError} if `path` is not a string.
*/
normalize(path: string): string;
/**
* Join all arguments together and normalize the resulting path.
*
* @param paths paths to join.
* @throws {TypeError} if any of the path segments is not a string.
*/
join(...paths: string[]): string;
/**
* The right-most parameter is considered {to}. Other parameters are considered an array of {from}.
*
* Starting from leftmost {from} parameter, resolves {to} to an absolute path.
*
* If {to} isn't already absolute, {from} arguments are prepended in right to left order,
* until an absolute path is found. If after using all {from} paths still no absolute path is found,
* the current working directory is used as well. The resulting path is normalized,
* and trailing slashes are removed unless the path gets resolved to the root directory.
*
* @param paths A sequence of paths or path segments.
* @throws {TypeError} if any of the arguments is not a string.
*/
resolve(...paths: string[]): string;
/**
* Determines whether {path} is an absolute path. An absolute path will always resolve to the same location, regardless of the working directory.
*
* If the given {path} is a zero-length string, `false` will be returned.
*
* @param path path to test.
* @throws {TypeError} if `path` is not a string.
*/
isAbsolute(path: string): boolean;
/**
* Solve the relative path from {from} to {to} based on the current working directory.
* At times we have two absolute paths, and we need to derive the relative path from one to the other. This is actually the reverse transform of path.resolve.
*
* @throws {TypeError} if either `from` or `to` is not a string.
*/
relative(from: string, to: string): string;
/**
* Return the directory name of a path. Similar to the Unix dirname command.
*
* @param path the path to evaluate.
* @throws {TypeError} if `path` is not a string.
*/
dirname(path: string): string;
/**
* Return the last portion of a path. Similar to the Unix basename command.
* Often used to extract the file name from a fully qualified path.
*
* @param path the path to evaluate.
* @param suffix optionally, an extension to remove from the result.
* @throws {TypeError} if `path` is not a string or if `ext` is given and is not a string.
*/
basename(path: string, suffix?: string): string;
/**
* Return the extension of the path, from the last '.' to end of string in the last portion of the path.
* If there is no '.' in the last portion of the path or the first character of it is '.', then it returns an empty string.
*
* @param path the path to evaluate.
* @throws {TypeError} if `path` is not a string.
*/
extname(path: string): string;
/**
* The platform-specific file separator. '\\' or '/'.
*/
readonly sep: "\\" | "/";
/**
* The platform-specific file delimiter. ';' or ':'.
*/
readonly delimiter: ";" | ":";
/**
* Returns an object from a path string - the opposite of format().
*
* @param path path to evaluate.
* @throws {TypeError} if `path` is not a string.
*/
parse(path: string): ParsedPath;
/**
* Returns a path string from an object - the opposite of parse().
*
* @param pathObject path to evaluate.
*/
format(pathObject: FormatInputPathObject): string;
/**
* On Windows systems only, returns an equivalent namespace-prefixed path for the given path.
* If path is not a string, path will be returned without modifications.
* This method is meaningful only on Windows system.
* On POSIX systems, the method is non-operational and always returns path without modifications.
*/
toNamespacedPath(path: string): string;
/**
* Posix specific pathing.
* Same as parent object on posix.
*/
readonly posix: PlatformPath;
/**
* Windows specific pathing.
* Same as parent object on windows
*/
readonly win32: PlatformPath;
}
}
const path: path.PlatformPath;
export = path;
}
declare module "node:path" {
import path = require("path");
export = path;
}
declare module "node:path/posix" {
import path = require("path/posix");
export = path;
}
declare module "node:path/win32" {
import path = require("path/win32");
export = path;
}
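// Usage sketch for the declarations above (plain Node.js):
//   const path = require('node:path');
//   path.parse('/home/user/dir/index.html');
//   // => { root: '/', dir: '/home/user/dir', base: 'index.html',
//   //      ext: '.html', name: 'index' }
//   path.posix.join('a', 'b', '..', 'c') // => 'a/c'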

File diff suppressed because it is too large

@ -1,694 +0,0 @@
/**
* The `node:worker_threads` module enables the use of threads that execute
* JavaScript in parallel. To access it:
*
* ```js
* const worker = require('node:worker_threads');
* ```
*
* Workers (threads) are useful for performing CPU-intensive JavaScript operations.
* They do not help much with I/O-intensive work. The Node.js built-in
* asynchronous I/O operations are more efficient than Workers can be.
*
* Unlike `child_process` or `cluster`, `worker_threads` can share memory. They do
* so by transferring `ArrayBuffer` instances or sharing `SharedArrayBuffer` instances.
*
* ```js
* const {
* Worker, isMainThread, parentPort, workerData,
* } = require('node:worker_threads');
*
* if (isMainThread) {
* module.exports = function parseJSAsync(script) {
* return new Promise((resolve, reject) => {
* const worker = new Worker(__filename, {
* workerData: script,
* });
* worker.on('message', resolve);
* worker.on('error', reject);
* worker.on('exit', (code) => {
* if (code !== 0)
* reject(new Error(`Worker stopped with exit code ${code}`));
* });
* });
* };
* } else {
* const { parse } = require('some-js-parsing-library');
* const script = workerData;
* parentPort.postMessage(parse(script));
* }
* ```
*
* The above example spawns a Worker thread for each `parseJSAsync()` call. In
* practice, use a pool of Workers for these kinds of tasks. Otherwise, the
* overhead of creating Workers would likely exceed their benefit.
*
* When implementing a worker pool, use the `AsyncResource` API to inform
* diagnostic tools (e.g. to provide asynchronous stack traces) about the
* correlation between tasks and their outcomes. See `"Using AsyncResource for a Worker thread pool"` in the `async_hooks` documentation for an example implementation.
*
* Worker threads inherit non-process-specific options by default. Refer to `Worker constructor options` to know how to customize worker thread options,
* specifically `argv` and `execArgv` options.
* @see [source](https://github.com/nodejs/node/blob/v22.x/lib/worker_threads.js)
*/
declare module "worker_threads" {
import { Blob } from "node:buffer";
import { Context } from "node:vm";
import { EventEmitter } from "node:events";
import { EventLoopUtilityFunction } from "node:perf_hooks";
import { FileHandle } from "node:fs/promises";
import { Readable, Writable } from "node:stream";
import { URL } from "node:url";
import { X509Certificate } from "node:crypto";
const isMainThread: boolean;
const parentPort: null | MessagePort;
const resourceLimits: ResourceLimits;
const SHARE_ENV: unique symbol;
const threadId: number;
const workerData: any;
/**
* Instances of the `worker.MessageChannel` class represent an asynchronous,
* two-way communications channel.
* The `MessageChannel` has no methods of its own. `new MessageChannel()` yields an object with `port1` and `port2` properties, which refer to linked `MessagePort` instances.
*
* ```js
* const { MessageChannel } = require('node:worker_threads');
*
* const { port1, port2 } = new MessageChannel();
* port1.on('message', (message) => console.log('received', message));
* port2.postMessage({ foo: 'bar' });
* // Prints: received { foo: 'bar' } from the `port1.on('message')` listener
* ```
* @since v10.5.0
*/
class MessageChannel {
readonly port1: MessagePort;
readonly port2: MessagePort;
}
interface WorkerPerformance {
eventLoopUtilization: EventLoopUtilityFunction;
}
type TransferListItem = ArrayBuffer | MessagePort | FileHandle | X509Certificate | Blob;
/**
* Instances of the `worker.MessagePort` class represent one end of an
* asynchronous, two-way communications channel. It can be used to transfer
* structured data, memory regions and other `MessagePort`s between different `Worker`s.
*
 * This implementation matches [browser `MessagePort`](https://developer.mozilla.org/en-US/docs/Web/API/MessagePort)s.
* @since v10.5.0
*/
class MessagePort extends EventEmitter {
/**
* Disables further sending of messages on either side of the connection.
* This method can be called when no further communication will happen over this `MessagePort`.
*
* The `'close' event` is emitted on both `MessagePort` instances that
* are part of the channel.
* @since v10.5.0
*/
close(): void;
/**
* Sends a JavaScript value to the receiving side of this channel. `value` is transferred in a way which is compatible with
* the [HTML structured clone algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm).
*
* In particular, the significant differences to `JSON` are:
*
* * `value` may contain circular references.
* * `value` may contain instances of builtin JS types such as `RegExp`s, `BigInt`s, `Map`s, `Set`s, etc.
* * `value` may contain typed arrays, both using `ArrayBuffer`s
* and `SharedArrayBuffer`s.
* * `value` may contain [`WebAssembly.Module`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/Module) instances.
 * * `value` may not contain native (C++-backed) objects other than a few built-in types, such as `MessagePort`s, `FileHandle`s, and `X509Certificate`s. For example:
*
* ```js
* const { MessageChannel } = require('node:worker_threads');
* const { port1, port2 } = new MessageChannel();
*
* port1.on('message', (message) => console.log(message));
*
* const circularData = {};
* circularData.foo = circularData;
* // Prints: { foo: [Circular] }
* port2.postMessage(circularData);
* ```
*
* `transferList` may be a list of [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer), `MessagePort`, and `FileHandle` objects.
* After transferring, they are not usable on the sending side of the channel
* anymore (even if they are not contained in `value`). Unlike with `child processes`, transferring handles such as network sockets is currently
* not supported.
*
* If `value` contains [`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) instances, those are accessible
* from either thread. They cannot be listed in `transferList`.
*
* `value` may still contain `ArrayBuffer` instances that are not in `transferList`; in that case, the underlying memory is copied rather than moved.
*
* ```js
* const { MessageChannel } = require('node:worker_threads');
* const { port1, port2 } = new MessageChannel();
*
* port1.on('message', (message) => console.log(message));
*
* const uint8Array = new Uint8Array([ 1, 2, 3, 4 ]);
* // This posts a copy of `uint8Array`:
* port2.postMessage(uint8Array);
* // This does not copy data, but renders `uint8Array` unusable:
* port2.postMessage(uint8Array, [ uint8Array.buffer ]);
*
* // The memory for the `sharedUint8Array` is accessible from both the
* // original and the copy received by `.on('message')`:
* const sharedUint8Array = new Uint8Array(new SharedArrayBuffer(4));
* port2.postMessage(sharedUint8Array);
*
* // This transfers a freshly created message port to the receiver.
* // This can be used, for example, to create communication channels between
* // multiple `Worker` threads that are children of the same parent thread.
* const otherChannel = new MessageChannel();
* port2.postMessage({ port: otherChannel.port1 }, [ otherChannel.port1 ]);
* ```
*
* The message object is cloned immediately, and can be modified after
* posting without having side effects.
*
* For more information on the serialization and deserialization mechanisms
* behind this API, see the `serialization API of the node:v8 module`.
* @since v10.5.0
*/
postMessage(value: any, transferList?: readonly TransferListItem[]): void;
/**
* Opposite of `unref()`. Calling `ref()` on a previously `unref()`ed port does _not_ let the program exit if it's the only active handle left (the default
* behavior). If the port is `ref()`ed, calling `ref()` again has no effect.
*
* If listeners are attached or removed using `.on('message')`, the port
* is `ref()`ed and `unref()`ed automatically depending on whether
* listeners for the event exist.
* @since v10.5.0
*/
ref(): void;
/**
* Calling `unref()` on a port allows the thread to exit if this is the only
* active handle in the event system. If the port is already `unref()`ed calling `unref()` again has no effect.
*
* If listeners are attached or removed using `.on('message')`, the port is `ref()`ed and `unref()`ed automatically depending on whether
* listeners for the event exist.
* @since v10.5.0
*/
unref(): void;
/**
* Starts receiving messages on this `MessagePort`. When using this port
* as an event emitter, this is called automatically once `'message'` listeners are attached.
*
* This method exists for parity with the Web `MessagePort` API. In Node.js,
* it is only useful for ignoring messages when no event listener is present.
* Node.js also diverges in its handling of `.onmessage`. Setting it
* automatically calls `.start()`, but unsetting it lets messages queue up
* until a new handler is set or the port is discarded.
* @since v10.5.0
*/
start(): void;
addListener(event: "close", listener: () => void): this;
addListener(event: "message", listener: (value: any) => void): this;
addListener(event: "messageerror", listener: (error: Error) => void): this;
addListener(event: string | symbol, listener: (...args: any[]) => void): this;
emit(event: "close"): boolean;
emit(event: "message", value: any): boolean;
emit(event: "messageerror", error: Error): boolean;
emit(event: string | symbol, ...args: any[]): boolean;
on(event: "close", listener: () => void): this;
on(event: "message", listener: (value: any) => void): this;
on(event: "messageerror", listener: (error: Error) => void): this;
on(event: string | symbol, listener: (...args: any[]) => void): this;
once(event: "close", listener: () => void): this;
once(event: "message", listener: (value: any) => void): this;
once(event: "messageerror", listener: (error: Error) => void): this;
once(event: string | symbol, listener: (...args: any[]) => void): this;
prependListener(event: "close", listener: () => void): this;
prependListener(event: "message", listener: (value: any) => void): this;
prependListener(event: "messageerror", listener: (error: Error) => void): this;
prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
prependOnceListener(event: "close", listener: () => void): this;
prependOnceListener(event: "message", listener: (value: any) => void): this;
prependOnceListener(event: "messageerror", listener: (error: Error) => void): this;
prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
removeListener(event: "close", listener: () => void): this;
removeListener(event: "message", listener: (value: any) => void): this;
removeListener(event: "messageerror", listener: (error: Error) => void): this;
removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
off(event: "close", listener: () => void): this;
off(event: "message", listener: (value: any) => void): this;
off(event: "messageerror", listener: (error: Error) => void): this;
off(event: string | symbol, listener: (...args: any[]) => void): this;
addEventListener: EventTarget["addEventListener"];
dispatchEvent: EventTarget["dispatchEvent"];
removeEventListener: EventTarget["removeEventListener"];
}
interface WorkerOptions {
/**
 * List of arguments which are stringified and appended to
 * `process.argv` in the worker. This is mostly similar to `workerData`,
 * except that the values are available on the global `process.argv` as if they
 * were passed as CLI options to the script.
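 *
 * A minimal sketch (the flag value is illustrative):
 *
 * ```js
 * const { Worker, isMainThread } = require('node:worker_threads');
 *
 * if (isMainThread) {
 *   new Worker(__filename, { argv: ['--from-parent'] });
 * } else {
 *   console.log(process.argv.slice(2)); // Prints: [ '--from-parent' ]
 * }
 * ```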
*/
argv?: any[] | undefined;
env?: NodeJS.Dict<string> | typeof SHARE_ENV | undefined;
eval?: boolean | undefined;
workerData?: any;
stdin?: boolean | undefined;
stdout?: boolean | undefined;
stderr?: boolean | undefined;
execArgv?: string[] | undefined;
resourceLimits?: ResourceLimits | undefined;
/**
* Additional data to send in the first worker message.
*/
transferList?: TransferListItem[] | undefined;
/**
* @default true
*/
trackUnmanagedFds?: boolean | undefined;
/**
 * An optional `name` to be appended to the worker title
 * for debugging/identification purposes, making the final title
 * `[worker ${id}] ${name}`.
*/
name?: string | undefined;
}
interface ResourceLimits {
/**
* The maximum size of a heap space for recently created objects.
*/
maxYoungGenerationSizeMb?: number | undefined;
/**
* The maximum size of the main heap in MB.
*/
maxOldGenerationSizeMb?: number | undefined;
/**
* The size of a pre-allocated memory range used for generated code.
*/
codeRangeSizeMb?: number | undefined;
/**
* The default maximum stack size for the thread. Small values may lead to unusable Worker instances.
* @default 4
*/
stackSizeMb?: number | undefined;
}
/**
* The `Worker` class represents an independent JavaScript execution thread.
* Most Node.js APIs are available inside of it.
*
* Notable differences inside a Worker environment are:
*
* * The `process.stdin`, `process.stdout`, and `process.stderr` streams may be redirected by the parent thread.
* * The `require('node:worker_threads').isMainThread` property is set to `false`.
* * The `require('node:worker_threads').parentPort` message port is available.
* * `process.exit()` does not stop the whole program, just the single thread,
* and `process.abort()` is not available.
* * `process.chdir()` and `process` methods that set group or user ids
* are not available.
* * `process.env` is a copy of the parent thread's environment variables,
* unless otherwise specified. Changes to one copy are not visible in other
* threads, and are not visible to native add-ons (unless `worker.SHARE_ENV` is passed as the `env` option to the `Worker` constructor). On Windows, unlike the main thread, a copy of the
* environment variables operates in a case-sensitive manner.
* * `process.title` cannot be modified.
* * Signals are not delivered through `process.on('...')`.
* * Execution may stop at any point as a result of `worker.terminate()` being invoked.
* * IPC channels from parent processes are not accessible.
* * The `trace_events` module is not supported.
* * Native add-ons can only be loaded from multiple threads if they fulfill `certain conditions`.
*
* Creating `Worker` instances inside of other `Worker`s is possible.
*
* Like [Web Workers](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) and the `node:cluster module`, two-way communication
* can be achieved through inter-thread message passing. Internally, a `Worker` has
 * a built-in pair of `MessagePort`s that are already associated with each
* other when the `Worker` is created. While the `MessagePort` object on the parent
* side is not directly exposed, its functionalities are exposed through `worker.postMessage()` and the `worker.on('message')` event
* on the `Worker` object for the parent thread.
*
* To create custom messaging channels (which is encouraged over using the default
* global channel because it facilitates separation of concerns), users can create
 * a `MessageChannel` object on either thread and pass one of the `MessagePort`s on that `MessageChannel` to the other thread through a
* pre-existing channel, such as the global one.
*
* See `port.postMessage()` for more information on how messages are passed,
* and what kind of JavaScript values can be successfully transported through
* the thread barrier.
*
* ```js
* const assert = require('node:assert');
* const {
* Worker, MessageChannel, MessagePort, isMainThread, parentPort,
* } = require('node:worker_threads');
* if (isMainThread) {
* const worker = new Worker(__filename);
* const subChannel = new MessageChannel();
* worker.postMessage({ hereIsYourPort: subChannel.port1 }, [subChannel.port1]);
* subChannel.port2.on('message', (value) => {
* console.log('received:', value);
* });
* } else {
* parentPort.once('message', (value) => {
* assert(value.hereIsYourPort instanceof MessagePort);
* value.hereIsYourPort.postMessage('the worker is sending this');
* value.hereIsYourPort.close();
* });
* }
* ```
* @since v10.5.0
*/
class Worker extends EventEmitter {
/**
* If `stdin: true` was passed to the `Worker` constructor, this is a
* writable stream. The data written to this stream will be made available in
* the worker thread as `process.stdin`.
* @since v10.5.0
*/
readonly stdin: Writable | null;
/**
* This is a readable stream which contains data written to `process.stdout` inside the worker thread. If `stdout: true` was not passed to the `Worker` constructor, then data is piped to the
* parent thread's `process.stdout` stream.
* @since v10.5.0
*/
readonly stdout: Readable;
/**
* This is a readable stream which contains data written to `process.stderr` inside the worker thread. If `stderr: true` was not passed to the `Worker` constructor, then data is piped to the
* parent thread's `process.stderr` stream.
* @since v10.5.0
*/
readonly stderr: Readable;
/**
* An integer identifier for the referenced thread. Inside the worker thread,
* it is available as `require('node:worker_threads').threadId`.
* This value is unique for each `Worker` instance inside a single process.
* @since v10.5.0
*/
readonly threadId: number;
/**
* Provides the set of JS engine resource constraints for this Worker thread.
* If the `resourceLimits` option was passed to the `Worker` constructor,
* this matches its values.
*
* If the worker has stopped, the return value is an empty object.
* @since v13.2.0, v12.16.0
*/
readonly resourceLimits?: ResourceLimits | undefined;
/**
* An object that can be used to query performance information from a worker
* instance. Similar to `perf_hooks.performance`.
* @since v15.1.0, v14.17.0, v12.22.0
*/
readonly performance: WorkerPerformance;
/**
 * @param filename The path to the Worker's main script or module.
* Must be either an absolute path or a relative path (i.e. relative to the current working directory) starting with ./ or ../,
* or a WHATWG URL object using file: protocol. If options.eval is true, this is a string containing JavaScript code rather than a path.
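 *
 * A minimal usage sketch (`./worker.js` and the limit value are illustrative):
 *
 * ```js
 * const { Worker } = require('node:worker_threads');
 * const worker = new Worker('./worker.js', {
 *   workerData: { answer: 42 },
 *   resourceLimits: { maxOldGenerationSizeMb: 64 },
 * });
 * ```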
*/
constructor(filename: string | URL, options?: WorkerOptions);
/**
* Send a message to the worker that is received via `require('node:worker_threads').parentPort.on('message')`.
* See `port.postMessage()` for more details.
* @since v10.5.0
*/
postMessage(value: any, transferList?: readonly TransferListItem[]): void;
/**
* Opposite of `unref()`, calling `ref()` on a previously `unref()`ed worker does _not_ let the program exit if it's the only active handle left (the default
* behavior). If the worker is `ref()`ed, calling `ref()` again has
* no effect.
* @since v10.5.0
*/
ref(): void;
/**
* Calling `unref()` on a worker allows the thread to exit if this is the only
* active handle in the event system. If the worker is already `unref()`ed calling `unref()` again has no effect.
* @since v10.5.0
*/
unref(): void;
/**
* Stop all JavaScript execution in the worker thread as soon as possible.
* Returns a Promise for the exit code that is fulfilled when the `'exit' event` is emitted.
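 *
 * A minimal usage sketch:
 *
 * ```js
 * const { Worker } = require('node:worker_threads');
 * const worker = new Worker('setInterval(() => {}, 1000);', { eval: true });
 * worker.terminate().then((code) => {
 *   console.log(`worker stopped with exit code ${code}`);
 * });
 * ```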
* @since v10.5.0
*/
terminate(): Promise<number>;
/**
* Returns a readable stream for a V8 snapshot of the current state of the Worker.
* See `v8.getHeapSnapshot()` for more details.
*
* If the Worker thread is no longer running, which may occur before the `'exit' event` is emitted, the returned `Promise` is rejected
* immediately with an `ERR_WORKER_NOT_RUNNING` error.
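 *
 * A minimal usage sketch (inside an `async` function; the file name is arbitrary):
 *
 * ```js
 * const fs = require('node:fs');
 * const snapshot = await worker.getHeapSnapshot();
 * snapshot.pipe(fs.createWriteStream('./worker.heapsnapshot'));
 * ```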
* @since v13.9.0, v12.17.0
* @return A promise for a Readable Stream containing a V8 heap snapshot
*/
getHeapSnapshot(): Promise<Readable>;
addListener(event: "error", listener: (err: Error) => void): this;
addListener(event: "exit", listener: (exitCode: number) => void): this;
addListener(event: "message", listener: (value: any) => void): this;
addListener(event: "messageerror", listener: (error: Error) => void): this;
addListener(event: "online", listener: () => void): this;
addListener(event: string | symbol, listener: (...args: any[]) => void): this;
emit(event: "error", err: Error): boolean;
emit(event: "exit", exitCode: number): boolean;
emit(event: "message", value: any): boolean;
emit(event: "messageerror", error: Error): boolean;
emit(event: "online"): boolean;
emit(event: string | symbol, ...args: any[]): boolean;
on(event: "error", listener: (err: Error) => void): this;
on(event: "exit", listener: (exitCode: number) => void): this;
on(event: "message", listener: (value: any) => void): this;
on(event: "messageerror", listener: (error: Error) => void): this;
on(event: "online", listener: () => void): this;
on(event: string | symbol, listener: (...args: any[]) => void): this;
once(event: "error", listener: (err: Error) => void): this;
once(event: "exit", listener: (exitCode: number) => void): this;
once(event: "message", listener: (value: any) => void): this;
once(event: "messageerror", listener: (error: Error) => void): this;
once(event: "online", listener: () => void): this;
once(event: string | symbol, listener: (...args: any[]) => void): this;
prependListener(event: "error", listener: (err: Error) => void): this;
prependListener(event: "exit", listener: (exitCode: number) => void): this;
prependListener(event: "message", listener: (value: any) => void): this;
prependListener(event: "messageerror", listener: (error: Error) => void): this;
prependListener(event: "online", listener: () => void): this;
prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
prependOnceListener(event: "error", listener: (err: Error) => void): this;
prependOnceListener(event: "exit", listener: (exitCode: number) => void): this;
prependOnceListener(event: "message", listener: (value: any) => void): this;
prependOnceListener(event: "messageerror", listener: (error: Error) => void): this;
prependOnceListener(event: "online", listener: () => void): this;
prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
removeListener(event: "error", listener: (err: Error) => void): this;
removeListener(event: "exit", listener: (exitCode: number) => void): this;
removeListener(event: "message", listener: (value: any) => void): this;
removeListener(event: "messageerror", listener: (error: Error) => void): this;
removeListener(event: "online", listener: () => void): this;
removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
off(event: "error", listener: (err: Error) => void): this;
off(event: "exit", listener: (exitCode: number) => void): this;
off(event: "message", listener: (value: any) => void): this;
off(event: "messageerror", listener: (error: Error) => void): this;
off(event: "online", listener: () => void): this;
off(event: string | symbol, listener: (...args: any[]) => void): this;
}
interface BroadcastChannel extends NodeJS.RefCounted {}
/**
* Instances of `BroadcastChannel` allow asynchronous one-to-many communication
* with all other `BroadcastChannel` instances bound to the same channel name.
*
* ```js
* 'use strict';
*
* const {
* isMainThread,
* BroadcastChannel,
* Worker,
* } = require('node:worker_threads');
*
* const bc = new BroadcastChannel('hello');
*
* if (isMainThread) {
* let c = 0;
* bc.onmessage = (event) => {
* console.log(event.data);
* if (++c === 10) bc.close();
* };
* for (let n = 0; n < 10; n++)
* new Worker(__filename);
* } else {
* bc.postMessage('hello from every worker');
* bc.close();
* }
* ```
* @since v15.4.0
*/
class BroadcastChannel {
readonly name: string;
/**
 * Invoked with a single `MessageEvent` argument when a message is received.
* @since v15.4.0
*/
onmessage: (message: unknown) => void;
/**
 * Invoked when a received message cannot be deserialized.
* @since v15.4.0
*/
onmessageerror: (message: unknown) => void;
constructor(name: string);
/**
* Closes the `BroadcastChannel` connection.
* @since v15.4.0
*/
close(): void;
/**
* @since v15.4.0
* @param message Any cloneable JavaScript value.
*/
postMessage(message: unknown): void;
}
/**
* Mark an object as not transferable. If `object` occurs in the transfer list of
* a `port.postMessage()` call, it is ignored.
*
* In particular, this makes sense for objects that can be cloned, rather than
* transferred, and which are used by other objects on the sending side.
* For example, Node.js marks the `ArrayBuffer`s it uses for its `Buffer pool` with this.
*
* This operation cannot be undone.
*
* ```js
* const { MessageChannel, markAsUntransferable } = require('node:worker_threads');
*
* const pooledBuffer = new ArrayBuffer(8);
* const typedArray1 = new Uint8Array(pooledBuffer);
* const typedArray2 = new Float64Array(pooledBuffer);
*
* markAsUntransferable(pooledBuffer);
*
* const { port1 } = new MessageChannel();
* port1.postMessage(typedArray1, [ typedArray1.buffer ]);
*
* // The following line prints the contents of typedArray1 -- it still owns
* // its memory and has been cloned, not transferred. Without
* // `markAsUntransferable()`, this would print an empty Uint8Array.
* // typedArray2 is intact as well.
* console.log(typedArray1);
* console.log(typedArray2);
* ```
*
* There is no equivalent to this API in browsers.
* @since v14.5.0, v12.19.0
*/
function markAsUntransferable(object: object): void;
/**
* Transfer a `MessagePort` to a different `vm` Context. The original `port` object is rendered unusable, and the returned `MessagePort` instance
* takes its place.
*
* The returned `MessagePort` is an object in the target context and
* inherits from its global `Object` class. Objects passed to the [`port.onmessage()`](https://developer.mozilla.org/en-US/docs/Web/API/MessagePort/onmessage) listener are also created in the
* target context
* and inherit from its global `Object` class.
*
* However, the created `MessagePort` no longer inherits from [`EventTarget`](https://developer.mozilla.org/en-US/docs/Web/API/EventTarget), and only
* [`port.onmessage()`](https://developer.mozilla.org/en-US/docs/Web/API/MessagePort/onmessage) can be used to receive
* events using it.
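 *
 * A minimal sketch (variable names are illustrative):
 *
 * ```js
 * const vm = require('node:vm');
 * const { MessageChannel, moveMessagePortToContext } = require('node:worker_threads');
 *
 * const context = vm.createContext({});
 * const { port1, port2 } = new MessageChannel();
 * // `port1` becomes unusable; the returned port lives in the target context.
 * context.port = moveMessagePortToContext(port1, context);
 * vm.runInContext('port.onmessage = (e) => { globalThis.received = e.data; };', context);
 * port2.postMessage('hello');
 * ```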
* @since v11.13.0
* @param port The message port to transfer.
* @param contextifiedSandbox A `contextified` object as returned by the `vm.createContext()` method.
*/
function moveMessagePortToContext(port: MessagePort, contextifiedSandbox: Context): MessagePort;
/**
* Receive a single message from a given `MessagePort`. If no message is available,`undefined` is returned, otherwise an object with a single `message` property
* that contains the message payload, corresponding to the oldest message in the `MessagePort`'s queue.
*
* ```js
* const { MessageChannel, receiveMessageOnPort } = require('node:worker_threads');
* const { port1, port2 } = new MessageChannel();
* port1.postMessage({ hello: 'world' });
*
* console.log(receiveMessageOnPort(port2));
* // Prints: { message: { hello: 'world' } }
* console.log(receiveMessageOnPort(port2));
* // Prints: undefined
* ```
*
* When this function is used, no `'message'` event is emitted and the `onmessage` listener is not invoked.
* @since v12.3.0
*/
function receiveMessageOnPort(port: MessagePort):
| {
message: any;
}
| undefined;
type Serializable = string | object | number | boolean | bigint;
/**
* Within a worker thread, `worker.getEnvironmentData()` returns a clone
* of data passed to the spawning thread's `worker.setEnvironmentData()`.
* Every new `Worker` receives its own copy of the environment data
* automatically.
*
* ```js
* const {
* Worker,
* isMainThread,
* setEnvironmentData,
* getEnvironmentData,
* } = require('node:worker_threads');
*
* if (isMainThread) {
* setEnvironmentData('Hello', 'World!');
* const worker = new Worker(__filename);
* } else {
* console.log(getEnvironmentData('Hello')); // Prints 'World!'.
* }
* ```
* @since v15.12.0, v14.18.0
* @param key Any arbitrary, cloneable JavaScript value that can be used as a {Map} key.
*/
function getEnvironmentData(key: Serializable): Serializable;
/**
* The `worker.setEnvironmentData()` API sets the content of `worker.getEnvironmentData()` in the current thread and all new `Worker` instances spawned from the current context.
* @since v15.12.0, v14.18.0
* @param key Any arbitrary, cloneable JavaScript value that can be used as a {Map} key.
* @param value Any arbitrary, cloneable JavaScript value that will be cloned and passed automatically to all new `Worker` instances. If `value` is passed as `undefined`, any previously set value
* for the `key` will be deleted.
*/
function setEnvironmentData(key: Serializable, value: Serializable): void;
import {
BroadcastChannel as _BroadcastChannel,
MessageChannel as _MessageChannel,
MessagePort as _MessagePort,
} from "worker_threads";
global {
/**
* `BroadcastChannel` class is a global reference for `require('worker_threads').BroadcastChannel`
* https://nodejs.org/api/globals.html#broadcastchannel
* @since v18.0.0
*/
var BroadcastChannel: typeof globalThis extends {
onmessage: any;
BroadcastChannel: infer T;
} ? T
: typeof _BroadcastChannel;
/**
* `MessageChannel` class is a global reference for `require('worker_threads').MessageChannel`
* https://nodejs.org/api/globals.html#messagechannel
* @since v15.0.0
*/
var MessageChannel: typeof globalThis extends {
onmessage: any;
MessageChannel: infer T;
} ? T
: typeof _MessageChannel;
/**
* `MessagePort` class is a global reference for `require('worker_threads').MessagePort`
* https://nodejs.org/api/globals.html#messageport
* @since v15.0.0
*/
var MessagePort: typeof globalThis extends {
onmessage: any;
MessagePort: infer T;
} ? T
: typeof _MessagePort;
}
}
declare module "node:worker_threads" {
export * from "worker_threads";
}

@ -1,59 +0,0 @@
name: ci
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
build:
strategy:
matrix:
os: [ubuntu-20.04, macos-11.0, windows-2019]
nodeVersion: [14, 16, 18, 20]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.nodeVersion }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.nodeVersion }}
- name: Test
run: npm test
- name: Package
if: startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master')
run: npx node-pre-gyp package
- name: Upload
uses: actions/upload-artifact@v3
if: matrix.nodeVersion == '14' && (startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master'))
with:
name: bcrypt-lib-${{ matrix.os }}-${{ matrix.nodeVersion }}
path: build/stage/**/bcrypt_lib*.tar.gz
build-alpine:
runs-on: ubuntu-latest
strategy:
matrix:
nodeVersion: [14, 16, 18, 20]
container:
image: node:${{ matrix.nodeVersion }}-alpine
steps:
- uses: actions/checkout@v3
- name: Install dependencies
run: |
apk add make g++ python3
- name: Test
run: |
npm test --unsafe-perm
- name: Package
if: startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master')
run: npx node-pre-gyp package --unsafe-perm
- name: Upload
if: matrix.nodeVersion == '14' && (startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master'))
uses: actions/upload-artifact@v3
with:
name: bcrypt-lib-alpine-${{ matrix.nodeVersion }}
path: build/stage/**/bcrypt_lib*.tar.gz

178
node_modules/bcrypt/CHANGELOG.md generated vendored

@ -1,178 +0,0 @@
# 5.1.1 (2023-08-16)
* Update `node-pre-gyp` to 1.0.11
# 5.1.0 (2022-10-06)
* Update `node-pre-gyp` to 1.0.10
* Replace `nodeunit` with `jest` as the testing library
# 5.0.1 (2021-02-22)
* Update `node-pre-gyp` to 1.0.0
# 5.0.0 (2020-06-02)
* Fix the bcrypt "wrap-around" bug. It affects passwords with lengths >= 255.
It is uncommon but it's a bug nevertheless. Previous attempts to fix the bug
were unsuccessful.
* Experimental support for z/OS
* Fix a bug related to NUL in password input
* Update `node-pre-gyp` to 0.15.0
# 4.0.1 (2020-02-27)
* Fix compilation errors in Alpine linux
# 4.0.0 (2020-02-17)
* Switch to NAPI bcrypt
* Drop support for NodeJS 8
# 3.0.8 (2019-12-31)
* Update `node-pre-gyp` to 0.14
* Pre-built binaries for NodeJS 13
# 3.0.7 (2019-10-18)
* Update `nan` to 2.14.0
* Update `node-pre-gyp` to 0.13
# 3.0.6 (2019-04-11)
* Update `nan` to 2.13.2
# 3.0.5 (2019-03-19)
* Update `nan` to 2.13.1
* NodeJS 12 compatibility
* Remove `node-pre-gyp` from bundled dependencies
# 3.0.4-napi (2019-03-08)
* Sync N-API bcrypt with NAN bcrypt
# 3.0.4 (2019-02-07)
* Fix GCC, NAN and V8 deprecation warnings
# 3.0.3 (2018-12-19)
* Update `nan` to 2.12.1
# 3.0.2 (2018-10-18)
* Update `nan` to 2.11.1
# 3.0.1 (2018-09-20)
* Update `nan` to 2.11.0
# 3.0.0 (2018-07-06)
* Drop support for NodeJS <= 4
# 2.0.1 (2018-04-20)
* Update `node-pre-gyp` to allow downloading prebuilt modules
# 2.0.0 (2018-04-07)
* Make `2b` the default bcrypt version
# 1.1.0-napi (2018-01-21)
* Initial support for [N-API](https://nodejs.org/api/n-api.html)
# 1.0.3 (2017-08-23)
* update to nan v2.6.2 for NodeJS 8 support
* Fix: use npm scripts instead of node-gyp directly.
# 1.0.2 (2016-12-31)
* Fix `compare` promise rejection with invalid arguments
# 1.0.1 (2016-12-07)
* Fix destructuring imports with promises
# 1.0.0 (2016-12-04)
* add Promise support (commit 2488473)
# 0.8.7 (2016-06-09)
* update nan to 2.3.5 for improved node v6 support
# 0.8.6 (2016-04-20)
* update nan for node v6 support
# 0.8.5 (2015-08-12)
* update to nan v2 (adds support for iojs 3)
# 0.8.4 (2015-07-24)
* fix deprecation warning for the Encode API
# 0.8.3 (2015-05-06)
* update nan to 1.8.4 for iojs 2.x support
# 0.8.2 (2015-03-28)
* always use callback for generating random bytes to avoid blocking
# 0.8.1 (2015-01-18)
* update NaN to 1.5.0 for iojs support
# 0.8.0 (2014-08-03)
* migrate to NAN for bindings
# v0.5.0
* Fix for issue around empty string params throwing Errors.
* Method deprecation.
* Upgrade from libeio/ev to libuv. (shtylman)
** --- NOTE --- Breaks 0.4.x compatibility
* EV_MULTIPLICITY compile flag.
# v0.4.1
* Thread safety fix around OpenSSL (GH-32). (bnoordhuis - through node)
* C++ code changes using delete and new instead of malloc and free. (shtylman)
* Compile options for speed, zoom. (shtylman)
* Move much of the type and variable checking to the JS. (shtylman)
# v0.4.0
* Added getRounds function that will tell you the number of rounds within a hash/salt
# v0.3.2
* Fix api issue with async salt gen first param
# v0.3.1
* Compile under node 0.5.x
# v0.3.0
* Internal Refactoring
* Remove pthread dependencies and locking
* Fix compiler warnings and a memory bug
# v0.2.4
* Use threadsafe functions instead of pthread mutexes
* salt validation to make sure the salt is of the correct size and format
# v0.2.3
* cygwin support
# v0.2.2
* Remove dependency on libbsd, use libssl instead
# v0.2.0
* Added async functionality
* API changes
* hashpw -> encrypt
* all old sync methods now end with _sync
* Removed libbsd(arc4random) dependency...now uses openssl which is more widely spread
# v0.1.2
* Security fix. Wasn't reading rounds in properly and was always only using 4 rounds

61
node_modules/bcrypt/binding.gyp generated vendored

@ -1,61 +0,0 @@
{
"variables": {
"NODE_VERSION%":"<!(node -p \"process.versions.node.split(\\\".\\\")[0]\")"
},
'targets': [
{
'target_name': 'bcrypt_lib',
'sources': [
'src/blowfish.cc',
'src/bcrypt.cc',
'src/bcrypt_node.cc'
],
'defines': [
'_GNU_SOURCE',
],
'cflags!': [ '-fno-exceptions' ],
'cflags_cc!': [ '-fno-exceptions' ],
'include_dirs' : [
"<!@(node -p \"require('node-addon-api').include\")"
],
'dependencies': ["<!(node -p \"require('node-addon-api').gyp\")"],
'conditions': [
['OS=="win"', {
"msvs_settings": {
"VCCLCompilerTool": {
"ExceptionHandling": 1
}
},
'defines': [
'uint=unsigned int',
]
}],
['OS=="mac"', {
'cflags+': ['-fvisibility=hidden'],
"xcode_settings": {
"CLANG_CXX_LIBRARY": "libc++",
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES', # -fvisibility=hidden
}
}],
['OS=="zos" and NODE_VERSION <= 16',{
'cflags': [
'-qascii',
],
'defines': ["NAPI_DISABLE_CPP_EXCEPTIONS"],
}],
],
},
{
"target_name": "action_after_build",
"type": "none",
"dependencies": [ "<(module_name)" ],
"copies": [
{
"files": [ "<(PRODUCT_DIR)/<(module_name).node" ],
"destination": "<(module_path)"
}
]
}
]
}

@ -1,28 +0,0 @@
var bcrypt = require('../bcrypt');
(async () => {
const start = Date.now();
// genSalt
const salt = await bcrypt.genSalt(10)
console.log('salt: ' + salt);
console.log('salt cb end: ' + (Date.now() - start) + 'ms');
// hash
const crypted = await bcrypt.hash('test', salt)
console.log('crypted: ' + crypted);
console.log('crypted cb end: ' + (Date.now() - start) + 'ms');
console.log('rounds used from hash:', bcrypt.getRounds(crypted));
// compare
const res = await bcrypt.compare('test', crypted)
console.log('compared true: ' + res);
console.log('compared true cb end: ' + (Date.now() - start) + 'ms');
    // compare
    const res2 = await bcrypt.compare('bacon', crypted)
    console.log('compared false: ' + res2);
    console.log('compared false cb end: ' + (Date.now() - start) + 'ms');
console.log('end: ' + (Date.now() - start) + 'ms');
})();

Binary file not shown.

67
node_modules/bcrypt/package.json generated vendored

@ -1,67 +0,0 @@
{
"name": "bcrypt",
"description": "A bcrypt library for NodeJS.",
"keywords": [
"bcrypt",
"password",
"auth",
"authentication",
"encryption",
"crypt",
"crypto"
],
"main": "./bcrypt",
"version": "5.1.1",
"author": "Nick Campbell (https://github.com/ncb000gt)",
"engines": {
"node": ">= 10.0.0"
},
"repository": {
"type": "git",
"url": "https://github.com/kelektiv/node.bcrypt.js.git"
},
"license": "MIT",
"bugs": {
"url": "https://github.com/kelektiv/node.bcrypt.js/issues"
},
"scripts": {
"test": "npm ci --build-from-source && jest",
"install": "node-pre-gyp install --fallback-to-build"
},
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.11",
"node-addon-api": "^5.0.0"
},
"devDependencies": {
"jest": "^29.6.2"
},
"contributors": [
"Antonio Salazar Cardozo <savedfastcool@gmail.com> (https://github.com/Shadowfiend)",
"Van Nguyen <the.gol.effect@gmail.com> (https://github.com/thegoleffect)",
"David Trejo <david@dtrejo.com> (https://github.com/dtrejo)",
"Ben Glow <glen.low@pixelglow.com> (https://github.com/pixelglow)",
"NewITFarmer.com <> (https://github.com/newitfarmer)",
"Alfred Westerveld <alfredwesterveld@gmail.com> (https://github.com/alfredwesterveld)",
"Vincent Côté-Roy <vincentcr@gmail.com> (https://github.com/vincentcr)",
"Lloyd Hilaiel <lloyd@hilaiel.com> (https://github.com/lloyd)",
"Roman Shtylman <shtylman@gmail.com> (https://github.com/shtylman)",
"Vadim Graboys <dimva13@gmail.com> (https://github.com/vadimg)",
"Ben Noorduis <> (https://github.com/bnoordhuis)",
"Nate Rajlich <nathan@tootallnate.net> (https://github.com/tootallnate)",
"Sean McArthur <sean.monstar@gmail.com> (https://github.com/seanmonstar)",
"Fanie Oosthuysen <fanie.oosthuysen@gmail.com> (https://github.com/weareu)",
"Amitosh Swain Mahapatra <amitosh.swain@gmail.com> (https://github.com/Agathver)",
"Corbin Crutchley <crutchcorn@gmail.com> (https://github.com/crutchcorn)",
"Nicola Del Gobbo <nicoladelgobbo@gmail.com> (https://github.com/NickNaso)"
],
"binary": {
"module_name": "bcrypt_lib",
"module_path": "./lib/binding/napi-v{napi_build_version}",
"package_name": "{module_name}-v{version}-napi-v{napi_build_version}-{platform}-{arch}-{libc}.tar.gz",
"host": "https://github.com",
"remote_path": "kelektiv/node.bcrypt.js/releases/download/v{version}",
"napi_versions": [
3
]
}
}

163
node_modules/detect-libc/README.md generated vendored

@ -1,163 +0,0 @@
# detect-libc
Node.js module to detect details of the C standard library (libc)
implementation provided by a given Linux system.
Currently supports detection of GNU glibc and MUSL libc.
Provides asynchronous and synchronous functions for the
family (e.g. `glibc`, `musl`) and version (e.g. `1.23`, `1.2.3`).
The version numbers of libc implementations
are not guaranteed to be semver-compliant.
For previous v1.x releases, please see the
[v1](https://github.com/lovell/detect-libc/tree/v1) branch.
## Install
```sh
npm install detect-libc
```
## API
### GLIBC
```ts
const GLIBC: string = 'glibc';
```
A String constant containing the value `glibc`.
### MUSL
```ts
const MUSL: string = 'musl';
```
A String constant containing the value `musl`.
### family
```ts
function family(): Promise<string | null>;
```
Resolves asynchronously with:
* `glibc` or `musl` when the libc family can be determined
* `null` when the libc family cannot be determined
* `null` when run on a non-Linux platform
```js
const { family, GLIBC, MUSL } = require('detect-libc');
switch (await family()) {
case GLIBC: ...
case MUSL: ...
case null: ...
}
```
### familySync
```ts
function familySync(): string | null;
```
Synchronous version of `family()`.
```js
const { familySync, GLIBC, MUSL } = require('detect-libc');
switch (familySync()) {
case GLIBC: ...
case MUSL: ...
case null: ...
}
```
### version
```ts
function version(): Promise<string | null>;
```
Resolves asynchronously with:
* The version when it can be determined
* `null` when the libc family cannot be determined
* `null` when run on a non-Linux platform
```js
const { version } = require('detect-libc');
const v = await version();
if (v) {
const [major, minor, patch] = v.split('.');
}
```
### versionSync
```ts
function versionSync(): string | null;
```
Synchronous version of `version()`.
```js
const { versionSync } = require('detect-libc');
const v = versionSync();
if (v) {
const [major, minor, patch] = v.split('.');
}
```
### isNonGlibcLinux
```ts
function isNonGlibcLinux(): Promise<boolean>;
```
Resolves asynchronously with:
* `false` when the libc family is `glibc`
* `true` when the libc family is not `glibc`
* `false` when run on a non-Linux platform
```js
const { isNonGlibcLinux } = require('detect-libc');
if (await isNonGlibcLinux()) { ... }
```
### isNonGlibcLinuxSync
```ts
function isNonGlibcLinuxSync(): boolean;
```
Synchronous version of `isNonGlibcLinux()`.
```js
const { isNonGlibcLinuxSync } = require('detect-libc');
if (isNonGlibcLinuxSync()) { ... }
```
## Licensing
Copyright 2017 Lovell Fuller and others.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0.html)
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@ -1,14 +0,0 @@
// Copyright 2017 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0
export const GLIBC: 'glibc';
export const MUSL: 'musl';
export function family(): Promise<string | null>;
export function familySync(): string | null;
export function isNonGlibcLinux(): Promise<boolean>;
export function isNonGlibcLinuxSync(): boolean;
export function version(): Promise<string | null>;
export function versionSync(): string | null;

@ -1,267 +0,0 @@
// Copyright 2017 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0
'use strict';
const childProcess = require('child_process');
const { isLinux, getReport } = require('./process');
const { LDD_PATH, readFile, readFileSync } = require('./filesystem');
let cachedFamilyFilesystem;
let cachedVersionFilesystem;
const command = 'getconf GNU_LIBC_VERSION 2>&1 || true; ldd --version 2>&1 || true';
let commandOut = '';
const safeCommand = () => {
if (!commandOut) {
return new Promise((resolve) => {
childProcess.exec(command, (err, out) => {
commandOut = err ? ' ' : out;
resolve(commandOut);
});
});
}
return commandOut;
};
const safeCommandSync = () => {
if (!commandOut) {
try {
commandOut = childProcess.execSync(command, { encoding: 'utf8' });
} catch (_err) {
commandOut = ' ';
}
}
return commandOut;
};
/**
* A String constant containing the value `glibc`.
* @type {string}
* @public
*/
const GLIBC = 'glibc';
/**
* A Regexp constant to get the GLIBC Version.
 * @type {RegExp}
*/
const RE_GLIBC_VERSION = /LIBC[a-z0-9 \-).]*?(\d+\.\d+)/i;
/**
* A String constant containing the value `musl`.
* @type {string}
* @public
*/
const MUSL = 'musl';
const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-');
const familyFromReport = () => {
const report = getReport();
if (report.header && report.header.glibcVersionRuntime) {
return GLIBC;
}
if (Array.isArray(report.sharedObjects)) {
if (report.sharedObjects.some(isFileMusl)) {
return MUSL;
}
}
return null;
};
const familyFromCommand = (out) => {
const [getconf, ldd1] = out.split(/[\r\n]+/);
if (getconf && getconf.includes(GLIBC)) {
return GLIBC;
}
if (ldd1 && ldd1.includes(MUSL)) {
return MUSL;
}
return null;
};
const getFamilyFromLddContent = (content) => {
if (content.includes('musl')) {
return MUSL;
}
if (content.includes('GNU C Library')) {
return GLIBC;
}
return null;
};
const familyFromFilesystem = async () => {
if (cachedFamilyFilesystem !== undefined) {
return cachedFamilyFilesystem;
}
cachedFamilyFilesystem = null;
try {
const lddContent = await readFile(LDD_PATH);
cachedFamilyFilesystem = getFamilyFromLddContent(lddContent);
} catch (e) {}
return cachedFamilyFilesystem;
};
const familyFromFilesystemSync = () => {
if (cachedFamilyFilesystem !== undefined) {
return cachedFamilyFilesystem;
}
cachedFamilyFilesystem = null;
try {
const lddContent = readFileSync(LDD_PATH);
cachedFamilyFilesystem = getFamilyFromLddContent(lddContent);
} catch (e) {}
return cachedFamilyFilesystem;
};
/**
* Resolves with the libc family when it can be determined, `null` otherwise.
* @returns {Promise<?string>}
*/
const family = async () => {
let family = null;
if (isLinux()) {
family = await familyFromFilesystem();
if (!family) {
family = familyFromReport();
}
if (!family) {
const out = await safeCommand();
family = familyFromCommand(out);
}
}
return family;
};
/**
* Returns the libc family when it can be determined, `null` otherwise.
* @returns {?string}
*/
const familySync = () => {
let family = null;
if (isLinux()) {
family = familyFromFilesystemSync();
if (!family) {
family = familyFromReport();
}
if (!family) {
const out = safeCommandSync();
family = familyFromCommand(out);
}
}
return family;
};
/**
* Resolves `true` only when the platform is Linux and the libc family is not `glibc`.
* @returns {Promise<boolean>}
*/
const isNonGlibcLinux = async () => isLinux() && await family() !== GLIBC;
/**
* Returns `true` only when the platform is Linux and the libc family is not `glibc`.
* @returns {boolean}
*/
const isNonGlibcLinuxSync = () => isLinux() && familySync() !== GLIBC;
const versionFromFilesystem = async () => {
if (cachedVersionFilesystem !== undefined) {
return cachedVersionFilesystem;
}
cachedVersionFilesystem = null;
try {
const lddContent = await readFile(LDD_PATH);
const versionMatch = lddContent.match(RE_GLIBC_VERSION);
if (versionMatch) {
cachedVersionFilesystem = versionMatch[1];
}
} catch (e) {}
return cachedVersionFilesystem;
};
const versionFromFilesystemSync = () => {
if (cachedVersionFilesystem !== undefined) {
return cachedVersionFilesystem;
}
cachedVersionFilesystem = null;
try {
const lddContent = readFileSync(LDD_PATH);
const versionMatch = lddContent.match(RE_GLIBC_VERSION);
if (versionMatch) {
cachedVersionFilesystem = versionMatch[1];
}
} catch (e) {}
return cachedVersionFilesystem;
};
const versionFromReport = () => {
const report = getReport();
if (report.header && report.header.glibcVersionRuntime) {
return report.header.glibcVersionRuntime;
}
return null;
};
const versionSuffix = (s) => s.trim().split(/\s+/)[1];
const versionFromCommand = (out) => {
const [getconf, ldd1, ldd2] = out.split(/[\r\n]+/);
if (getconf && getconf.includes(GLIBC)) {
return versionSuffix(getconf);
}
if (ldd1 && ldd2 && ldd1.includes(MUSL)) {
return versionSuffix(ldd2);
}
return null;
};
/**
* Resolves with the libc version when it can be determined, `null` otherwise.
* @returns {Promise<?string>}
*/
const version = async () => {
let version = null;
if (isLinux()) {
version = await versionFromFilesystem();
if (!version) {
version = versionFromReport();
}
if (!version) {
const out = await safeCommand();
version = versionFromCommand(out);
}
}
return version;
};
/**
* Returns the libc version when it can be determined, `null` otherwise.
* @returns {?string}
*/
const versionSync = () => {
let version = null;
if (isLinux()) {
version = versionFromFilesystemSync();
if (!version) {
version = versionFromReport();
}
if (!version) {
const out = safeCommandSync();
version = versionFromCommand(out);
}
}
return version;
};
module.exports = {
GLIBC,
MUSL,
family,
familySync,
isNonGlibcLinux,
isNonGlibcLinuxSync,
version,
versionSync
};

@ -1,24 +0,0 @@
// Copyright 2017 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0
'use strict';
const isLinux = () => process.platform === 'linux';
let report = null;
const getReport = () => {
if (!report) {
/* istanbul ignore next */
if (isLinux() && process.report) {
const orig = process.report.excludeNetwork;
process.report.excludeNetwork = true;
report = process.report.getReport();
process.report.excludeNetwork = orig;
} else {
report = {};
}
}
return report;
};
module.exports = { isLinux, getReport };

@ -1,40 +0,0 @@
{
"name": "detect-libc",
"version": "2.0.3",
"description": "Node.js module to detect the C standard library (libc) implementation family and version",
"main": "lib/detect-libc.js",
"files": [
"lib/",
"index.d.ts"
],
"scripts": {
"test": "semistandard && nyc --reporter=text --check-coverage --branches=100 ava test/unit.js",
"bench": "node benchmark/detect-libc",
"bench:calls": "node benchmark/call-familySync.js && sleep 1 && node benchmark/call-isNonGlibcLinuxSync.js && sleep 1 && node benchmark/call-versionSync.js"
},
"repository": {
"type": "git",
"url": "git://github.com/lovell/detect-libc"
},
"keywords": [
"libc",
"glibc",
"musl"
],
"author": "Lovell Fuller <npm@lovell.info>",
"contributors": [
"Niklas Salmoukas <niklas@salmoukas.com>",
"Vinícius Lourenço <vinyygamerlol@gmail.com>"
],
"license": "Apache-2.0",
"devDependencies": {
"ava": "^2.4.0",
"benchmark": "^2.1.4",
"nyc": "^15.1.0",
"proxyquire": "^2.1.3",
"semistandard": "^14.2.3"
},
"engines": {
"node": ">=8"
}
}

15
node_modules/minipass/LICENSE generated vendored

@ -1,15 +0,0 @@
The ISC License
Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

769
node_modules/minipass/README.md generated vendored

@ -1,769 +0,0 @@
# minipass
A _very_ minimal implementation of a [PassThrough
stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough)
[It's very
fast](https://docs.google.com/spreadsheets/d/1K_HR5oh3r80b8WVMWCPPjfuWXUgfkmhlX7FGI6JJ8tY/edit?usp=sharing)
for objects, strings, and buffers.
Supports `pipe()`ing (including multi-`pipe()` and backpressure
transmission), buffering data until either a `data` event handler
or `pipe()` is added (so you don't lose the first chunk), and
most other cases where PassThrough is a good idea.
There is a `read()` method, but it's much more efficient to
consume data from this stream via `'data'` events or by calling
`pipe()` into some other stream. Calling `read()` requires the
buffer to be flattened in some cases, which requires copying
memory.
If you set `objectMode: true` in the options, then whatever is
written will be emitted. Otherwise, it'll do a minimal amount of
Buffer copying to ensure proper Streams semantics when `read(n)`
is called.
`objectMode` can also be set by doing `stream.objectMode = true`,
or by writing any non-string/non-buffer data. `objectMode` cannot
be set to false once it is set.
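
For example, a minimal sketch of object mode:

```js
import { Minipass } from 'minipass'
const mp = new Minipass({ objectMode: true })
mp.on('data', obj => console.log(obj.a)) // prints: 1
mp.write({ a: 1 }) // the object is emitted as-is, no Buffer conversion
```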
This is not a `through` or `through2` stream. It doesn't
transform the data, it just passes it right through. If you want
to transform the data, extend the class, and override the
`write()` method. Once you're done transforming the data however
you want, call `super.write()` with the transform output.
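
For example, a minimal sketch of a transforming subclass (the class
name is illustrative):

```js
import { Minipass } from 'minipass'
class Upper extends Minipass {
  write (chunk, encoding, cb) {
    return super.write(String(chunk).toUpperCase(), encoding, cb)
  }
}
const upper = new Upper({ encoding: 'utf8' })
upper.on('data', chunk => console.log(chunk)) // prints: HELLO
upper.write('hello')
```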
For some examples of streams that extend Minipass in various
ways, check out:
- [minizlib](http://npm.im/minizlib)
- [fs-minipass](http://npm.im/fs-minipass)
- [tar](http://npm.im/tar)
- [minipass-collect](http://npm.im/minipass-collect)
- [minipass-flush](http://npm.im/minipass-flush)
- [minipass-pipeline](http://npm.im/minipass-pipeline)
- [tap](http://npm.im/tap)
- [tap-parser](http://npm.im/tap-parser)
- [treport](http://npm.im/treport)
- [minipass-fetch](http://npm.im/minipass-fetch)
- [pacote](http://npm.im/pacote)
- [make-fetch-happen](http://npm.im/make-fetch-happen)
- [cacache](http://npm.im/cacache)
- [ssri](http://npm.im/ssri)
- [npm-registry-fetch](http://npm.im/npm-registry-fetch)
- [minipass-json-stream](http://npm.im/minipass-json-stream)
- [minipass-sized](http://npm.im/minipass-sized)
## Differences from Node.js Streams
There are several things that make Minipass streams different
from (and in some ways superior to) Node.js core streams.
Please read these caveats if you are familiar with node-core
streams and intend to use Minipass streams in your programs.
You can avoid most of these differences entirely (for a very
small performance penalty) by setting `{async: true}` in the
constructor options.
### Timing
Minipass streams are designed to support synchronous use-cases.
Thus, data is emitted as soon as it is available, always. It is
buffered until read, but no longer. Another way to look at it is
that Minipass streams are exactly as synchronous as the logic
that writes into them.
This can be surprising if your code relies on
`PassThrough.write()` always providing data on the next tick
rather than the current one, or being able to call `resume()` and
not have the entire buffer disappear immediately.
However, without this synchronicity guarantee, there would be no
way for Minipass to achieve the speeds it does, or support the
synchronous use cases that it does. Simply put, waiting takes
time.
This non-deferring approach makes Minipass streams much easier to
reason about, especially in the context of Promises and other
flow-control mechanisms.
Example:
```js
// hybrid module, either works
import { Minipass } from 'minipass'
// or:
const { Minipass } = require('minipass')
const stream = new Minipass()
stream.on('data', () => console.log('data event'))
console.log('before write')
stream.write('hello')
console.log('after write')
// output:
// before write
// data event
// after write
```
### Exception: Async Opt-In
If you wish to have a Minipass stream with behavior that more
closely mimics Node.js core streams, you can set the stream in
async mode either by setting `async: true` in the constructor
options, or by setting `stream.async = true` later on.
```js
// hybrid module, either works
import { Minipass } from 'minipass'
// or:
const { Minipass } = require('minipass')
const asyncStream = new Minipass({ async: true })
asyncStream.on('data', () => console.log('data event'))
console.log('before write')
asyncStream.write('hello')
console.log('after write')
// output:
// before write
// after write
// data event <-- this is deferred until the next tick
```
Switching _out_ of async mode is unsafe, as it could cause data
corruption, and so is not enabled. Example:
```js
import { Minipass } from 'minipass'
const stream = new Minipass({ encoding: 'utf8' })
stream.on('data', chunk => console.log(chunk))
stream.async = true
console.log('before writes')
stream.write('hello')
setStreamSyncAgainSomehow(stream) // <-- this doesn't actually exist!
stream.write('world')
console.log('after writes')
// hypothetical output would be:
// before writes
// world
// after writes
// hello
// NOT GOOD!
```
To avoid this problem, once set into async mode, any attempt to
make the stream sync again will be ignored.
```js
const { Minipass } = require('minipass')
const stream = new Minipass({ encoding: 'utf8' })
stream.on('data', chunk => console.log(chunk))
stream.async = true
console.log('before writes')
stream.write('hello')
stream.async = false // <-- no-op, stream already async
stream.write('world')
console.log('after writes')
// actual output:
// before writes
// after writes
// hello
// world
```
### No High/Low Water Marks
Node.js core streams will optimistically fill up a buffer,
returning `true` on all writes until the limit is hit, even if
the data has nowhere to go. Then, they will not attempt to draw
more data in until the buffer size dips below a minimum value.
Minipass streams are much simpler. The `write()` method will
return `true` if the data has somewhere to go (which is to say,
given the timing guarantees, that the data is already there by
the time `write()` returns).
If the data has nowhere to go, then `write()` returns false, and
the data sits in a buffer, to be drained out immediately as soon
as anyone consumes it.
Since nothing is ever buffered unnecessarily, there is much less
copying data, and less bookkeeping about buffer capacity levels.
### Hazards of Buffering (or: Why Minipass Is So Fast)
Since data written to a Minipass stream is immediately written
all the way through the pipeline, and `write()` always returns
true/false based on whether the data was fully flushed,
backpressure is communicated immediately to the upstream caller.
This minimizes buffering.
Consider this case:
```js
const { PassThrough } = require('stream')
const p1 = new PassThrough({ highWaterMark: 1024 })
const p2 = new PassThrough({ highWaterMark: 1024 })
const p3 = new PassThrough({ highWaterMark: 1024 })
const p4 = new PassThrough({ highWaterMark: 1024 })
p1.pipe(p2).pipe(p3).pipe(p4)
p4.on('data', () => console.log('made it through'))
// this returns false and buffers, then writes to p2 on next tick (1)
// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2)
// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3)
// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain'
// on next tick (4)
// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and
// 'drain' on next tick (5)
// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6)
// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next
// tick (7)
p1.write(Buffer.alloc(2048)) // returns false
```
Along the way, the data was buffered and deferred at each stage,
and multiple event deferrals happened, for an unblocked pipeline
where it was perfectly safe to write all the way through!
Furthermore, setting a `highWaterMark` of `1024` might lead
someone reading the code to think an advisory maximum of 1KiB is
being set for the pipeline. However, the actual advisory
buffering level is the _sum_ of `highWaterMark` values, since
each one has its own bucket.
Consider the Minipass case:
```js
const m1 = new Minipass()
const m2 = new Minipass()
const m3 = new Minipass()
const m4 = new Minipass()
m1.pipe(m2).pipe(m3).pipe(m4)
m4.on('data', () => console.log('made it through'))
// m1 is flowing, so it writes the data to m2 immediately
// m2 is flowing, so it writes the data to m3 immediately
// m3 is flowing, so it writes the data to m4 immediately
// m4 is flowing, so it fires the 'data' event immediately, returns true
// m4's write returned true, so m3 is still flowing, returns true
// m3's write returned true, so m2 is still flowing, returns true
// m2's write returned true, so m1 is still flowing, returns true
// No event deferrals or buffering along the way!
m1.write(Buffer.alloc(2048)) // returns true
```
It is extremely unlikely that you _don't_ want to buffer any data
written, or _ever_ buffer data that can be flushed all the way
through. Neither node-core streams nor Minipass ever fail to
buffer written data, but node-core streams do a lot of
unnecessary buffering and pausing.
As always, the faster implementation is the one that does less
stuff and waits less time to do it.
### Immediately emit `end` for empty streams (when not paused)
If a stream is not paused, and `end()` is called before writing
any data into it, then it will emit `end` immediately.
If you have logic that occurs on the `end` event which you don't
want to potentially happen immediately (for example, closing file
descriptors, moving on to the next entry in an archive parse
stream, etc.) then be sure to call `stream.pause()` on creation,
and then `stream.resume()` once you are ready to respond to the
`end` event.
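
A minimal sketch of that pattern:

```js
import { Minipass } from 'minipass'
const stream = new Minipass()
stream.pause() // defer the 'end' event until we are ready for it
stream.end()
stream.on('end', () => console.log('end event'))
stream.resume() // prints: end event
```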
However, this is _usually_ not a problem because:
### Emit `end` When Asked
One hazard of immediately emitting `'end'` is that you may not
yet have had a chance to add a listener. In order to avoid this
hazard, Minipass streams safely re-emit the `'end'` event if a
new listener is added after `'end'` has been emitted.
Ie, if you do `stream.on('end', someFunction)`, and the stream
has already emitted `end`, then it will call the handler right
away. (You can think of this somewhat like attaching a new
`.then(fn)` to a previously-resolved Promise.)
To avoid calling handlers that would not expect multiple `end`
events, all listeners are removed from the `'end'` event whenever
it is emitted.
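A quick sketch of this behavior:
```js
const mp = new Minipass()
mp.end() // empty stream, so 'end' is emitted immediately
// a listener added afterwards is still called, exactly once:
mp.on('end', () => console.log('still notified'))
// and each later listener gets its own single call:
mp.on('end', () => console.log('me too'))
```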
### Emit `error` When Asked
The most recent error object passed to the `'error'` event is
stored on the stream. If a new `'error'` event handler is added,
and an error was previously emitted, then the event handler will
be called immediately (or on `process.nextTick` in the case of
async streams).
This makes it much more difficult to end up trying to interact
with a broken stream, if the error handler is added after an
error was previously emitted.
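For example:
```js
const mp = new Minipass()
mp.on('error', () => {}) // handle the error so nothing throws
mp.emit('error', new Error('oops'))
// a handler added after the fact still receives the stored error:
mp.on('error', er => console.error('late handler saw:', er.message))
```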
### Impact of "immediate flow" on Tee-streams
A "tee stream" is a stream piping to multiple destinations:
```js
const tee = new Minipass()
tee.pipe(dest1)
tee.pipe(dest2)
tee.write('foo') // goes to both destinations
```
Since Minipass streams _immediately_ process any pending data
through the pipeline when a new pipe destination is added, this
can have surprising effects, especially when a stream comes in
from some other function and may or may not have data in its
buffer.
```js
// WARNING! WILL LOSE DATA!
const src = new Minipass()
src.write('foo')
src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone
src.pipe(dest2) // gets nothing!
```
One solution is to create a dedicated tee-stream junction that
pipes to both locations, and then pipe to _that_ instead.
```js
// Safe example: tee to both places
const src = new Minipass()
src.write('foo')
const tee = new Minipass()
tee.pipe(dest1)
tee.pipe(dest2)
src.pipe(tee) // tee gets 'foo', pipes to both locations
```
The same caveat applies to `on('data')` event listeners. The
first one added will _immediately_ receive all of the data,
leaving nothing for the second:
```js
// WARNING! WILL LOSE DATA!
const src = new Minipass()
src.write('foo')
src.on('data', handler1) // receives 'foo' right away
src.on('data', handler2) // nothing to see here!
```
A dedicated tee-stream works in this case as well:
```js
// Safe example: tee to both data handlers
const src = new Minipass()
src.write('foo')
const tee = new Minipass()
tee.on('data', handler1)
tee.on('data', handler2)
src.pipe(tee)
```
All of the hazards in this section are avoided by setting `{
async: true }` in the Minipass constructor, or by setting
`stream.async = true` afterwards. Note that this does add some
overhead, so should only be done in cases where you are willing
to lose a bit of performance in order to avoid having to refactor
program logic.
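For instance, a sketch of the lossy tee case above made safe with
`async: true` (`dest1` and `dest2` stand in for any writable
streams):
```js
const src = new Minipass({ async: true })
src.write('foo')
src.pipe(dest1) // data emission is deferred to the next tick...
src.pipe(dest2) // ...so both destinations receive 'foo'
```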
## USAGE
It's a stream! Use it like a stream and it'll most likely do what
you want.
```js
import { Minipass } from 'minipass'
const mp = new Minipass(options) // optional: { encoding, objectMode }
mp.write('foo')
mp.pipe(someOtherStream)
mp.end('bar')
```
### OPTIONS
- `encoding` How would you like the data coming _out_ of the
stream to be encoded? Accepts any values that can be passed to
`Buffer.toString()`.
- `objectMode` Emit data exactly as it comes in. This will be
flipped on by default if you write() something other than a
string or Buffer at any point. Setting `objectMode: true` will
prevent setting any encoding value.
- `async` Defaults to `false`. Set to `true` to defer data
emission until next tick. This reduces performance slightly,
but makes Minipass streams use timing behavior closer to Node
core streams. See [Timing](#timing) for more details.
- `signal` An `AbortSignal` that will cause the stream to unhook
itself from everything and become as inert as possible. Note
that providing a `signal` parameter will make `'error'` events
no longer throw if they are unhandled, but they will still be
emitted to handlers if any are attached.
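For instance, a minimal sketch of the `signal` option using the
built-in `AbortController` available in modern Node versions:
```js
const ac = new AbortController()
const mp = new Minipass({ signal: ac.signal })
mp.on('error', er => console.error('aborted:', er.message))
ac.abort() // destroys the stream; the error is an AbortError
mp.write('foo') // returns false; the aborted stream is inert
```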
### API
Implements the user-facing portions of Node.js's `Readable` and
`Writable` streams.
### Methods
- `write(chunk, [encoding], [callback])` - Put data in. (Note
  that, in the base Minipass class, the same data will come out.)
  Returns `false` if the stream will buffer the next write, or
  `true` if it's still in "flowing" mode.
- `end([chunk, [encoding]], [callback])` - Signal that you have
no more data to write. This will queue an `end` event to be
fired when all the data has been consumed.
- `setEncoding(encoding)` - Set the encoding for data coming out
  of the stream. This can only be done once.
- `pause()` - No more data for a while, please. This also
prevents `end` from being emitted for empty streams until the
stream is resumed.
- `resume()` - Resume the stream. If there's data in the buffer,
  it is all flushed out of the buffer and emitted. Any buffered
  events are immediately emitted.
- `pipe(dest)` - Send all output to the stream provided. When
data is emitted, it is immediately written to any and all pipe
destinations. (Or written on next tick in `async` mode.)
- `unpipe(dest)` - Stop piping to the destination stream. This is
immediate, meaning that any asynchronously queued data will
_not_ make it to the destination when running in `async` mode.
- `options.end` - Boolean, end the destination stream when the
source stream ends. Default `true`.
- `options.proxyErrors` - Boolean, proxy `error` events from
the source stream to the destination stream. Note that errors
are _not_ proxied after the pipeline terminates, either due
to the source emitting `'end'` or manually unpiping with
`src.unpipe(dest)`. Default `false`.
- `on(ev, fn)`, `emit(ev, ...data)` - Minipass streams are
  EventEmitters. Some events are given special treatment,
  however. (See below under "events".)
- `promise()` - Returns a Promise that resolves when the stream
emits `end`, or rejects if the stream emits `error`.
- `collect()` - Returns a Promise that resolves on `end` with an
  array containing each chunk of data that was emitted, or
  rejects if the stream emits `error`. Note that this consumes
  the stream data.
- `concat()` - Same as `collect()`, but concatenates the data
into a single Buffer object. Will reject the returned promise
if the stream is in objectMode, or if it goes into objectMode
by the end of the data.
- `read(n)` - Consume `n` bytes of data out of the buffer. If `n`
  is not provided, then consume all of it. If `n` bytes are not
  available, then it returns null. **Note** consuming streams in
  this way is less efficient, and can lead to unnecessary Buffer
  copying. (See the short sketch after this list.)
- `destroy([er])` - Destroy the stream. If an error is provided,
then an `'error'` event is emitted. If the stream has a
`close()` method, and has not emitted a `'close'` event yet,
then `stream.close()` will be called. Any Promises returned by
`.promise()`, `.collect()` or `.concat()` will be rejected.
After being destroyed, writing to the stream will emit an
error. No more data will be emitted if the stream is destroyed,
even if it was previously buffered.
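A quick sketch of `read(n)` on a string-mode stream, as mentioned
above:
```js
const mp = new Minipass({ encoding: 'utf8' })
mp.write('foobar') // buffered, since nothing is consuming yet
mp.read(3) // 'foo'
mp.read() // 'bar' (consumes the rest)
mp.read() // null (buffer is empty)
```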
### Properties
- `bufferLength` Read-only. Total number of bytes buffered, or in
the case of objectMode, the total number of objects.
- `encoding` The encoding that has been set. (Setting this is
equivalent to calling `setEncoding(enc)` and has the same
prohibition against setting multiple times.)
- `flowing` Read-only. Boolean indicating whether a chunk written
  to the stream will be immediately emitted. (See the sketch after
  this list.)
- `emittedEnd` Read-only. Boolean indicating whether the end-ish
  events (ie, `end`, `prefinish`, `finish`) have been emitted.
  Note that listening on any end-ish event will immediately
  re-emit it if it has already been emitted.
- `writable` Whether the stream is writable. Default `true`. Set
  to `false` when `end()` is called.
- `readable` Whether the stream is readable. Default `true`.
- `pipes` An array of Pipe objects referencing streams that this
stream is piping into.
- `destroyed` A getter that indicates whether the stream was
destroyed.
- `paused` True if the stream has been explicitly paused,
otherwise false.
- `objectMode` Indicates whether the stream is in `objectMode`.
Once set to `true`, it cannot be set to `false`.
- `aborted` Readonly property set when the `AbortSignal`
dispatches an `abort` event.
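A short sketch of how `flowing` and `bufferLength` change as a
consumer attaches:
```js
const mp = new Minipass()
mp.flowing // false: nothing is consuming yet
mp.write('foo') // returns false, and the chunk is buffered
mp.bufferLength // 3
mp.on('data', () => {}) // a data listener resumes the stream
mp.flowing // true
mp.bufferLength // 0: the buffered chunk was emitted
```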
### Events
- `data` Emitted when there's data to read. Argument is the data
to read. This is never emitted while not flowing. If a listener
is attached, that will resume the stream.
- `end` Emitted when there's no more data to read. This will be
emitted immediately for empty streams when `end()` is called.
If a listener is attached, and `end` was already emitted, then
it will be emitted again. All listeners are removed when `end`
is emitted.
- `prefinish` An end-ish event that follows the same logic as
`end` and is emitted in the same conditions where `end` is
emitted. Emitted after `'end'`.
- `finish` An end-ish event that follows the same logic as `end`
and is emitted in the same conditions where `end` is emitted.
Emitted after `'prefinish'`.
- `close` An indication that an underlying resource has been
released. Minipass does not emit this event, but will defer it
until after `end` has been emitted, since it throws off some
stream libraries otherwise.
- `drain` Emitted when the internal buffer empties, and it is
again suitable to `write()` into the stream.
- `readable` Emitted when data is buffered and ready to be read
by a consumer.
- `resume` Emitted when stream changes state from buffering to
flowing mode. (Ie, when `resume` is called, `pipe` is called,
or a `data` event listener is added.)
### Static Methods
- `Minipass.isStream(stream)` Returns `true` if the argument is a
stream, and false otherwise. To be considered a stream, the
object must be either an instance of Minipass, or an
EventEmitter that has either a `pipe()` method, or both
`write()` and `end()` methods. (Pretty much any stream in
node-land will return `true` for this.)
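For example:
```js
Minipass.isStream(new Minipass()) // true
Minipass.isStream(process.stdout) // true
Minipass.isStream(new (require('events').EventEmitter)()) // false: no stream methods
Minipass.isStream({ write: () => {}, end: () => {} }) // false: not an EventEmitter
```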
## EXAMPLES
Here are some examples of things you can do with Minipass
streams.
### simple "are you done yet" promise
```js
mp.promise().then(
  () => {
    // stream is finished
  },
  er => {
    // stream emitted an error
  }
)
```
### collecting
```js
mp.collect().then(all => {
  // all is an array of all the data emitted
  // encoding is supported in this case, so the result
  // will be a collection of strings if an encoding
  // is specified, or buffers/objects if not.
  //
  // In an async function, you may do
  // const data = await stream.collect()
})
```
### collecting into a single blob
This is a bit slower because it concatenates the data into one
chunk for you, but if you're going to do it yourself anyway, it's
convenient this way:
```js
mp.concat().then(onebigchunk => {
  // onebigchunk is a string if the stream
  // had an encoding set, or a buffer otherwise.
})
```
### iteration
You can iterate over streams synchronously or asynchronously in
platforms that support it.
Synchronous iteration will end when the currently available data
is consumed, even if the `end` event has not been reached. In
string and buffer mode, the data is concatenated, so unless
multiple writes are occurring in the same tick as the `read()`,
sync iteration loops will generally only have a single iteration.
To consume chunks in this way exactly as they have been written,
with no flattening, create the stream with the `{ objectMode:
true }` option.
```js
const mp = new Minipass({ objectMode: true })
mp.write('a')
mp.write('b')
for (let letter of mp) {
  console.log(letter) // a, b
}
mp.write('c')
mp.write('d')
for (let letter of mp) {
  console.log(letter) // c, d
}
mp.write('e')
mp.end()
for (let letter of mp) {
  console.log(letter) // e
}
for (let letter of mp) {
  console.log(letter) // nothing
}
```
Asynchronous iteration will continue until the end event is reached,
consuming all of the data.
```js
const mp = new Minipass({ encoding: 'utf8' })

// some source of some data
let i = 5
const inter = setInterval(() => {
  if (i-- > 0) mp.write(Buffer.from('foo\n', 'utf8'))
  else {
    mp.end()
    clearInterval(inter)
  }
}, 100)

// consume the data with asynchronous iteration
async function consume() {
  for await (let chunk of mp) {
    console.log(chunk)
  }
  return 'ok'
}

consume().then(res => console.log(res))
// logs `foo\n` 5 times, and then `ok`
```
### subclass that `console.log()`s everything written into it
```js
class Logger extends Minipass {
  write(chunk, encoding, callback) {
    console.log('WRITE', chunk, encoding)
    return super.write(chunk, encoding, callback)
  }
  end(chunk, encoding, callback) {
    console.log('END', chunk, encoding)
    return super.end(chunk, encoding, callback)
  }
}

someSource.pipe(new Logger()).pipe(someDest)
```
### same thing, but using an inline anonymous class
```js
// js classes are fun
someSource
  .pipe(
    new (class extends Minipass {
      emit(ev, ...data) {
        // let's also log events, because debugging some weird thing
        console.log('EMIT', ev)
        return super.emit(ev, ...data)
      }
      write(chunk, encoding, callback) {
        console.log('WRITE', chunk, encoding)
        return super.write(chunk, encoding, callback)
      }
      end(chunk, encoding, callback) {
        console.log('END', chunk, encoding)
        return super.end(chunk, encoding, callback)
      }
    })()
  )
  .pipe(someDest)
```
### subclass that defers 'end' for some reason
```js
class SlowEnd extends Minipass {
  emit(ev, ...args) {
    if (ev === 'end') {
      console.log('going to end, hold on a sec')
      setTimeout(() => {
        console.log('ok, ready to end now')
        super.emit('end', ...args)
      }, 100)
    } else {
      return super.emit(ev, ...args)
    }
  }
}
```
### transform that creates newline-delimited JSON
```js
class NDJSONEncode extends Minipass {
  write(obj, cb) {
    try {
      // JSON.stringify can throw, emit an error on that
      return super.write(JSON.stringify(obj) + '\n', 'utf8', cb)
    } catch (er) {
      this.emit('error', er)
    }
  }
  end(obj, cb) {
    if (typeof obj === 'function') {
      cb = obj
      obj = undefined
    }
    if (obj !== undefined) {
      this.write(obj)
    }
    return super.end(cb)
  }
}
```
### transform that parses newline-delimited JSON
```js
class NDJSONDecode extends Minipass {
  constructor (options) {
    // always be in object mode, as far as Minipass is concerned
    super({ objectMode: true })
    this._jsonBuffer = ''
  }
  write (chunk, encoding, cb) {
    if (typeof chunk === 'string' &&
        typeof encoding === 'string' &&
        encoding !== 'utf8') {
      chunk = Buffer.from(chunk, encoding).toString()
    } else if (Buffer.isBuffer(chunk)) {
      chunk = chunk.toString()
    }
    if (typeof encoding === 'function') {
      cb = encoding
    }
    const jsonData = (this._jsonBuffer + chunk).split('\n')
    this._jsonBuffer = jsonData.pop()
    for (let i = 0; i < jsonData.length; i++) {
      try {
        // JSON.parse can throw, emit an error on that
        super.write(JSON.parse(jsonData[i]))
      } catch (er) {
        this.emit('error', er)
        continue
      }
    }
    if (cb)
      cb()
  }
}
```

152
node_modules/minipass/index.d.ts generated vendored

@ -1,152 +0,0 @@
/// <reference types="node" />
// Note: marking anything protected or private in the exported
// class will limit Minipass's ability to be used as the base
// for mixin classes.
import { EventEmitter } from 'events'
import { Stream } from 'stream'
export namespace Minipass {
export type Encoding = BufferEncoding | 'buffer' | null
export interface Writable extends EventEmitter {
end(): any
write(chunk: any, ...args: any[]): any
}
export interface Readable extends EventEmitter {
pause(): any
resume(): any
pipe(): any
}
export type DualIterable<T> = Iterable<T> & AsyncIterable<T>
export type ContiguousData =
| Buffer
| ArrayBufferLike
| ArrayBufferView
| string
export type BufferOrString = Buffer | string
export interface SharedOptions {
async?: boolean
signal?: AbortSignal
}
export interface StringOptions extends SharedOptions {
encoding: BufferEncoding
objectMode?: boolean
}
export interface BufferOptions extends SharedOptions {
encoding?: null | 'buffer'
objectMode?: boolean
}
export interface ObjectModeOptions extends SharedOptions {
objectMode: true
}
export interface PipeOptions {
end?: boolean
proxyErrors?: boolean
}
export type Options<T> = T extends string
? StringOptions
: T extends Buffer
? BufferOptions
: ObjectModeOptions
}
export class Minipass<
RType extends any = Buffer,
WType extends any = RType extends Minipass.BufferOrString
? Minipass.ContiguousData
: RType
>
extends Stream
implements Minipass.DualIterable<RType>
{
static isStream(stream: any): stream is Minipass.Readable | Minipass.Writable
readonly bufferLength: number
readonly flowing: boolean
readonly writable: boolean
readonly readable: boolean
readonly aborted: boolean
readonly paused: boolean
readonly emittedEnd: boolean
readonly destroyed: boolean
/**
* Technically writable, but mutating it can change the type,
* so is not safe to do in TypeScript.
*/
readonly objectMode: boolean
async: boolean
/**
* Note: encoding is not actually read-only, and setEncoding(enc)
* exists. However, this type definition will insist that TypeScript
* programs declare the type of a Minipass stream up front, and if
* that type is string, then an encoding MUST be set in the ctor. If
* the type is Buffer, then the encoding must be missing, or set to
* 'buffer' or null. If the type is anything else, then objectMode
* must be set in the constructor options. So there is effectively
* no allowed way that a TS program can set the encoding after
* construction, as doing so will destroy any hope of type safety.
* TypeScript does not provide many options for changing the type of
* an object at run-time, which is what changing the encoding does.
*/
readonly encoding: Minipass.Encoding
// setEncoding(encoding: Encoding): void
// Options required if not reading buffers
constructor(
...args: RType extends Buffer
? [] | [Minipass.Options<RType>]
: [Minipass.Options<RType>]
)
write(chunk: WType, cb?: () => void): boolean
write(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): boolean
read(size?: number): RType
end(cb?: () => void): this
end(chunk: any, cb?: () => void): this
end(chunk: any, encoding?: Minipass.Encoding, cb?: () => void): this
pause(): void
resume(): void
promise(): Promise<void>
collect(): Promise<RType[]>
concat(): RType extends Minipass.BufferOrString ? Promise<RType> : never
destroy(er?: any): void
pipe<W extends Minipass.Writable>(dest: W, opts?: Minipass.PipeOptions): W
unpipe<W extends Minipass.Writable>(dest: W): void
/**
* alias for on()
*/
addEventHandler(event: string, listener: (...args: any[]) => any): this
on(event: string, listener: (...args: any[]) => any): this
on(event: 'data', listener: (chunk: RType) => any): this
on(event: 'error', listener: (error: any) => any): this
on(
event:
| 'readable'
| 'drain'
| 'resume'
| 'end'
| 'prefinish'
| 'finish'
| 'close',
listener: () => any
): this
[Symbol.iterator](): Generator<RType, void, void>
[Symbol.asyncIterator](): AsyncGenerator<RType, void, void>
}

702
node_modules/minipass/index.js generated vendored

@ -1,702 +0,0 @@
'use strict'
const proc =
typeof process === 'object' && process
? process
: {
stdout: null,
stderr: null,
}
const EE = require('events')
const Stream = require('stream')
const stringdecoder = require('string_decoder')
const SD = stringdecoder.StringDecoder
const EOF = Symbol('EOF')
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
const EMITTED_END = Symbol('emittedEnd')
const EMITTING_END = Symbol('emittingEnd')
const EMITTED_ERROR = Symbol('emittedError')
const CLOSED = Symbol('closed')
const READ = Symbol('read')
const FLUSH = Symbol('flush')
const FLUSHCHUNK = Symbol('flushChunk')
const ENCODING = Symbol('encoding')
const DECODER = Symbol('decoder')
const FLOWING = Symbol('flowing')
const PAUSED = Symbol('paused')
const RESUME = Symbol('resume')
const BUFFER = Symbol('buffer')
const PIPES = Symbol('pipes')
const BUFFERLENGTH = Symbol('bufferLength')
const BUFFERPUSH = Symbol('bufferPush')
const BUFFERSHIFT = Symbol('bufferShift')
const OBJECTMODE = Symbol('objectMode')
// internal event when stream is destroyed
const DESTROYED = Symbol('destroyed')
// internal event when stream has an error
const ERROR = Symbol('error')
const EMITDATA = Symbol('emitData')
const EMITEND = Symbol('emitEnd')
const EMITEND2 = Symbol('emitEnd2')
const ASYNC = Symbol('async')
const ABORT = Symbol('abort')
const ABORTED = Symbol('aborted')
const SIGNAL = Symbol('signal')
const defer = fn => Promise.resolve().then(fn)
// TODO remove when Node v8 support drops
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
const ASYNCITERATOR =
(doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
const ITERATOR =
(doIter && Symbol.iterator) || Symbol('iterator not implemented')
// events that mean 'the stream is over'
// these are treated specially, and re-emitted
// if they are listened for after emitting.
const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
const isArrayBuffer = b =>
b instanceof ArrayBuffer ||
(typeof b === 'object' &&
b.constructor &&
b.constructor.name === 'ArrayBuffer' &&
b.byteLength >= 0)
const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
class Pipe {
constructor(src, dest, opts) {
this.src = src
this.dest = dest
this.opts = opts
this.ondrain = () => src[RESUME]()
dest.on('drain', this.ondrain)
}
unpipe() {
this.dest.removeListener('drain', this.ondrain)
}
// istanbul ignore next - only here for the prototype
proxyErrors() {}
end() {
this.unpipe()
if (this.opts.end) this.dest.end()
}
}
class PipeProxyErrors extends Pipe {
unpipe() {
this.src.removeListener('error', this.proxyErrors)
super.unpipe()
}
constructor(src, dest, opts) {
super(src, dest, opts)
this.proxyErrors = er => dest.emit('error', er)
src.on('error', this.proxyErrors)
}
}
class Minipass extends Stream {
constructor(options) {
super()
this[FLOWING] = false
// whether we're explicitly paused
this[PAUSED] = false
this[PIPES] = []
this[BUFFER] = []
this[OBJECTMODE] = (options && options.objectMode) || false
if (this[OBJECTMODE]) this[ENCODING] = null
else this[ENCODING] = (options && options.encoding) || null
if (this[ENCODING] === 'buffer') this[ENCODING] = null
this[ASYNC] = (options && !!options.async) || false
this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
this[EOF] = false
this[EMITTED_END] = false
this[EMITTING_END] = false
this[CLOSED] = false
this[EMITTED_ERROR] = null
this.writable = true
this.readable = true
this[BUFFERLENGTH] = 0
this[DESTROYED] = false
if (options && options.debugExposeBuffer === true) {
Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
}
if (options && options.debugExposePipes === true) {
Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
}
this[SIGNAL] = options && options.signal
this[ABORTED] = false
if (this[SIGNAL]) {
this[SIGNAL].addEventListener('abort', () => this[ABORT]())
if (this[SIGNAL].aborted) {
this[ABORT]()
}
}
}
get bufferLength() {
return this[BUFFERLENGTH]
}
get encoding() {
return this[ENCODING]
}
set encoding(enc) {
if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
if (
this[ENCODING] &&
enc !== this[ENCODING] &&
((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
)
throw new Error('cannot change encoding')
if (this[ENCODING] !== enc) {
this[DECODER] = enc ? new SD(enc) : null
if (this[BUFFER].length)
this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
}
this[ENCODING] = enc
}
setEncoding(enc) {
this.encoding = enc
}
get objectMode() {
return this[OBJECTMODE]
}
set objectMode(om) {
this[OBJECTMODE] = this[OBJECTMODE] || !!om
}
get ['async']() {
return this[ASYNC]
}
set ['async'](a) {
this[ASYNC] = this[ASYNC] || !!a
}
// drop everything and get out of the flow completely
[ABORT]() {
this[ABORTED] = true
this.emit('abort', this[SIGNAL].reason)
this.destroy(this[SIGNAL].reason)
}
get aborted() {
return this[ABORTED]
}
set aborted(_) {}
write(chunk, encoding, cb) {
if (this[ABORTED]) return false
if (this[EOF]) throw new Error('write after end')
if (this[DESTROYED]) {
this.emit(
'error',
Object.assign(
new Error('Cannot call write after a stream was destroyed'),
{ code: 'ERR_STREAM_DESTROYED' }
)
)
return true
}
if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
if (!encoding) encoding = 'utf8'
const fn = this[ASYNC] ? defer : f => f()
// convert array buffers and typed array views into buffers
// at some point in the future, we may want to do the opposite!
// leave strings and buffers as-is
// anything else switches us into object mode
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
if (isArrayBufferView(chunk))
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
else if (typeof chunk !== 'string')
// use the setter so we throw if we have encoding set
this.objectMode = true
}
// handle object mode up front, since it's simpler
// this yields better performance, fewer checks later.
if (this[OBJECTMODE]) {
/* istanbul ignore if - maybe impossible? */
if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
if (this.flowing) this.emit('data', chunk)
else this[BUFFERPUSH](chunk)
if (this[BUFFERLENGTH] !== 0) this.emit('readable')
if (cb) fn(cb)
return this.flowing
}
// at this point the chunk is a buffer or string
// don't buffer it up or send it to the decoder
if (!chunk.length) {
if (this[BUFFERLENGTH] !== 0) this.emit('readable')
if (cb) fn(cb)
return this.flowing
}
// fast-path writing strings of same encoding to a stream with
// an empty buffer, skipping the buffer/decoder dance
if (
typeof chunk === 'string' &&
// unless it is a string already ready for us to use
!(encoding === this[ENCODING] && !this[DECODER].lastNeed)
) {
chunk = Buffer.from(chunk, encoding)
}
if (Buffer.isBuffer(chunk) && this[ENCODING])
chunk = this[DECODER].write(chunk)
// Note: flushing CAN potentially switch us into not-flowing mode
if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
if (this.flowing) this.emit('data', chunk)
else this[BUFFERPUSH](chunk)
if (this[BUFFERLENGTH] !== 0) this.emit('readable')
if (cb) fn(cb)
return this.flowing
}
read(n) {
if (this[DESTROYED]) return null
if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
this[MAYBE_EMIT_END]()
return null
}
if (this[OBJECTMODE]) n = null
if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
}
const ret = this[READ](n || null, this[BUFFER][0])
this[MAYBE_EMIT_END]()
return ret
}
[READ](n, chunk) {
if (n === chunk.length || n === null) this[BUFFERSHIFT]()
else {
this[BUFFER][0] = chunk.slice(n)
chunk = chunk.slice(0, n)
this[BUFFERLENGTH] -= n
}
this.emit('data', chunk)
if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
return chunk
}
end(chunk, encoding, cb) {
if (typeof chunk === 'function') (cb = chunk), (chunk = null)
if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
if (chunk) this.write(chunk, encoding)
if (cb) this.once('end', cb)
this[EOF] = true
this.writable = false
// if we haven't written anything, then go ahead and emit,
// even if we're not reading.
// we'll re-emit if a new 'end' listener is added anyway.
// This makes MP more suitable to write-only use cases.
if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
return this
}
// don't let the internal resume be overwritten
[RESUME]() {
if (this[DESTROYED]) return
this[PAUSED] = false
this[FLOWING] = true
this.emit('resume')
if (this[BUFFER].length) this[FLUSH]()
else if (this[EOF]) this[MAYBE_EMIT_END]()
else this.emit('drain')
}
resume() {
return this[RESUME]()
}
pause() {
this[FLOWING] = false
this[PAUSED] = true
}
get destroyed() {
return this[DESTROYED]
}
get flowing() {
return this[FLOWING]
}
get paused() {
return this[PAUSED]
}
[BUFFERPUSH](chunk) {
if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
else this[BUFFERLENGTH] += chunk.length
this[BUFFER].push(chunk)
}
[BUFFERSHIFT]() {
if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
else this[BUFFERLENGTH] -= this[BUFFER][0].length
return this[BUFFER].shift()
}
[FLUSH](noDrain) {
do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
}
[FLUSHCHUNK](chunk) {
this.emit('data', chunk)
return this.flowing
}
pipe(dest, opts) {
if (this[DESTROYED]) return
const ended = this[EMITTED_END]
opts = opts || {}
if (dest === proc.stdout || dest === proc.stderr) opts.end = false
else opts.end = opts.end !== false
opts.proxyErrors = !!opts.proxyErrors
// piping an ended stream ends immediately
if (ended) {
if (opts.end) dest.end()
} else {
this[PIPES].push(
!opts.proxyErrors
? new Pipe(this, dest, opts)
: new PipeProxyErrors(this, dest, opts)
)
if (this[ASYNC]) defer(() => this[RESUME]())
else this[RESUME]()
}
return dest
}
unpipe(dest) {
const p = this[PIPES].find(p => p.dest === dest)
if (p) {
this[PIPES].splice(this[PIPES].indexOf(p), 1)
p.unpipe()
}
}
addListener(ev, fn) {
return this.on(ev, fn)
}
on(ev, fn) {
const ret = super.on(ev, fn)
if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
super.emit('readable')
else if (isEndish(ev) && this[EMITTED_END]) {
super.emit(ev)
this.removeAllListeners(ev)
} else if (ev === 'error' && this[EMITTED_ERROR]) {
if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
else fn.call(this, this[EMITTED_ERROR])
}
return ret
}
get emittedEnd() {
return this[EMITTED_END]
}
[MAYBE_EMIT_END]() {
if (
!this[EMITTING_END] &&
!this[EMITTED_END] &&
!this[DESTROYED] &&
this[BUFFER].length === 0 &&
this[EOF]
) {
this[EMITTING_END] = true
this.emit('end')
this.emit('prefinish')
this.emit('finish')
if (this[CLOSED]) this.emit('close')
this[EMITTING_END] = false
}
}
emit(ev, data, ...extra) {
// error and close are only events allowed after calling destroy()
if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
return
else if (ev === 'data') {
return !this[OBJECTMODE] && !data
? false
: this[ASYNC]
? defer(() => this[EMITDATA](data))
: this[EMITDATA](data)
} else if (ev === 'end') {
return this[EMITEND]()
} else if (ev === 'close') {
this[CLOSED] = true
// don't emit close before 'end' and 'finish'
if (!this[EMITTED_END] && !this[DESTROYED]) return
const ret = super.emit('close')
this.removeAllListeners('close')
return ret
} else if (ev === 'error') {
this[EMITTED_ERROR] = data
super.emit(ERROR, data)
const ret =
!this[SIGNAL] || this.listeners('error').length
? super.emit('error', data)
: false
this[MAYBE_EMIT_END]()
return ret
} else if (ev === 'resume') {
const ret = super.emit('resume')
this[MAYBE_EMIT_END]()
return ret
} else if (ev === 'finish' || ev === 'prefinish') {
const ret = super.emit(ev)
this.removeAllListeners(ev)
return ret
}
// Some other unknown event
const ret = super.emit(ev, data, ...extra)
this[MAYBE_EMIT_END]()
return ret
}
[EMITDATA](data) {
for (const p of this[PIPES]) {
if (p.dest.write(data) === false) this.pause()
}
const ret = super.emit('data', data)
this[MAYBE_EMIT_END]()
return ret
}
[EMITEND]() {
if (this[EMITTED_END]) return
this[EMITTED_END] = true
this.readable = false
if (this[ASYNC]) defer(() => this[EMITEND2]())
else this[EMITEND2]()
}
[EMITEND2]() {
if (this[DECODER]) {
const data = this[DECODER].end()
if (data) {
for (const p of this[PIPES]) {
p.dest.write(data)
}
super.emit('data', data)
}
}
for (const p of this[PIPES]) {
p.end()
}
const ret = super.emit('end')
this.removeAllListeners('end')
return ret
}
// const all = await stream.collect()
collect() {
const buf = []
if (!this[OBJECTMODE]) buf.dataLength = 0
// set the promise first, in case an error is raised
// by triggering the flow here.
const p = this.promise()
this.on('data', c => {
buf.push(c)
if (!this[OBJECTMODE]) buf.dataLength += c.length
})
return p.then(() => buf)
}
// const data = await stream.concat()
concat() {
return this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this.collect().then(buf =>
this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this[ENCODING]
? buf.join('')
: Buffer.concat(buf, buf.dataLength)
)
}
// stream.promise().then(() => done, er => emitted error)
promise() {
return new Promise((resolve, reject) => {
this.on(DESTROYED, () => reject(new Error('stream destroyed')))
this.on('error', er => reject(er))
this.on('end', () => resolve())
})
}
// for await (let chunk of stream)
[ASYNCITERATOR]() {
let stopped = false
const stop = () => {
this.pause()
stopped = true
return Promise.resolve({ done: true })
}
const next = () => {
if (stopped) return stop()
const res = this.read()
if (res !== null) return Promise.resolve({ done: false, value: res })
if (this[EOF]) return stop()
let resolve = null
let reject = null
const onerr = er => {
this.removeListener('data', ondata)
this.removeListener('end', onend)
this.removeListener(DESTROYED, ondestroy)
stop()
reject(er)
}
const ondata = value => {
this.removeListener('error', onerr)
this.removeListener('end', onend)
this.removeListener(DESTROYED, ondestroy)
this.pause()
resolve({ value: value, done: !!this[EOF] })
}
const onend = () => {
this.removeListener('error', onerr)
this.removeListener('data', ondata)
this.removeListener(DESTROYED, ondestroy)
stop()
resolve({ done: true })
}
const ondestroy = () => onerr(new Error('stream destroyed'))
return new Promise((res, rej) => {
reject = rej
resolve = res
this.once(DESTROYED, ondestroy)
this.once('error', onerr)
this.once('end', onend)
this.once('data', ondata)
})
}
return {
next,
throw: stop,
return: stop,
[ASYNCITERATOR]() {
return this
},
}
}
// for (let chunk of stream)
[ITERATOR]() {
let stopped = false
const stop = () => {
this.pause()
this.removeListener(ERROR, stop)
this.removeListener(DESTROYED, stop)
this.removeListener('end', stop)
stopped = true
return { done: true }
}
const next = () => {
if (stopped) return stop()
const value = this.read()
return value === null ? stop() : { value }
}
this.once('end', stop)
this.once(ERROR, stop)
this.once(DESTROYED, stop)
return {
next,
throw: stop,
return: stop,
[ITERATOR]() {
return this
},
}
}
destroy(er) {
if (this[DESTROYED]) {
if (er) this.emit('error', er)
else this.emit(DESTROYED)
return this
}
this[DESTROYED] = true
// throw away all buffered data, it's never coming out
this[BUFFER].length = 0
this[BUFFERLENGTH] = 0
if (typeof this.close === 'function' && !this[CLOSED]) this.close()
if (er) this.emit('error', er)
// if no error to emit, still reject pending promises
else this.emit(DESTROYED)
return this
}
static isStream(s) {
return (
!!s &&
(s instanceof Minipass ||
s instanceof Stream ||
(s instanceof EE &&
// readable
(typeof s.pipe === 'function' ||
// writable
(typeof s.write === 'function' && typeof s.end === 'function'))))
)
}
}
exports.Minipass = Minipass

76
node_modules/minipass/package.json generated vendored

@ -1,76 +0,0 @@
{
"name": "minipass",
"version": "5.0.0",
"description": "minimal implementation of a PassThrough stream",
"main": "./index.js",
"module": "./index.mjs",
"types": "./index.d.ts",
"exports": {
".": {
"import": {
"types": "./index.d.ts",
"default": "./index.mjs"
},
"require": {
"types": "./index.d.ts",
"default": "./index.js"
}
},
"./package.json": "./package.json"
},
"devDependencies": {
"@types/node": "^17.0.41",
"end-of-stream": "^1.4.0",
"node-abort-controller": "^3.1.1",
"prettier": "^2.6.2",
"tap": "^16.2.0",
"through2": "^2.0.3",
"ts-node": "^10.8.1",
"typedoc": "^0.23.24",
"typescript": "^4.7.3"
},
"scripts": {
"pretest": "npm run prepare",
"presnap": "npm run prepare",
"prepare": "node ./scripts/transpile-to-esm.js",
"snap": "tap",
"test": "tap",
"preversion": "npm test",
"postversion": "npm publish",
"postpublish": "git push origin --follow-tags",
"typedoc": "typedoc ./index.d.ts",
"format": "prettier --write . --loglevel warn"
},
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/minipass.git"
},
"keywords": [
"passthrough",
"stream"
],
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"license": "ISC",
"files": [
"index.d.ts",
"index.js",
"index.mjs"
],
"tap": {
"check-coverage": true
},
"engines": {
"node": ">=8"
},
"prettier": {
"semi": false,
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"singleQuote": true,
"jsxSingleQuote": false,
"bracketSameLine": true,
"arrowParens": "avoid",
"endOfLine": "lf"
}
}

317
node_modules/node-addon-api/README.md generated vendored

@ -1,317 +0,0 @@
NOTE: The default branch has been renamed!
master is now named main
If you have a local clone, you can update it by running:
```shell
git branch -m master main
git fetch origin
git branch -u origin/main main
```
# **node-addon-api module**
This module contains **header-only C++ wrapper classes** which simplify
the use of the C based [Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html)
provided by Node.js when using C++. It provides a C++ object model
and exception handling semantics with low overhead.
There are three options for implementing addons: Node-API, nan, or direct
use of internal V8, libuv, and Node.js libraries. Unless there is a need for
direct access to functionality that is not exposed by Node-API as outlined
in [C/C++ addons](https://nodejs.org/dist/latest/docs/api/addons.html)
in Node.js core, use Node-API. Refer to
[C/C++ addons with Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html)
for more information on Node-API.
Node-API is an ABI stable C interface provided by Node.js for building native
addons. It is independent of the underlying JavaScript runtime (e.g. V8 or ChakraCore)
and is maintained as part of Node.js itself. It is intended to insulate
native addons from changes in the underlying JavaScript engine and allow
modules compiled for one version to run on later versions of Node.js without
recompilation.
The `node-addon-api` module, which is not part of Node.js, preserves the benefits
of the Node-API as it consists only of inline code that depends only on the stable API
provided by Node-API. As such, modules built against one version of Node.js
using node-addon-api should run without having to be rebuilt with newer versions
of Node.js.
It is important to remember that *other* Node.js interfaces such as
`libuv` (included in a project via `#include <uv.h>`) are not ABI-stable across
Node.js major versions. Thus, an addon must use Node-API and/or `node-addon-api`
exclusively and build against a version of Node.js that includes an
implementation of Node-API (meaning an active LTS version of Node.js) in
order to benefit from ABI stability across Node.js major versions. Node.js
provides an [ABI stability guide][] containing a detailed explanation of ABI
stability in general, and the Node-API ABI stability guarantee in particular.
As new APIs are added to Node-API, node-addon-api must be updated to provide
wrappers for those new APIs. For this reason, node-addon-api provides
methods that allow callers to obtain the underlying Node-API handles, so
direct calls to Node-API and the use of the objects/methods provided by
node-addon-api can be used together, for example to call an API for which
node-addon-api does not yet provide a wrapper.
APIs exposed by node-addon-api are generally used to create and
manipulate JavaScript values. Concepts and operations generally map
to ideas specified in the **ECMA262 Language Specification**.
The [Node-API Resource](https://nodejs.github.io/node-addon-examples/) offers an
excellent orientation and tips for developers just getting started with Node-API
and node-addon-api.
- **[Setup](#setup)**
- **[API Documentation](#api)**
- **[Examples](#examples)**
- **[Tests](#tests)**
- **[More resources and info about native Addons](#resources)**
- **[Badges](#badges)**
- **[Code of Conduct](CODE_OF_CONDUCT.md)**
- **[Contributors](#contributors)**
- **[License](#license)**
## **Current version: 5.1.0**
(See [CHANGELOG.md](CHANGELOG.md) for complete Changelog)
[![NPM](https://nodei.co/npm/node-addon-api.png?downloads=true&downloadRank=true)](https://nodei.co/npm/node-addon-api/) [![NPM](https://nodei.co/npm-dl/node-addon-api.png?months=6&height=1)](https://nodei.co/npm/node-addon-api/)
<a name="setup"></a>
node-addon-api is based on [Node-API](https://nodejs.org/api/n-api.html) and supports using different Node-API versions.
This allows addons built with it to run with Node.js versions which support the targeted Node-API version.
**However**, the node-addon-api support model is to support only the active LTS Node.js versions. This means that
every year there will be a new major version that drops support for the Node.js LTS version which has gone out of service.
The oldest Node.js version supported by the current version of node-addon-api is Node.js 14.x.
## Setup
- [Installation and usage](doc/setup.md)
- [node-gyp](doc/node-gyp.md)
- [cmake-js](doc/cmake-js.md)
- [Conversion tool](doc/conversion-tool.md)
- [Checker tool](doc/checker-tool.md)
- [Generator](doc/generator.md)
- [Prebuild tools](doc/prebuild_tools.md)
<a name="api"></a>
### **API Documentation**
The following is the documentation for node-addon-api.
- [Full Class Hierarchy](doc/hierarchy.md)
- [Addon Structure](doc/addon.md)
- Data Types:
- [Env](doc/env.md)
- [CallbackInfo](doc/callbackinfo.md)
- [Reference](doc/reference.md)
- [Value](doc/value.md)
- [Name](doc/name.md)
- [Symbol](doc/symbol.md)
- [String](doc/string.md)
- [Number](doc/number.md)
- [Date](doc/date.md)
- [BigInt](doc/bigint.md)
- [Boolean](doc/boolean.md)
- [External](doc/external.md)
- [Object](doc/object.md)
- [Array](doc/array.md)
- [ObjectReference](doc/object_reference.md)
- [PropertyDescriptor](doc/property_descriptor.md)
- [Function](doc/function.md)
- [FunctionReference](doc/function_reference.md)
- [ObjectWrap](doc/object_wrap.md)
- [ClassPropertyDescriptor](doc/class_property_descriptor.md)
- [Buffer](doc/buffer.md)
- [ArrayBuffer](doc/array_buffer.md)
- [TypedArray](doc/typed_array.md)
- [TypedArrayOf](doc/typed_array_of.md)
- [DataView](doc/dataview.md)
- [Error Handling](doc/error_handling.md)
- [Error](doc/error.md)
- [TypeError](doc/type_error.md)
- [RangeError](doc/range_error.md)
- [Object Lifetime Management](doc/object_lifetime_management.md)
- [HandleScope](doc/handle_scope.md)
- [EscapableHandleScope](doc/escapable_handle_scope.md)
- [Memory Management](doc/memory_management.md)
- [Async Operations](doc/async_operations.md)
- [AsyncWorker](doc/async_worker.md)
- [AsyncContext](doc/async_context.md)
- [AsyncWorker Variants](doc/async_worker_variants.md)
- [Thread-safe Functions](doc/threadsafe.md)
- [ThreadSafeFunction](doc/threadsafe_function.md)
- [TypedThreadSafeFunction](doc/typed_threadsafe_function.md)
- [Promises](doc/promises.md)
- [Version management](doc/version_management.md)
<a name="examples"></a>
### **Examples**
Are you new to **node-addon-api**? Take a look at our **[examples](https://github.com/nodejs/node-addon-examples)**
- **[Hello World](https://github.com/nodejs/node-addon-examples/tree/HEAD/1_hello_world/node-addon-api)**
- **[Pass arguments to a function](https://github.com/nodejs/node-addon-examples/tree/HEAD/2_function_arguments/node-addon-api)**
- **[Callbacks](https://github.com/nodejs/node-addon-examples/tree/HEAD/3_callbacks/node-addon-api)**
- **[Object factory](https://github.com/nodejs/node-addon-examples/tree/HEAD/4_object_factory/node-addon-api)**
- **[Function factory](https://github.com/nodejs/node-addon-examples/tree/HEAD/5_function_factory/node-addon-api)**
- **[Wrapping C++ Object](https://github.com/nodejs/node-addon-examples/tree/HEAD/6_object_wrap/node-addon-api)**
- **[Factory of wrapped object](https://github.com/nodejs/node-addon-examples/tree/HEAD/7_factory_wrap/node-addon-api)**
- **[Passing wrapped object around](https://github.com/nodejs/node-addon-examples/tree/HEAD/8_passing_wrapped/node-addon-api)**
<a name="tests"></a>
### **Tests**
To run the **node-addon-api** tests do:
```
npm install
npm test
```
To avoid testing the deprecated portions of the API run
```
npm install
npm test --disable-deprecated
```
To run the tests targeting a specific version of Node-API run
```
npm install
export NAPI_VERSION=X
npm test --NAPI_VERSION=X
```
where X is the version of Node-API you want to target.
To run a specific unit test, filter conditions are available
**Example:**
compile and run only tests on objectwrap.cc and objectwrap.js
```
npm run unit --filter=objectwrap
```
Multiple unit tests can be selected with wildcards
**Example:**
compile and run all test files ending with "reference" -> function_reference.cc, object_reference.cc, reference.cc
```
npm run unit --filter=*reference
```
Multiple filter conditions can be joined to broaden the test selection
**Example:**
compile and run all tests under folders threadsafe_function and typed_threadsafe_function and also the objectwrap.cc file
```
npm run unit --filter='*function objectwrap'
```
### **Debug**
To run the **node-addon-api** tests with `--debug` option:
```
npm run-script dev
```
If you want a faster build, you might use the following option:
```
npm run-script dev:incremental
```
Take a look and get inspired by our **[test suite](https://github.com/nodejs/node-addon-api/tree/HEAD/test)**
### **Benchmarks**
You can run the available benchmarks using the following command:
```
npm run-script benchmark
```
See [benchmark/README.md](benchmark/README.md) for more details about running and adding benchmarks.
<a name="resources"></a>
### **More resources and info about native Addons**
- **[C++ Addons](https://nodejs.org/dist/latest/docs/api/addons.html)**
- **[Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html)**
- **[Node-API - Next Generation Node API for Native Modules](https://youtu.be/-Oniup60Afs)**
- **[How We Migrated Realm JavaScript From NAN to Node-API](https://developer.mongodb.com/article/realm-javascript-nan-to-n-api)**
As node-addon-api's core mission is to expose the plain C Node-API as C++
wrappers, tools that provide more convenient patterns for developing a
Node.js add-on with Node-API/node-addon-api can be published to npm as
standalone packages. It is also recommended to tag such packages with
`node-addon-api` to give them more visibility in the community.
Quick links to npm searches: [keywords:node-addon-api](https://www.npmjs.com/search?q=keywords%3Anode-addon-api).
<a name="other-bindings"></a>
### **Other bindings**
- **[napi-rs](https://napi.rs)** - (`Rust`)
<a name="badges"></a>
### **Badges**
The use of badges is recommended to indicate the minimum version of Node-API
required for the module. This helps to determine which Node.js major versions are
supported. Addon maintainers can consult the [Node-API support matrix][] to determine
which Node.js versions provide a given Node-API version. The following badges are
available:
![Node-API v1 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v1%20Badge.svg)
![Node-API v2 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v2%20Badge.svg)
![Node-API v3 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v3%20Badge.svg)
![Node-API v4 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v4%20Badge.svg)
![Node-API v5 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v5%20Badge.svg)
![Node-API v6 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v6%20Badge.svg)
![Node-API v7 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v7%20Badge.svg)
![Node-API v8 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v8%20Badge.svg)
![Node-API Experimental Version Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20Experimental%20Version%20Badge.svg)
## **Contributing**
We love contributions from the community to **node-addon-api**!
See [CONTRIBUTING.md](CONTRIBUTING.md) for more details on our philosophy around extending this module.
<a name="contributors"></a>
## Team members
### Active
| Name | GitHub Link |
| ------------------- | ----------------------------------------------------- |
| Anna Henningsen | [addaleax](https://github.com/addaleax) |
| Chengzhong Wu | [legendecas](https://github.com/legendecas) |
| Jack Xia | [JckXia](https://github.com/JckXia) |
| Kevin Eady | [KevinEady](https://github.com/KevinEady) |
| Michael Dawson | [mhdawson](https://github.com/mhdawson) |
| Nicola Del Gobbo | [NickNaso](https://github.com/NickNaso) |
| Vladimir Morozov | [vmoroz](https://github.com/vmoroz) |
### Emeritus
| Name | GitHub Link |
| ------------------- | ----------------------------------------------------- |
| Arunesh Chandra | [aruneshchandra](https://github.com/aruneshchandra) |
| Benjamin Byholm | [kkoopa](https://github.com/kkoopa) |
| Gabriel Schulhof | [gabrielschulhof](https://github.com/gabrielschulhof) |
| Hitesh Kanwathirtha | [digitalinfinity](https://github.com/digitalinfinity) |
| Jason Ginchereau | [jasongin](https://github.com/jasongin) |
| Jim Schlight | [jschlight](https://github.com/jschlight) |
| Sampson Gao | [sampsongao](https://github.com/sampsongao) |
| Taylor Woll | [boingoing](https://github.com/boingoing) |
<a name="license"></a>
Licensed under [MIT](./LICENSE.md)
[ABI stability guide]: https://nodejs.org/en/docs/guides/abi-stability/
[Node-API support matrix]: https://nodejs.org/dist/latest/docs/api/n-api.html#n_api_n_api_version_matrix

11
node_modules/node-addon-api/index.js generated vendored

@ -1,11 +0,0 @@
const path = require('path');
const includeDir = path.relative('.', __dirname);
module.exports = {
  include: `"${__dirname}"`, // deprecated, can be removed as part of 4.0.0
  include_dir: includeDir,
  gyp: path.join(includeDir, 'node_api.gyp:nothing'),
  isNodeApiBuiltin: true,
  needsFlag: false
};

186
node_modules/node-addon-api/napi-inl.deprecated.h generated vendored

@ -1,186 +0,0 @@
#ifndef SRC_NAPI_INL_DEPRECATED_H_
#define SRC_NAPI_INL_DEPRECATED_H_
////////////////////////////////////////////////////////////////////////////////
// PropertyDescriptor class
////////////////////////////////////////////////////////////////////////////////
template <typename Getter>
inline PropertyDescriptor PropertyDescriptor::Accessor(
const char* utf8name,
Getter getter,
napi_property_attributes attributes,
void* /*data*/) {
using CbData = details::CallbackData<Getter, Napi::Value>;
// TODO: Delete when the function is destroyed
auto callbackData = new CbData({getter, nullptr});
return PropertyDescriptor({utf8name,
nullptr,
nullptr,
CbData::Wrapper,
nullptr,
nullptr,
attributes,
callbackData});
}
template <typename Getter>
inline PropertyDescriptor PropertyDescriptor::Accessor(
const std::string& utf8name,
Getter getter,
napi_property_attributes attributes,
void* data) {
return Accessor(utf8name.c_str(), getter, attributes, data);
}
template <typename Getter>
inline PropertyDescriptor PropertyDescriptor::Accessor(
napi_value name,
Getter getter,
napi_property_attributes attributes,
void* /*data*/) {
using CbData = details::CallbackData<Getter, Napi::Value>;
// TODO: Delete when the function is destroyed
auto callbackData = new CbData({getter, nullptr});
return PropertyDescriptor({nullptr,
name,
nullptr,
CbData::Wrapper,
nullptr,
nullptr,
attributes,
callbackData});
}
template <typename Getter>
inline PropertyDescriptor PropertyDescriptor::Accessor(
Name name, Getter getter, napi_property_attributes attributes, void* data) {
napi_value nameValue = name;
return PropertyDescriptor::Accessor(nameValue, getter, attributes, data);
}
template <typename Getter, typename Setter>
inline PropertyDescriptor PropertyDescriptor::Accessor(
const char* utf8name,
Getter getter,
Setter setter,
napi_property_attributes attributes,
void* /*data*/) {
using CbData = details::AccessorCallbackData<Getter, Setter>;
// TODO: Delete when the function is destroyed
auto callbackData = new CbData({getter, setter, nullptr});
return PropertyDescriptor({utf8name,
nullptr,
nullptr,
CbData::GetterWrapper,
CbData::SetterWrapper,
nullptr,
attributes,
callbackData});
}
template <typename Getter, typename Setter>
inline PropertyDescriptor PropertyDescriptor::Accessor(
const std::string& utf8name,
Getter getter,
Setter setter,
napi_property_attributes attributes,
void* data) {
return Accessor(utf8name.c_str(), getter, setter, attributes, data);
}
template <typename Getter, typename Setter>
inline PropertyDescriptor PropertyDescriptor::Accessor(
napi_value name,
Getter getter,
Setter setter,
napi_property_attributes attributes,
void* /*data*/) {
using CbData = details::AccessorCallbackData<Getter, Setter>;
// TODO: Delete when the function is destroyed
auto callbackData = new CbData({getter, setter, nullptr});
return PropertyDescriptor({nullptr,
name,
nullptr,
CbData::GetterWrapper,
CbData::SetterWrapper,
nullptr,
attributes,
callbackData});
}
template <typename Getter, typename Setter>
inline PropertyDescriptor PropertyDescriptor::Accessor(
Name name,
Getter getter,
Setter setter,
napi_property_attributes attributes,
void* data) {
napi_value nameValue = name;
return PropertyDescriptor::Accessor(
nameValue, getter, setter, attributes, data);
}
template <typename Callable>
inline PropertyDescriptor PropertyDescriptor::Function(
const char* utf8name,
Callable cb,
napi_property_attributes attributes,
void* /*data*/) {
using ReturnType = decltype(cb(CallbackInfo(nullptr, nullptr)));
using CbData = details::CallbackData<Callable, ReturnType>;
// TODO: Delete when the function is destroyed
auto callbackData = new CbData({cb, nullptr});
return PropertyDescriptor({utf8name,
nullptr,
CbData::Wrapper,
nullptr,
nullptr,
nullptr,
attributes,
callbackData});
}
template <typename Callable>
inline PropertyDescriptor PropertyDescriptor::Function(
const std::string& utf8name,
Callable cb,
napi_property_attributes attributes,
void* data) {
return Function(utf8name.c_str(), cb, attributes, data);
}
template <typename Callable>
inline PropertyDescriptor PropertyDescriptor::Function(
napi_value name,
Callable cb,
napi_property_attributes attributes,
void* /*data*/) {
using ReturnType = decltype(cb(CallbackInfo(nullptr, nullptr)));
using CbData = details::CallbackData<Callable, ReturnType>;
// TODO: Delete when the function is destroyed
auto callbackData = new CbData({cb, nullptr});
return PropertyDescriptor({nullptr,
name,
CbData::Wrapper,
nullptr,
nullptr,
nullptr,
attributes,
callbackData});
}
template <typename Callable>
inline PropertyDescriptor PropertyDescriptor::Function(
Name name, Callable cb, napi_property_attributes attributes, void* data) {
napi_value nameValue = name;
return PropertyDescriptor::Function(nameValue, cb, attributes, data);
}
#endif // !SRC_NAPI_INL_DEPRECATED_H_

File diff suppressed because it is too large

3114
node_modules/node-addon-api/napi.h generated vendored

File diff suppressed because it is too large

456
node_modules/node-addon-api/package.json generated vendored

@ -1,456 +0,0 @@
{
"bugs": {
"url": "https://github.com/nodejs/node-addon-api/issues"
},
"contributors": [
{
"name": "Abhishek Kumar Singh",
"url": "https://github.com/abhi11210646"
},
{
"name": "Alba Mendez",
"url": "https://github.com/jmendeth"
},
{
"name": "Alexander Floh",
"url": "https://github.com/alexanderfloh"
},
{
"name": "Ammar Faizi",
"url": "https://github.com/ammarfaizi2"
},
{
"name": "András Timár, Dr",
"url": "https://github.com/timarandras"
},
{
"name": "Andrew Petersen",
"url": "https://github.com/kirbysayshi"
},
{
"name": "Anisha Rohra",
"url": "https://github.com/anisha-rohra"
},
{
"name": "Anna Henningsen",
"url": "https://github.com/addaleax"
},
{
"name": "Arnaud Botella",
"url": "https://github.com/BotellaA"
},
{
"name": "Arunesh Chandra",
"url": "https://github.com/aruneshchandra"
},
{
"name": "Azlan Mukhtar",
"url": "https://github.com/azlan"
},
{
"name": "Ben Berman",
"url": "https://github.com/rivertam"
},
{
"name": "Benjamin Byholm",
"url": "https://github.com/kkoopa"
},
{
"name": "Bill Gallafent",
"url": "https://github.com/gallafent"
},
{
"name": "blagoev",
"url": "https://github.com/blagoev"
},
{
"name": "Bruce A. MacNaughton",
"url": "https://github.com/bmacnaughton"
},
{
"name": "Cory Mickelson",
"url": "https://github.com/corymickelson"
},
{
"name": "Daniel Bevenius",
"url": "https://github.com/danbev"
},
{
"name": "Dante Calderón",
"url": "https://github.com/dantehemerson"
},
{
"name": "Darshan Sen",
"url": "https://github.com/RaisinTen"
},
{
"name": "David Halls",
"url": "https://github.com/davedoesdev"
},
{
"name": "Deepak Rajamohan",
"url": "https://github.com/deepakrkris"
},
{
"name": "Dmitry Ashkadov",
"url": "https://github.com/dmitryash"
},
{
"name": "Dongjin Na",
"url": "https://github.com/nadongguri"
},
{
"name": "Doni Rubiagatra",
"url": "https://github.com/rubiagatra"
},
{
"name": "Eric Bickle",
"url": "https://github.com/ebickle"
},
{
"name": "extremeheat",
"url": "https://github.com/extremeheat"
},
{
"name": "Feng Yu",
"url": "https://github.com/F3n67u"
},
{
"name": "Ferdinand Holzer",
"url": "https://github.com/fholzer"
},
{
"name": "Gabriel Schulhof",
"url": "https://github.com/gabrielschulhof"
},
{
"name": "Guenter Sandner",
"url": "https://github.com/gms1"
},
{
"name": "Gus Caplan",
"url": "https://github.com/devsnek"
},
{
"name": "Helio Frota",
"url": "https://github.com/helio-frota"
},
{
"name": "Hitesh Kanwathirtha",
"url": "https://github.com/digitalinfinity"
},
{
"name": "ikokostya",
"url": "https://github.com/ikokostya"
},
{
"name": "Jack Xia",
"url": "https://github.com/JckXia"
},
{
"name": "Jake Barnes",
"url": "https://github.com/DuBistKomisch"
},
{
"name": "Jake Yoon",
"url": "https://github.com/yjaeseok"
},
{
"name": "Jason Ginchereau",
"url": "https://github.com/jasongin"
},
{
"name": "Jenny",
"url": "https://github.com/egg-bread"
},
{
"name": "Jeroen Janssen",
"url": "https://github.com/japj"
},
{
"name": "Jim Schlight",
"url": "https://github.com/jschlight"
},
{
"name": "Jinho Bang",
"url": "https://github.com/romandev"
},
{
"name": "José Expósito",
"url": "https://github.com/JoseExposito"
},
{
"name": "joshgarde",
"url": "https://github.com/joshgarde"
},
{
"name": "Julian Mesa",
"url": "https://github.com/julianmesa-gitkraken"
},
{
"name": "Kasumi Hanazuki",
"url": "https://github.com/hanazuki"
},
{
"name": "Kelvin",
"url": "https://github.com/kelvinhammond"
},
{
"name": "Kevin Eady",
"url": "https://github.com/KevinEady"
},
{
"name": "Kévin VOYER",
"url": "https://github.com/kecsou"
},
{
"name": "kidneysolo",
"url": "https://github.com/kidneysolo"
},
{
"name": "Koki Nishihara",
"url": "https://github.com/Nishikoh"
},
{
"name": "Konstantin Tarkus",
"url": "https://github.com/koistya"
},
{
"name": "Kyle Farnung",
"url": "https://github.com/kfarnung"
},
{
"name": "Kyle Kovacs",
"url": "https://github.com/nullromo"
},
{
"name": "legendecas",
"url": "https://github.com/legendecas"
},
{
"name": "LongYinan",
"url": "https://github.com/Brooooooklyn"
},
{
"name": "Lovell Fuller",
"url": "https://github.com/lovell"
},
{
"name": "Luciano Martorella",
"url": "https://github.com/lmartorella"
},
{
"name": "mastergberry",
"url": "https://github.com/mastergberry"
},
{
"name": "Mathias Küsel",
"url": "https://github.com/mathiask88"
},
{
"name": "Matteo Collina",
"url": "https://github.com/mcollina"
},
{
"name": "Michael Dawson",
"url": "https://github.com/mhdawson"
},
{
"name": "Michael Price",
"url": "https://github.com/mikepricedev"
},
{
"name": "Michele Campus",
"url": "https://github.com/kYroL01"
},
{
"name": "Mikhail Cheshkov",
"url": "https://github.com/mcheshkov"
},
{
"name": "nempoBu4",
"url": "https://github.com/nempoBu4"
},
{
"name": "Nicola Del Gobbo",
"url": "https://github.com/NickNaso"
},
{
"name": "Nick Soggin",
"url": "https://github.com/iSkore"
},
{
"name": "Nikolai Vavilov",
"url": "https://github.com/seishun"
},
{
"name": "Nurbol Alpysbayev",
"url": "https://github.com/anurbol"
},
{
"name": "pacop",
"url": "https://github.com/pacop"
},
{
"name": "Peter Šándor",
"url": "https://github.com/petersandor"
},
{
"name": "Philipp Renoth",
"url": "https://github.com/DaAitch"
},
{
"name": "rgerd",
"url": "https://github.com/rgerd"
},
{
"name": "Richard Lau",
"url": "https://github.com/richardlau"
},
{
"name": "Rolf Timmermans",
"url": "https://github.com/rolftimmermans"
},
{
"name": "Ross Weir",
"url": "https://github.com/ross-weir"
},
{
"name": "Ryuichi Okumura",
"url": "https://github.com/okuryu"
},
{
"name": "Saint Gabriel",
"url": "https://github.com/chineduG"
},
{
"name": "Sampson Gao",
"url": "https://github.com/sampsongao"
},
{
"name": "Sam Roberts",
"url": "https://github.com/sam-github"
},
{
"name": "strager",
"url": "https://github.com/strager"
},
{
"name": "Taylor Woll",
"url": "https://github.com/boingoing"
},
{
"name": "Thomas Gentilhomme",
"url": "https://github.com/fraxken"
},
{
"name": "Tim Rach",
"url": "https://github.com/timrach"
},
{
"name": "Tobias Nießen",
"url": "https://github.com/tniessen"
},
{
"name": "todoroff",
"url": "https://github.com/todoroff"
},
{
"name": "Tux3",
"url": "https://github.com/tux3"
},
{
"name": "Vlad Velmisov",
"url": "https://github.com/Velmisov"
},
{
"name": "Vladimir Morozov",
"url": "https://github.com/vmoroz"
},
{
"name": "WenheLI",
"url": "https://github.com/WenheLI"
},
{
"name": "Xuguang Mei",
"url": "https://github.com/meixg"
},
{
"name": "Yohei Kishimoto",
"url": "https://github.com/morokosi"
},
{
"name": "Yulong Wang",
"url": "https://github.com/fs-eire"
},
{
"name": "Ziqiu Zhao",
"url": "https://github.com/ZzqiZQute"
}
],
"description": "Node.js API (Node-API)",
"devDependencies": {
"benchmark": "^2.1.4",
"bindings": "^1.5.0",
"clang-format": "^1.4.0",
"eslint": "^7.32.0",
"eslint-config-semistandard": "^16.0.0",
"eslint-config-standard": "^16.0.3",
"eslint-plugin-import": "^2.24.2",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^5.1.0",
"fs-extra": "^9.0.1",
"path": "^0.12.7",
"pre-commit": "^1.2.2",
"safe-buffer": "^5.1.1"
},
"directories": {},
"gypfile": false,
"homepage": "https://github.com/nodejs/node-addon-api",
"keywords": [
"n-api",
"napi",
"addon",
"native",
"bindings",
"c",
"c++",
"nan",
"node-addon-api"
],
"license": "MIT",
"main": "index.js",
"name": "node-addon-api",
"readme": "README.md",
"repository": {
"type": "git",
"url": "git://github.com/nodejs/node-addon-api.git"
},
"files": [
"*.{c,h,gyp,gypi}",
"package-support.json",
"tools/"
],
"scripts": {
"prebenchmark": "node-gyp rebuild -C benchmark",
"benchmark": "node benchmark",
"pretest": "node-gyp rebuild -C test",
"test": "node test",
"test:debug": "node-gyp rebuild -C test --debug && NODE_API_BUILD_CONFIG=Debug node ./test/index.js",
"predev": "node-gyp rebuild -C test --debug",
"dev": "node test",
"predev:incremental": "node-gyp configure build -C test --debug",
"dev:incremental": "node test",
"doc": "doxygen doc/Doxyfile",
"lint": "node tools/eslint-format && node tools/clang-format",
"lint:fix": "node tools/clang-format --fix && node tools/eslint-format --fix"
},
"pre-commit": "lint",
"version": "5.1.0",
"support": true
}

@ -1,99 +0,0 @@
'use strict';
// Descend into a directory structure and, for each file matching *.node,
// output whether it's an N-API module, based on the imports found in the file.
const fs = require('fs');
const path = require('path');
// Read the output of the command, break it into lines, and use the reducer to
// decide whether the file is an N-API module or not.
function checkFile (file, command, argv, reducer) {
const child = require('child_process').spawn(command, argv, {
stdio: ['inherit', 'pipe', 'inherit']
});
let leftover = '';
let isNapi;
child.stdout.on('data', (chunk) => {
if (isNapi === undefined) {
chunk = (leftover + chunk.toString()).split(/[\r\n]+/);
leftover = chunk.pop();
isNapi = chunk.reduce(reducer, isNapi);
if (isNapi !== undefined) {
child.kill();
}
}
});
child.on('close', (code, signal) => {
if ((code === null && signal !== null) || (code !== 0)) {
console.log(
command + ' exited with code: ' + code + ' and signal: ' + signal);
} else {
// Green if it's a N-API module, red otherwise.
console.log(
'\x1b[' + (isNapi ? '42' : '41') + 'm' +
(isNapi ? ' N-API' : 'Not N-API') +
'\x1b[0m: ' + file);
}
});
}
// Use nm -a to list symbols.
function checkFileUNIX (file) {
checkFile(file, 'nm', ['-a', file], (soFar, line) => {
if (soFar === undefined) {
line = line.match(/([0-9a-f]*)? ([a-zA-Z]) (.*$)/);
if (line && line[2] === 'U') {
if (/^napi/.test(line[3])) {
soFar = true;
}
}
}
return soFar;
});
}
// Use dumpbin /imports to list symbols.
function checkFileWin32 (file) {
checkFile(file, 'dumpbin', ['/imports', file], (soFar, line) => {
if (soFar === undefined) {
line = line.match(/([0-9a-f]*)? +([a-zA-Z0-9]) (.*$)/);
if (line && /^napi/.test(line[line.length - 1])) {
soFar = true;
}
}
return soFar;
});
}
// Descend into a directory structure and pass each file ending in '.node' to
// one of the above checks, depending on the OS.
function recurse (top) {
fs.readdir(top, (error, items) => {
if (error) {
throw new Error('error reading directory ' + top + ': ' + error);
}
items.forEach((item) => {
item = path.join(top, item);
fs.stat(item, ((item) => (error, stats) => {
if (error) {
throw new Error('error about ' + item + ': ' + error);
}
if (stats.isDirectory()) {
recurse(item);
} else if (/[.]node$/.test(item) &&
// Explicitly ignore files called 'nothing.node' because they are
// artefacts of node-addon-api having identified a version of
// Node.js that ships with a correct implementation of N-API.
path.basename(item) !== 'nothing.node') {
process.platform === 'win32'
? checkFileWin32(item)
: checkFileUNIX(item);
}
})(item));
});
});
}
// Start with the directory given on the command line or the current directory
// if nothing was given.
recurse(process.argv.length > 2 ? process.argv[2] : '.');
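// Usage sketch (the script and directory names below are assumptions; the
// script prints one colored verdict line per .node file it finds):
//   node check-napi.js build/Release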

@ -1,301 +0,0 @@
#! /usr/bin/env node
'use strict';
const fs = require('fs');
const path = require('path');
const args = process.argv.slice(2);
const dir = args[0];
if (!dir) {
console.log('Usage: node ' + path.basename(__filename) + ' <target-dir>');
process.exit(1);
}
const NodeApiVersion = require('../package.json').version;
const disable = args[1];
let ConfigFileOperations;
if (disable !== '--disable' && dir !== '--disable') {
ConfigFileOperations = {
'package.json': [
[/([ ]*)"dependencies": {/g, '$1"dependencies": {\n$1 "node-addon-api": "' + NodeApiVersion + '",'],
[/[ ]*"nan": *"[^"]+"(,|)[\n\r]/g, '']
],
'binding.gyp': [
[/([ ]*)'include_dirs': \[/g, '$1\'include_dirs\': [\n$1 \'<!(node -p "require(\\\'node-addon-api\\\').include_dir")\','],
[/([ ]*)"include_dirs": \[/g, '$1"include_dirs": [\n$1 "<!(node -p \\"require(\'node-addon-api\').include_dir\\")",'],
[/[ ]*("|')<!\(node -e ("|'|\\"|\\')require\(("|'|\\"|\\')nan("|'|\\"|\\')\)("|'|\\"|\\')\)("|')(,|)[\r\n]/g, ''],
[/([ ]*)("|')target_name("|'): ("|')(.+?)("|'),/g, '$1$2target_name$2: $4$5$6,\n $2cflags!$2: [ $2-fno-exceptions$2 ],\n $2cflags_cc!$2: [ $2-fno-exceptions$2 ],\n $2xcode_settings$2: { $2GCC_ENABLE_CPP_EXCEPTIONS$2: $2YES$2,\n $2CLANG_CXX_LIBRARY$2: $2libc++$2,\n $2MACOSX_DEPLOYMENT_TARGET$2: $210.7$2,\n },\n $2msvs_settings$2: {\n $2VCCLCompilerTool$2: { $2ExceptionHandling$2: 1 },\n },']
]
};
} else {
ConfigFileOperations = {
'package.json': [
[/([ ]*)"dependencies": {/g, '$1"dependencies": {\n$1 "node-addon-api": "' + NodeApiVersion + '",'],
[/[ ]*"nan": *"[^"]+"(,|)[\n\r]/g, '']
],
'binding.gyp': [
[/([ ]*)'include_dirs': \[/g, '$1\'include_dirs\': [\n$1 \'<!(node -p "require(\\\'node-addon-api\\\').include_dir")\','],
[/([ ]*)"include_dirs": \[/g, '$1"include_dirs": [\n$1 "<!(node -p \'require(\\"node-addon-api\\").include_dir\')",'],
[/[ ]*("|')<!\(node -e ("|'|\\"|\\')require\(("|'|\\"|\\')nan("|'|\\"|\\')\)("|'|\\"|\\')\)("|')(,|)[\r\n]/g, ''],
[/([ ]*)("|')target_name("|'): ("|')(.+?)("|'),/g, '$1$2target_name$2: $4$5$6,\n $2cflags!$2: [ $2-fno-exceptions$2 ],\n $2cflags_cc!$2: [ $2-fno-exceptions$2 ],\n $2defines$2: [ $2NAPI_DISABLE_CPP_EXCEPTIONS$2 ],\n $2conditions$2: [\n [\'OS=="win"\', { $2defines$2: [ $2_HAS_EXCEPTIONS=1$2 ] }]\n ]']
]
};
}
const SourceFileOperations = [
[/Nan::SetMethod\(target,[\s]*"(.*)"[\s]*,[\s]*([^)]+)\)/g, 'exports.Set(Napi::String::New(env, "$1"), Napi::Function::New(env, $2))'],
[/v8::Local<v8::FunctionTemplate>\s+(\w+)\s*=\s*Nan::New<FunctionTemplate>\([\w\d:]+\);(?:\w+->Reset\(\1\))?\s+\1->SetClassName\(Nan::String::New\("(\w+)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$2", {'],
[/Local<FunctionTemplate>\s+(\w+)\s*=\s*Nan::New<FunctionTemplate>\([\w\d:]+\);\s+(\w+)\.Reset\((\1)\);\s+\1->SetClassName\((Nan::String::New|Nan::New<(v8::)*String>)\("(.+?)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$6", {'],
[/Local<FunctionTemplate>\s+(\w+)\s*=\s*Nan::New<FunctionTemplate>\([\w\d:]+\);(?:\w+->Reset\(\1\))?\s+\1->SetClassName\(Nan::String::New\("(\w+)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$2", {'],
[/Nan::New<v8::FunctionTemplate>\(([\w\d:]+)\)->GetFunction\(\)/g, 'Napi::Function::New(env, $1)'],
[/Nan::New<FunctionTemplate>\(([\w\d:]+)\)->GetFunction\(\)/g, 'Napi::Function::New(env, $1)'],
[/Nan::New<v8::FunctionTemplate>\(([\w\d:]+)\)/g, 'Napi::Function::New(env, $1)'],
[/Nan::New<FunctionTemplate>\(([\w\d:]+)\)/g, 'Napi::Function::New(env, $1)'],
// FunctionTemplate to FunctionReference
[/Nan::Persistent<(v8::)*FunctionTemplate>/g, 'Napi::FunctionReference'],
[/Nan::Persistent<(v8::)*Function>/g, 'Napi::FunctionReference'],
[/v8::Local<v8::FunctionTemplate>/g, 'Napi::FunctionReference'],
[/Local<FunctionTemplate>/g, 'Napi::FunctionReference'],
[/v8::FunctionTemplate/g, 'Napi::FunctionReference'],
[/FunctionTemplate/g, 'Napi::FunctionReference'],
[/([ ]*)Nan::SetPrototypeMethod\(\w+, "(\w+)", (\w+)\);/g, '$1InstanceMethod("$2", &$3),'],
[/([ ]*)(?:\w+\.Reset\(\w+\);\s+)?\(target\)\.Set\("(\w+)",\s*Nan::GetFunction\((\w+)\)\);/gm,
'});\n\n' +
'$1constructor = Napi::Persistent($3);\n' +
'$1constructor.SuppressDestruct();\n' +
'$1target.Set("$2", $3);'],
// TODO: Other attribute combinations
[/static_cast<PropertyAttribute>\(ReadOnly\s*\|\s*DontDelete\)/gm,
'static_cast<napi_property_attributes>(napi_enumerable | napi_configurable)'],
[/([\w\d:<>]+?)::Cast\((.+?)\)/g, '$2.As<$1>()'],
[/\*Nan::Utf8String\(([^)]+)\)/g, '$1->As<Napi::String>().Utf8Value().c_str()'],
[/Nan::Utf8String +(\w+)\(([^)]+)\)/g, 'std::string $1 = $2.As<Napi::String>()'],
[/Nan::Utf8String/g, 'std::string'],
[/v8::String::Utf8Value (.+?)\((.+?)\)/g, 'Napi::String $1(env, $2)'],
[/String::Utf8Value (.+?)\((.+?)\)/g, 'Napi::String $1(env, $2)'],
[/\.length\(\)/g, '.Length()'],
[/Nan::MakeCallback\(([^,]+),[\s\\]+([^,]+),/gm, '$2.MakeCallback($1,'],
[/class\s+(\w+)\s*:\s*public\s+Nan::ObjectWrap/g, 'class $1 : public Napi::ObjectWrap<$1>'],
[/(\w+)\(([^)]*)\)\s*:\s*Nan::ObjectWrap\(\)\s*(,)?/gm, '$1($2) : Napi::ObjectWrap<$1>()$3'],
// HandleOKCallback to OnOK
[/HandleOKCallback/g, 'OnOK'],
// HandleErrorCallback to OnError
[/HandleErrorCallback/g, 'OnError'],
// ex. .As<Function>() to .As<Napi::Object>()
[/\.As<v8::(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>\(\)/g, '.As<Napi::$1>()'],
[/\.As<(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>\(\)/g, '.As<Napi::$1>()'],
// ex. Nan::New<Number>(info[0]) to Napi::Number::New(info[0])
[/Nan::New<(v8::)*Integer>\((.+?)\)/g, 'Napi::Number::New(env, $2)'],
[/Nan::New\(([0-9.]+)\)/g, 'Napi::Number::New(env, $1)'],
[/Nan::New<(v8::)*String>\("(.+?)"\)/g, 'Napi::String::New(env, "$2")'],
[/Nan::New\("(.+?)"\)/g, 'Napi::String::New(env, "$1")'],
[/Nan::New<(v8::)*(.+?)>\(\)/g, 'Napi::$2::New(env)'],
[/Nan::New<(.+?)>\(\)/g, 'Napi::$1::New(env)'],
[/Nan::New<(v8::)*(.+?)>\(/g, 'Napi::$2::New(env, '],
[/Nan::New<(.+?)>\(/g, 'Napi::$1::New(env, '],
[/Nan::NewBuffer\(/g, 'Napi::Buffer<char>::New(env, '],
// TODO: Properly handle this
[/Nan::New\(/g, 'Napi::New(env, '],
[/\.IsInt32\(\)/g, '.IsNumber()'],
[/->IsInt32\(\)/g, '.IsNumber()'],
[/(.+?)->BooleanValue\(\)/g, '$1.As<Napi::Boolean>().Value()'],
[/(.+?)->Int32Value\(\)/g, '$1.As<Napi::Number>().Int32Value()'],
[/(.+?)->Uint32Value\(\)/g, '$1.As<Napi::Number>().Uint32Value()'],
[/(.+?)->IntegerValue\(\)/g, '$1.As<Napi::Number>().Int64Value()'],
[/(.+?)->NumberValue\(\)/g, '$1.As<Napi::Number>().DoubleValue()'],
// ex. Nan::To<bool>(info[0]) to info[0].Value()
[/Nan::To<v8::(Boolean|String|Number|Object|Array|Symbol|Function)>\((.+?)\)/g, '$2.To<Napi::$1>()'],
[/Nan::To<(Boolean|String|Number|Object|Array|Symbol|Function)>\((.+?)\)/g, '$2.To<Napi::$1>()'],
// ex. Nan::To<bool>(info[0]) to info[0].As<Napi::Boolean>().Value()
[/Nan::To<bool>\((.+?)\)/g, '$1.As<Napi::Boolean>().Value()'],
// ex. Nan::To<int>(info[0]) to info[0].As<Napi::Number>().Int32Value()
[/Nan::To<int>\((.+?)\)/g, '$1.As<Napi::Number>().Int32Value()'],
// ex. Nan::To<int32_t>(info[0]) to info[0].As<Napi::Number>().Int32Value()
[/Nan::To<int32_t>\((.+?)\)/g, '$1.As<Napi::Number>().Int32Value()'],
// ex. Nan::To<uint32_t>(info[0]) to info[0].As<Napi::Number>().Uint32Value()
[/Nan::To<uint32_t>\((.+?)\)/g, '$1.As<Napi::Number>().Uint32Value()'],
// ex. Nan::To<int64_t>(info[0]) to info[0].As<Napi::Number>().Int64Value()
[/Nan::To<int64_t>\((.+?)\)/g, '$1.As<Napi::Number>().Int64Value()'],
// ex. Nan::To<float>(info[0]) to info[0].As<Napi::Number>().FloatValue()
[/Nan::To<float>\((.+?)\)/g, '$1.As<Napi::Number>().FloatValue()'],
// ex. Nan::To<double>(info[0]) to info[0].As<Napi::Number>().DoubleValue()
[/Nan::To<double>\((.+?)\)/g, '$1.As<Napi::Number>().DoubleValue()'],
[/Nan::New\((\w+)\)->HasInstance\((\w+)\)/g, '$2.InstanceOf($1.Value())'],
[/Nan::Has\(([^,]+),\s*/gm, '($1).Has('],
[/\.Has\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\)/gm, '.Has($1)'],
[/\.Has\([\s|\\]*Nan::New\(([^)]+)\)\)/gm, '.Has($1)'],
[/Nan::Get\(([^,]+),\s*/gm, '($1).Get('],
[/\.Get\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\)/gm, '.Get($1)'],
[/\.Get\([\s|\\]*Nan::New\(([^)]+)\)\)/gm, '.Get($1)'],
[/Nan::Set\(([^,]+),\s*/gm, '($1).Set('],
[/\.Set\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\s*,/gm, '.Set($1,'],
[/\.Set\([\s|\\]*Nan::New\(([^)]+)\)\s*,/gm, '.Set($1,'],
// ex. node::Buffer::HasInstance(info[0]) to info[0].IsBuffer()
[/node::Buffer::HasInstance\((.+?)\)/g, '$1.IsBuffer()'],
// ex. node::Buffer::Length(info[0]) to info[0].Length()
[/node::Buffer::Length\((.+?)\)/g, '$1.As<Napi::Buffer<char>>().Length()'],
// ex. node::Buffer::Data(info[0]) to info[0].Data()
[/node::Buffer::Data\((.+?)\)/g, '$1.As<Napi::Buffer<char>>().Data()'],
[/Nan::CopyBuffer\(/g, 'Napi::Buffer::Copy(env, '],
// Nan::AsyncQueueWorker(worker)
[/Nan::AsyncQueueWorker\((.+)\);/g, '$1.Queue();'],
[/Nan::(Undefined|Null|True|False)\(\)/g, 'env.$1()'],
// Nan::ThrowError(error) to Napi::Error::New(env, error).ThrowAsJavaScriptException()
[/([ ]*)return Nan::Throw(\w*?)Error\((.+?)\);/g, '$1Napi::$2Error::New(env, $3).ThrowAsJavaScriptException();\n$1return env.Null();'],
[/Nan::Throw(\w*?)Error\((.+?)\);\n(\s*)return;/g, 'Napi::$1Error::New(env, $2).ThrowAsJavaScriptException();\n$3return env.Null();'],
[/Nan::Throw(\w*?)Error\((.+?)\);/g, 'Napi::$1Error::New(env, $2).ThrowAsJavaScriptException();\n'],
// Nan::RangeError(error) to Napi::RangeError::New(env, error)
[/Nan::(\w*?)Error\((.+)\)/g, 'Napi::$1Error::New(env, $2)'],
[/Nan::Set\((.+?),\n* *(.+?),\n* *(.+?),\n* *(.+?)\)/g, '$1.Set($2, $3, $4)'],
[/Nan::(Escapable)?HandleScope\s+(\w+)\s*;/g, 'Napi::$1HandleScope $2(env);'],
[/Nan::(Escapable)?HandleScope/g, 'Napi::$1HandleScope'],
[/Nan::ForceSet\(([^,]+), ?/g, '$1->DefineProperty('],
[/\.ForceSet\(Napi::String::New\(env, "(\w+)"\),\s*?/g, '.DefineProperty("$1", '],
// [ /Nan::GetPropertyNames\(([^,]+)\)/, '$1->GetPropertyNames()' ],
[/Nan::Equals\(([^,]+),/g, '$1.StrictEquals('],
[/(.+)->Set\(/g, '$1.Set('],
[/Nan::Callback/g, 'Napi::FunctionReference'],
[/Nan::Persistent<Object>/g, 'Napi::ObjectReference'],
[/Nan::ADDON_REGISTER_FUNCTION_ARGS_TYPE target/g, 'Napi::Env& env, Napi::Object& target'],
[/(\w+)\*\s+(\w+)\s*=\s*Nan::ObjectWrap::Unwrap<\w+>\(info\.This\(\)\);/g, '$1* $2 = this;'],
[/Nan::ObjectWrap::Unwrap<(\w+)>\((.*)\);/g, '$2.Unwrap<$1>();'],
[/Nan::NAN_METHOD_RETURN_TYPE/g, 'void'],
[/NAN_INLINE/g, 'inline'],
[/Nan::NAN_METHOD_ARGS_TYPE/g, 'const Napi::CallbackInfo&'],
[/NAN_METHOD\(([\w\d:]+?)\)/g, 'Napi::Value $1(const Napi::CallbackInfo& info)'],
[/static\s*NAN_GETTER\(([\w\d:]+?)\)/g, 'Napi::Value $1(const Napi::CallbackInfo& info)'],
[/NAN_GETTER\(([\w\d:]+?)\)/g, 'Napi::Value $1(const Napi::CallbackInfo& info)'],
[/static\s*NAN_SETTER\(([\w\d:]+?)\)/g, 'void $1(const Napi::CallbackInfo& info, const Napi::Value& value)'],
[/NAN_SETTER\(([\w\d:]+?)\)/g, 'void $1(const Napi::CallbackInfo& info, const Napi::Value& value)'],
[/void Init\((v8::)*Local<(v8::)*Object> exports\)/g, 'Napi::Object Init(Napi::Env env, Napi::Object exports)'],
[/NAN_MODULE_INIT\(([\w\d:]+?)\);/g, 'Napi::Object $1(Napi::Env env, Napi::Object exports);'],
[/NAN_MODULE_INIT\(([\w\d:]+?)\)/g, 'Napi::Object $1(Napi::Env env, Napi::Object exports)'],
[/::(Init(?:ialize)?)\(target\)/g, '::$1(env, target, module)'],
[/constructor_template/g, 'constructor'],
[/Nan::FunctionCallbackInfo<(v8::)?Value>[ ]*& [ ]*info\)[ ]*{\n*([ ]*)/gm, 'Napi::CallbackInfo& info) {\n$2Napi::Env env = info.Env();\n$2'],
[/Nan::FunctionCallbackInfo<(v8::)*Value>\s*&\s*info\);/g, 'Napi::CallbackInfo& info);'],
[/Nan::FunctionCallbackInfo<(v8::)*Value>\s*&/g, 'Napi::CallbackInfo&'],
[/Buffer::HasInstance\(([^)]+)\)/g, '$1.IsBuffer()'],
[/info\[(\d+)\]->/g, 'info[$1].'],
[/info\[([\w\d]+)\]->/g, 'info[$1].'],
[/info\.This\(\)->/g, 'info.This().'],
[/->Is(Object|String|Int32|Number)\(\)/g, '.Is$1()'],
[/info.GetReturnValue\(\).SetUndefined\(\)/g, 'return env.Undefined()'],
[/info\.GetReturnValue\(\)\.Set\(((\n|.)+?)\);/g, 'return $1;'],
// ex. Local<Value> to Napi::Value
[/v8::Local<v8::(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>/g, 'Napi::$1'],
[/Local<(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>/g, 'Napi::$1'],
// Declare an env in helper functions that take a Napi::Value
[/(\w+)\(Napi::Value (\w+)(,\s*[^()]+)?\)\s*{\n*([ ]*)/gm, '$1(Napi::Value $2$3) {\n$4Napi::Env env = $2.Env();\n$4'],
// delete #include <node.h> and/or <v8.h>
[/#include +(<|")(?:node|nan).h("|>)/g, '#include $1napi.h$2\n#include $1uv.h$2'],
// NODE_MODULE to NODE_API_MODULE
[/NODE_MODULE/g, 'NODE_API_MODULE'],
[/Nan::/g, 'Napi::'],
[/nan.h/g, 'napi.h'],
// delete .FromJust()
[/\.FromJust\(\)/g, ''],
// delete .ToLocalCheck()
[/\.ToLocalChecked\(\)/g, ''],
[/^.*->SetInternalFieldCount\(.*$/gm, ''],
// replace using node; and/or using v8; to using Napi;
[/using (node|v8);/g, 'using Napi;'],
[/using namespace (node|Nan|v8);/g, 'using namespace Napi;'],
// delete using v8::Local;
[/using v8::Local;\n/g, ''],
// replace using v8::XXX; with using Napi::XXX
[/using v8::([A-Za-z]+);/g, 'using Napi::$1;']
];
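// Illustrative effect of the operations above (the input line is
// hypothetical): the NAN_METHOD rule rewrites
//   NAN_METHOD(Add) { ... }
// into
//   Napi::Value Add(const Napi::CallbackInfo& info) { ... }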
const paths = listFiles(dir);
paths.forEach(function (dirEntry) {
const filename = dirEntry.split('\\').pop().split('/').pop();
// Check whether the file is a source file or a config file
// then execute function accordingly
const sourcePattern = /\.(h|cc|cpp)$/;
if (sourcePattern.test(filename)) {
convertFile(dirEntry, SourceFileOperations);
} else if (ConfigFileOperations[filename] != null) {
convertFile(dirEntry, ConfigFileOperations[filename]);
}
});
function listFiles (dir, filelist) {
const files = fs.readdirSync(dir);
filelist = filelist || [];
files.forEach(function (file) {
if (file === 'node_modules') {
return;
}
if (fs.statSync(path.join(dir, file)).isDirectory()) {
filelist = listFiles(path.join(dir, file), filelist);
} else {
filelist.push(path.join(dir, file));
}
});
return filelist;
}
function convert (content, operations) {
for (let i = 0; i < operations.length; i++) {
const operation = operations[i];
content = content.replace(operation[0], operation[1]);
}
return content;
}
function convertFile (fileName, operations) {
fs.readFile(fileName, 'utf-8', function (err, file) {
if (err) throw err;
file = convert(file, operations);
fs.writeFile(fileName, file, function (err) {
if (err) throw err;
});
});
}
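// Usage sketch (the script and addon paths are assumptions; see the Usage
// message printed above): convert a nan-based addon in place, optionally
// disabling C++ exceptions in the rewritten binding.gyp:
//   node conversion.js ../my-addon
//   node conversion.js ../my-addon --disable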

@ -1,79 +0,0 @@
#!/usr/bin/env node
const spawn = require('child_process').spawnSync;
const filesToCheck = '*.js';
const FORMAT_START = process.env.FORMAT_START || 'main';
const IS_WIN = process.platform === 'win32';
const ESLINT_PATH = IS_WIN ? 'node_modules\\.bin\\eslint.cmd' : 'node_modules/.bin/eslint';
function main (args) {
let fix = false;
while (args.length > 0) {
switch (args[0]) {
case '-f':
case '--fix':
fix = true;
break;
default:
}
args.shift();
}
// Check JS files changed but not yet staged
const fileUnStaged = spawn(
'git',
['diff', '--name-only', FORMAT_START, filesToCheck],
{
encoding: 'utf-8'
}
);
// Check JS files already staged
const fileStaged = spawn(
'git',
['diff', '--name-only', '--cached', FORMAT_START, filesToCheck],
{
encoding: 'utf-8'
}
);
const options = [
...fileStaged.stdout.split('\n').filter((f) => f !== ''),
...fileUnStaged.stdout.split('\n').filter((f) => f !== '')
];
if (fix) {
options.push('--fix');
}
const result = spawn(ESLINT_PATH, [...options], {
encoding: 'utf-8'
});
if (result.error && result.error.code === 'ENOENT') {
console.error('ESLint not found! Expected to find it at', ESLINT_PATH);
return 2;
}
if (result.status === 1) {
console.error('Eslint error:', result.stdout);
const fixCmd = 'npm run lint:fix';
console.error(`ERROR: please run "${fixCmd}" to format changes in your commit
Note that when running the command locally, please keep your local
main branch and working branch up to date with nodejs/node-addon-api
to exclude unrelated complaints.
Or you can run "env FORMAT_START=upstream/main ${fixCmd}".
Also fix JS files by yourself if necessary.`);
return 1;
}
if (result.stderr) {
console.error('Error running eslint:', result.stderr);
return 2;
}
}
if (require.main === module) {
process.exitCode = main(process.argv.slice(2));
}
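// Usage sketch (mirrors the "lint" and "lint:fix" scripts in package.json):
//   node tools/eslint-format          # report style issues in changed JS files
//   node tools/eslint-format --fix    # also write the fixes in place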

1080
node_modules/tar/README.md generated vendored

File diff suppressed because it is too large Load Diff

@ -1,12 +0,0 @@
// warning: extremely hot code path.
// This has been meticulously optimized for use
// within npm install on large package trees.
// Do not edit without careful benchmarking.
const normalizeCache = Object.create(null)
const { hasOwnProperty } = Object.prototype
module.exports = s => {
if (!hasOwnProperty.call(normalizeCache, s)) {
normalizeCache[s] = s.normalize('NFD')
}
return normalizeCache[s]
}
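// Minimal usage sketch (the strings are illustrative): both spellings of
// "café" normalize to the same NFD form, and repeated calls for the same
// input are served from normalizeCache.
if (require.main === module) {
  const normalize = module.exports
  console.log(normalize('caf\u00e9') === normalize('cafe\u0301')) // true
}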

432
node_modules/tar/lib/pack.js generated vendored

@ -1,432 +0,0 @@
'use strict'
// A readable tar stream creator
// Technically, this is a transform stream that you write paths into,
// and tar format comes out of.
// The `add()` method is like `write()`, but returns this,
// and `end()` returns `this` as well, so you can
// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
// You could also do something like:
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
class PackJob {
constructor (path, absolute) {
this.path = path || './'
this.absolute = absolute
this.entry = null
this.stat = null
this.readdir = null
this.pending = false
this.ignore = false
this.piped = false
}
}
const { Minipass } = require('minipass')
const zlib = require('minizlib')
const ReadEntry = require('./read-entry.js')
const WriteEntry = require('./write-entry.js')
const WriteEntrySync = WriteEntry.Sync
const WriteEntryTar = WriteEntry.Tar
const Yallist = require('yallist')
const EOF = Buffer.alloc(1024)
const ONSTAT = Symbol('onStat')
const ENDED = Symbol('ended')
const QUEUE = Symbol('queue')
const CURRENT = Symbol('current')
const PROCESS = Symbol('process')
const PROCESSING = Symbol('processing')
const PROCESSJOB = Symbol('processJob')
const JOBS = Symbol('jobs')
const JOBDONE = Symbol('jobDone')
const ADDFSENTRY = Symbol('addFSEntry')
const ADDTARENTRY = Symbol('addTarEntry')
const STAT = Symbol('stat')
const READDIR = Symbol('readdir')
const ONREADDIR = Symbol('onreaddir')
const PIPE = Symbol('pipe')
const ENTRY = Symbol('entry')
const ENTRYOPT = Symbol('entryOpt')
const WRITEENTRYCLASS = Symbol('writeEntryClass')
const WRITE = Symbol('write')
const ONDRAIN = Symbol('ondrain')
const fs = require('fs')
const path = require('path')
const warner = require('./warn-mixin.js')
const normPath = require('./normalize-windows-path.js')
const Pack = warner(class Pack extends Minipass {
constructor (opt) {
super(opt)
opt = opt || Object.create(null)
this.opt = opt
this.file = opt.file || ''
this.cwd = opt.cwd || process.cwd()
this.maxReadSize = opt.maxReadSize
this.preservePaths = !!opt.preservePaths
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.prefix = normPath(opt.prefix || '')
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.readdirCache = opt.readdirCache || new Map()
this[WRITEENTRYCLASS] = WriteEntry
if (typeof opt.onwarn === 'function') {
this.on('warn', opt.onwarn)
}
this.portable = !!opt.portable
this.zip = null
if (opt.gzip || opt.brotli) {
if (opt.gzip && opt.brotli) {
throw new TypeError('gzip and brotli are mutually exclusive')
}
if (opt.gzip) {
if (typeof opt.gzip !== 'object') {
opt.gzip = {}
}
if (this.portable) {
opt.gzip.portable = true
}
this.zip = new zlib.Gzip(opt.gzip)
}
if (opt.brotli) {
if (typeof opt.brotli !== 'object') {
opt.brotli = {}
}
this.zip = new zlib.BrotliCompress(opt.brotli)
}
this.zip.on('data', chunk => super.write(chunk))
this.zip.on('end', _ => super.end())
this.zip.on('drain', _ => this[ONDRAIN]())
this.on('resume', _ => this.zip.resume())
} else {
this.on('drain', this[ONDRAIN])
}
this.noDirRecurse = !!opt.noDirRecurse
this.follow = !!opt.follow
this.noMtime = !!opt.noMtime
this.mtime = opt.mtime || null
this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
this[QUEUE] = new Yallist()
this[JOBS] = 0
this.jobs = +opt.jobs || 4
this[PROCESSING] = false
this[ENDED] = false
}
[WRITE] (chunk) {
return super.write(chunk)
}
add (path) {
this.write(path)
return this
}
end (path) {
if (path) {
this.write(path)
}
this[ENDED] = true
this[PROCESS]()
return this
}
write (path) {
if (this[ENDED]) {
throw new Error('write after end')
}
if (path instanceof ReadEntry) {
this[ADDTARENTRY](path)
} else {
this[ADDFSENTRY](path)
}
return this.flowing
}
[ADDTARENTRY] (p) {
const absolute = normPath(path.resolve(this.cwd, p.path))
// in this case, we don't have to wait for the stat
if (!this.filter(p.path, p)) {
p.resume()
} else {
const job = new PackJob(p.path, absolute)
job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
job.entry.on('end', _ => this[JOBDONE](job))
this[JOBS] += 1
this[QUEUE].push(job)
}
this[PROCESS]()
}
[ADDFSENTRY] (p) {
const absolute = normPath(path.resolve(this.cwd, p))
this[QUEUE].push(new PackJob(p, absolute))
this[PROCESS]()
}
[STAT] (job) {
job.pending = true
this[JOBS] += 1
const stat = this.follow ? 'stat' : 'lstat'
fs[stat](job.absolute, (er, stat) => {
job.pending = false
this[JOBS] -= 1
if (er) {
this.emit('error', er)
} else {
this[ONSTAT](job, stat)
}
})
}
[ONSTAT] (job, stat) {
this.statCache.set(job.absolute, stat)
job.stat = stat
// now we have the stat, we can filter it.
if (!this.filter(job.path, stat)) {
job.ignore = true
}
this[PROCESS]()
}
[READDIR] (job) {
job.pending = true
this[JOBS] += 1
fs.readdir(job.absolute, (er, entries) => {
job.pending = false
this[JOBS] -= 1
if (er) {
return this.emit('error', er)
}
this[ONREADDIR](job, entries)
})
}
[ONREADDIR] (job, entries) {
this.readdirCache.set(job.absolute, entries)
job.readdir = entries
this[PROCESS]()
}
[PROCESS] () {
if (this[PROCESSING]) {
return
}
this[PROCESSING] = true
for (let w = this[QUEUE].head;
w !== null && this[JOBS] < this.jobs;
w = w.next) {
this[PROCESSJOB](w.value)
if (w.value.ignore) {
const p = w.next
this[QUEUE].removeNode(w)
w.next = p
}
}
this[PROCESSING] = false
if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
if (this.zip) {
this.zip.end(EOF)
} else {
super.write(EOF)
super.end()
}
}
}
get [CURRENT] () {
return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
}
[JOBDONE] (job) {
this[QUEUE].shift()
this[JOBS] -= 1
this[PROCESS]()
}
[PROCESSJOB] (job) {
if (job.pending) {
return
}
if (job.entry) {
if (job === this[CURRENT] && !job.piped) {
this[PIPE](job)
}
return
}
if (!job.stat) {
if (this.statCache.has(job.absolute)) {
this[ONSTAT](job, this.statCache.get(job.absolute))
} else {
this[STAT](job)
}
}
if (!job.stat) {
return
}
// filtered out!
if (job.ignore) {
return
}
if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
if (this.readdirCache.has(job.absolute)) {
this[ONREADDIR](job, this.readdirCache.get(job.absolute))
} else {
this[READDIR](job)
}
if (!job.readdir) {
return
}
}
// we know it doesn't have an entry, because that got checked above
job.entry = this[ENTRY](job)
if (!job.entry) {
job.ignore = true
return
}
if (job === this[CURRENT] && !job.piped) {
this[PIPE](job)
}
}
[ENTRYOPT] (job) {
return {
onwarn: (code, msg, data) => this.warn(code, msg, data),
noPax: this.noPax,
cwd: this.cwd,
absolute: job.absolute,
preservePaths: this.preservePaths,
maxReadSize: this.maxReadSize,
strict: this.strict,
portable: this.portable,
linkCache: this.linkCache,
statCache: this.statCache,
noMtime: this.noMtime,
mtime: this.mtime,
prefix: this.prefix,
}
}
[ENTRY] (job) {
this[JOBS] += 1
try {
return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
.on('end', () => this[JOBDONE](job))
.on('error', er => this.emit('error', er))
} catch (er) {
this.emit('error', er)
}
}
[ONDRAIN] () {
if (this[CURRENT] && this[CURRENT].entry) {
this[CURRENT].entry.resume()
}
}
// like .pipe() but using super, because our write() is special
[PIPE] (job) {
job.piped = true
if (job.readdir) {
job.readdir.forEach(entry => {
const p = job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
}
const source = job.entry
const zip = this.zip
if (zip) {
source.on('data', chunk => {
if (!zip.write(chunk)) {
source.pause()
}
})
} else {
source.on('data', chunk => {
if (!super.write(chunk)) {
source.pause()
}
})
}
}
pause () {
if (this.zip) {
this.zip.pause()
}
return super.pause()
}
})
class PackSync extends Pack {
constructor (opt) {
super(opt)
this[WRITEENTRYCLASS] = WriteEntrySync
}
// pause/resume are no-ops in sync streams.
pause () {}
resume () {}
[STAT] (job) {
const stat = this.follow ? 'statSync' : 'lstatSync'
this[ONSTAT](job, fs[stat](job.absolute))
}
[READDIR] (job, stat) {
this[ONREADDIR](job, fs.readdirSync(job.absolute))
}
// gotta get it all in this tick
[PIPE] (job) {
const source = job.entry
const zip = this.zip
if (job.readdir) {
job.readdir.forEach(entry => {
const p = job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
}
if (zip) {
source.on('data', chunk => {
zip.write(chunk)
})
} else {
source.on('data', chunk => {
super[WRITE](chunk)
})
}
}
}
Pack.Sync = PackSync
module.exports = Pack
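// Minimal usage sketch (file names are hypothetical): when run directly,
// pack the given path into a gzipped tarball using the chained add()/end()
// API described in the comment at the top of this file.
if (require.main === module) {
  new Pack({ gzip: true })
    .add('some-file.txt')
    .end()
    .pipe(fs.createWriteStream('out.tgz'))
}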

552
node_modules/tar/lib/parse.js generated vendored

@ -1,552 +0,0 @@
'use strict'
// this[BUFFER] is the remainder of a chunk if we're waiting for
// the full 512 bytes of a header to come in. We will Buffer.concat()
// it to the next write(), which is a mem copy, but a small one.
//
// this[QUEUE] is a Yallist of entries that haven't been emitted yet.
// It can only get filled up if the user keeps write()ing after a
// write() returns false, or does a write() with more than one entry
//
// We don't buffer chunks, we always parse them and either create an
// entry, or push it into the active entry. The ReadEntry class knows
// to throw data away if .ignore=true
//
// Shift entry off the buffer when it emits 'end', and emit 'entry' for
// the next one in the list.
//
// At any time, we're pushing body chunks into the entry at WRITEENTRY,
// and waiting for 'end' on the entry at READENTRY
//
// ignored entries get .resume() called on them straight away
const warner = require('./warn-mixin.js')
const Header = require('./header.js')
const EE = require('events')
const Yallist = require('yallist')
const maxMetaEntrySize = 1024 * 1024
const Entry = require('./read-entry.js')
const Pax = require('./pax.js')
const zlib = require('minizlib')
const { nextTick } = require('process')
const gzipHeader = Buffer.from([0x1f, 0x8b])
const STATE = Symbol('state')
const WRITEENTRY = Symbol('writeEntry')
const READENTRY = Symbol('readEntry')
const NEXTENTRY = Symbol('nextEntry')
const PROCESSENTRY = Symbol('processEntry')
const EX = Symbol('extendedHeader')
const GEX = Symbol('globalExtendedHeader')
const META = Symbol('meta')
const EMITMETA = Symbol('emitMeta')
const BUFFER = Symbol('buffer')
const QUEUE = Symbol('queue')
const ENDED = Symbol('ended')
const EMITTEDEND = Symbol('emittedEnd')
const EMIT = Symbol('emit')
const UNZIP = Symbol('unzip')
const CONSUMECHUNK = Symbol('consumeChunk')
const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
const CONSUMEBODY = Symbol('consumeBody')
const CONSUMEMETA = Symbol('consumeMeta')
const CONSUMEHEADER = Symbol('consumeHeader')
const CONSUMING = Symbol('consuming')
const BUFFERCONCAT = Symbol('bufferConcat')
const MAYBEEND = Symbol('maybeEnd')
const WRITING = Symbol('writing')
const ABORTED = Symbol('aborted')
const DONE = Symbol('onDone')
const SAW_VALID_ENTRY = Symbol('sawValidEntry')
const SAW_NULL_BLOCK = Symbol('sawNullBlock')
const SAW_EOF = Symbol('sawEOF')
const CLOSESTREAM = Symbol('closeStream')
const noop = _ => true
module.exports = warner(class Parser extends EE {
constructor (opt) {
opt = opt || {}
super(opt)
this.file = opt.file || ''
// set to boolean false when an entry starts. 1024 bytes of \0
// is technically a valid tarball, albeit a boring one.
this[SAW_VALID_ENTRY] = null
// these BADARCHIVE errors can't be detected early. listen on DONE.
this.on(DONE, _ => {
if (this[STATE] === 'begin' || this[SAW_VALID_ENTRY] === false) {
// either less than 1 block of data, or all entries were invalid.
// Either way, probably not even a tarball.
this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format')
}
})
if (opt.ondone) {
this.on(DONE, opt.ondone)
} else {
this.on(DONE, _ => {
this.emit('prefinish')
this.emit('finish')
this.emit('end')
})
}
this.strict = !!opt.strict
this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
this.filter = typeof opt.filter === 'function' ? opt.filter : noop
// Unlike gzip, brotli doesn't have any magic bytes to identify it
// Users need to explicitly tell us they're extracting a brotli file
// Or we infer from the file extension
const isTBR = (opt.file && (
opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr')))
// if it's a tbr file it MIGHT be brotli, but we don't know until
// we look at it and verify it's not a valid tar file.
this.brotli = !opt.gzip && opt.brotli !== undefined ? opt.brotli
: isTBR ? undefined
: false
// have to set this so that streams are ok piping into it
this.writable = true
this.readable = false
this[QUEUE] = new Yallist()
this[BUFFER] = null
this[READENTRY] = null
this[WRITEENTRY] = null
this[STATE] = 'begin'
this[META] = ''
this[EX] = null
this[GEX] = null
this[ENDED] = false
this[UNZIP] = null
this[ABORTED] = false
this[SAW_NULL_BLOCK] = false
this[SAW_EOF] = false
this.on('end', () => this[CLOSESTREAM]())
if (typeof opt.onwarn === 'function') {
this.on('warn', opt.onwarn)
}
if (typeof opt.onentry === 'function') {
this.on('entry', opt.onentry)
}
}
[CONSUMEHEADER] (chunk, position) {
if (this[SAW_VALID_ENTRY] === null) {
this[SAW_VALID_ENTRY] = false
}
let header
try {
header = new Header(chunk, position, this[EX], this[GEX])
} catch (er) {
return this.warn('TAR_ENTRY_INVALID', er)
}
if (header.nullBlock) {
if (this[SAW_NULL_BLOCK]) {
this[SAW_EOF] = true
// ending an archive with no entries. pointless, but legal.
if (this[STATE] === 'begin') {
this[STATE] = 'header'
}
this[EMIT]('eof')
} else {
this[SAW_NULL_BLOCK] = true
this[EMIT]('nullBlock')
}
} else {
this[SAW_NULL_BLOCK] = false
if (!header.cksumValid) {
this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header })
} else if (!header.path) {
this.warn('TAR_ENTRY_INVALID', 'path is required', { header })
} else {
const type = header.type
if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
this.warn('TAR_ENTRY_INVALID', 'linkpath required', { header })
} else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath) {
this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { header })
} else {
const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
// we do this for meta & ignored entries as well, because they
// are still valid tar, or else we wouldn't know to ignore them
if (!this[SAW_VALID_ENTRY]) {
if (entry.remain) {
// this might be the one!
const onend = () => {
if (!entry.invalid) {
this[SAW_VALID_ENTRY] = true
}
}
entry.on('end', onend)
} else {
this[SAW_VALID_ENTRY] = true
}
}
if (entry.meta) {
if (entry.size > this.maxMetaEntrySize) {
entry.ignore = true
this[EMIT]('ignoredEntry', entry)
this[STATE] = 'ignore'
entry.resume()
} else if (entry.size > 0) {
this[META] = ''
entry.on('data', c => this[META] += c)
this[STATE] = 'meta'
}
} else {
this[EX] = null
entry.ignore = entry.ignore || !this.filter(entry.path, entry)
if (entry.ignore) {
// probably valid, just not something we care about
this[EMIT]('ignoredEntry', entry)
this[STATE] = entry.remain ? 'ignore' : 'header'
entry.resume()
} else {
if (entry.remain) {
this[STATE] = 'body'
} else {
this[STATE] = 'header'
entry.end()
}
if (!this[READENTRY]) {
this[QUEUE].push(entry)
this[NEXTENTRY]()
} else {
this[QUEUE].push(entry)
}
}
}
}
}
}
}
[CLOSESTREAM] () {
nextTick(() => this.emit('close'))
}
[PROCESSENTRY] (entry) {
let go = true
if (!entry) {
this[READENTRY] = null
go = false
} else if (Array.isArray(entry)) {
this.emit.apply(this, entry)
} else {
this[READENTRY] = entry
this.emit('entry', entry)
if (!entry.emittedEnd) {
entry.on('end', _ => this[NEXTENTRY]())
go = false
}
}
return go
}
[NEXTENTRY] () {
do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
if (!this[QUEUE].length) {
// At this point, there's nothing in the queue, but we may have an
// entry which is being consumed (readEntry).
// If we don't, then we definitely can handle more data.
// If we do, and either it's flowing, or it has never had any data
// written to it, then it needs more.
// The only other possibility is that it has returned false from a
// write() call, so we wait for the next drain to continue.
const re = this[READENTRY]
const drainNow = !re || re.flowing || re.size === re.remain
if (drainNow) {
if (!this[WRITING]) {
this.emit('drain')
}
} else {
re.once('drain', _ => this.emit('drain'))
}
}
}
[CONSUMEBODY] (chunk, position) {
// write up to but no more than writeEntry.blockRemain
const entry = this[WRITEENTRY]
const br = entry.blockRemain
const c = (br >= chunk.length && position === 0) ? chunk
: chunk.slice(position, position + br)
entry.write(c)
if (!entry.blockRemain) {
this[STATE] = 'header'
this[WRITEENTRY] = null
entry.end()
}
return c.length
}
[CONSUMEMETA] (chunk, position) {
const entry = this[WRITEENTRY]
const ret = this[CONSUMEBODY](chunk, position)
// if we finished, then the entry is reset
if (!this[WRITEENTRY]) {
this[EMITMETA](entry)
}
return ret
}
[EMIT] (ev, data, extra) {
if (!this[QUEUE].length && !this[READENTRY]) {
this.emit(ev, data, extra)
} else {
this[QUEUE].push([ev, data, extra])
}
}
[EMITMETA] (entry) {
this[EMIT]('meta', this[META])
switch (entry.type) {
case 'ExtendedHeader':
case 'OldExtendedHeader':
this[EX] = Pax.parse(this[META], this[EX], false)
break
case 'GlobalExtendedHeader':
this[GEX] = Pax.parse(this[META], this[GEX], true)
break
case 'NextFileHasLongPath':
case 'OldGnuLongPath':
this[EX] = this[EX] || Object.create(null)
this[EX].path = this[META].replace(/\0.*/, '')
break
case 'NextFileHasLongLinkpath':
this[EX] = this[EX] || Object.create(null)
this[EX].linkpath = this[META].replace(/\0.*/, '')
break
/* istanbul ignore next */
default: throw new Error('unknown meta: ' + entry.type)
}
}
abort (error) {
this[ABORTED] = true
this.emit('abort', error)
// always throws, even in non-strict mode
this.warn('TAR_ABORT', error, { recoverable: false })
}
write (chunk) {
if (this[ABORTED]) {
return
}
// first write, might be gzipped
const needSniff = this[UNZIP] === null ||
this.brotli === undefined && this[UNZIP] === false
if (needSniff && chunk) {
if (this[BUFFER]) {
chunk = Buffer.concat([this[BUFFER], chunk])
this[BUFFER] = null
}
if (chunk.length < gzipHeader.length) {
this[BUFFER] = chunk
return true
}
// look for gzip header
for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
if (chunk[i] !== gzipHeader[i]) {
this[UNZIP] = false
}
}
const maybeBrotli = this.brotli === undefined
if (this[UNZIP] === false && maybeBrotli) {
// read the first header to see if it's a valid tar file. If so,
// we can safely assume that it's not actually brotli, despite the
// .tbr or .tar.br file extension.
// if we ended before getting a full chunk, yes, def brotli
if (chunk.length < 512) {
if (this[ENDED]) {
this.brotli = true
} else {
this[BUFFER] = chunk
return true
}
} else {
// if it's tar, it's pretty reliably not brotli, chances of
// that happening are astronomical.
try {
new Header(chunk.slice(0, 512))
this.brotli = false
} catch (_) {
this.brotli = true
}
}
}
if (this[UNZIP] === null || (this[UNZIP] === false && this.brotli)) {
const ended = this[ENDED]
this[ENDED] = false
this[UNZIP] = this[UNZIP] === null
? new zlib.Unzip()
: new zlib.BrotliDecompress()
this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
this[UNZIP].on('error', er => this.abort(er))
this[UNZIP].on('end', _ => {
this[ENDED] = true
this[CONSUMECHUNK]()
})
this[WRITING] = true
const ret = this[UNZIP][ended ? 'end' : 'write'](chunk)
this[WRITING] = false
return ret
}
}
this[WRITING] = true
if (this[UNZIP]) {
this[UNZIP].write(chunk)
} else {
this[CONSUMECHUNK](chunk)
}
this[WRITING] = false
// return false if there's a queue, or if the current entry isn't flowing
const ret =
this[QUEUE].length ? false :
this[READENTRY] ? this[READENTRY].flowing :
true
// if we have no queue, then that means a clogged READENTRY
if (!ret && !this[QUEUE].length) {
this[READENTRY].once('drain', _ => this.emit('drain'))
}
return ret
}
[BUFFERCONCAT] (c) {
if (c && !this[ABORTED]) {
this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
}
}
[MAYBEEND] () {
if (this[ENDED] &&
!this[EMITTEDEND] &&
!this[ABORTED] &&
!this[CONSUMING]) {
this[EMITTEDEND] = true
const entry = this[WRITEENTRY]
if (entry && entry.blockRemain) {
// truncated, likely a damaged file
const have = this[BUFFER] ? this[BUFFER].length : 0
this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${
entry.blockRemain} more bytes, only ${have} available)`, { entry })
if (this[BUFFER]) {
entry.write(this[BUFFER])
}
entry.end()
}
this[EMIT](DONE)
}
}
[CONSUMECHUNK] (chunk) {
if (this[CONSUMING]) {
this[BUFFERCONCAT](chunk)
} else if (!chunk && !this[BUFFER]) {
this[MAYBEEND]()
} else {
this[CONSUMING] = true
if (this[BUFFER]) {
this[BUFFERCONCAT](chunk)
const c = this[BUFFER]
this[BUFFER] = null
this[CONSUMECHUNKSUB](c)
} else {
this[CONSUMECHUNKSUB](chunk)
}
while (this[BUFFER] &&
this[BUFFER].length >= 512 &&
!this[ABORTED] &&
!this[SAW_EOF]) {
const c = this[BUFFER]
this[BUFFER] = null
this[CONSUMECHUNKSUB](c)
}
this[CONSUMING] = false
}
if (!this[BUFFER] || this[ENDED]) {
this[MAYBEEND]()
}
}
[CONSUMECHUNKSUB] (chunk) {
// we know that we are in CONSUMING mode, so anything written goes into
// the buffer. Advance the position and put any remainder in the buffer.
let position = 0
const length = chunk.length
while (position + 512 <= length && !this[ABORTED] && !this[SAW_EOF]) {
switch (this[STATE]) {
case 'begin':
case 'header':
this[CONSUMEHEADER](chunk, position)
position += 512
break
case 'ignore':
case 'body':
position += this[CONSUMEBODY](chunk, position)
break
case 'meta':
position += this[CONSUMEMETA](chunk, position)
break
/* istanbul ignore next */
default:
throw new Error('invalid state: ' + this[STATE])
}
}
if (position < length) {
if (this[BUFFER]) {
this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
} else {
this[BUFFER] = chunk.slice(position)
}
}
}
end (chunk) {
if (!this[ABORTED]) {
if (this[UNZIP]) {
this[UNZIP].end(chunk)
} else {
this[ENDED] = true
if (this.brotli === undefined) chunk = chunk || Buffer.alloc(0)
this.write(chunk)
}
}
}
})
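// Minimal usage sketch (the archive name is hypothetical): list entry paths,
// resuming each entry so the parser keeps flowing, as the comments at the
// top of this file require for unconsumed bodies.
if (require.main === module) {
  const fs = require('fs')
  const Parser = module.exports
  fs.createReadStream('archive.tar').pipe(new Parser({
    onentry: entry => {
      console.log(entry.path)
      entry.resume() // discard the body so parsing continues
    },
  }))
}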

@ -1,156 +0,0 @@
// A path exclusive reservation system
// reserve([list, of, paths], fn)
// When the fn is first in line for all its paths, it
// is called with a cb that clears the reservation.
//
// Used by async unpack to avoid clobbering paths in use,
// while still allowing maximal safe parallelization.
const assert = require('assert')
const normalize = require('./normalize-unicode.js')
const stripSlashes = require('./strip-trailing-slashes.js')
const { join } = require('path')
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
module.exports = () => {
// path => [function or Set]
// A Set object means a directory reservation
// A fn is a direct reservation on that path
const queues = new Map()
// fn => {paths:[path,...], dirs:[path, ...]}
const reservations = new Map()
// return a set of parent dirs for a given path
// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
const getDirs = path => {
const dirs = path.split('/').slice(0, -1).reduce((set, path) => {
if (set.length) {
path = join(set[set.length - 1], path)
}
set.push(path || '/')
return set
}, [])
return dirs
}
// functions currently running
const running = new Set()
// return the queues for each path the function cares about
// fn => {paths, dirs}
const getQueues = fn => {
const res = reservations.get(fn)
/* istanbul ignore if - unpossible */
if (!res) {
throw new Error('function does not have any path reservations')
}
return {
paths: res.paths.map(path => queues.get(path)),
dirs: [...res.dirs].map(path => queues.get(path)),
}
}
// check if fn is first in line for all its paths, and is
// included in the first set for all its dir queues
const check = fn => {
const { paths, dirs } = getQueues(fn)
return paths.every(q => q[0] === fn) &&
dirs.every(q => q[0] instanceof Set && q[0].has(fn))
}
// run the function if it's first in line and not already running
const run = fn => {
if (running.has(fn) || !check(fn)) {
return false
}
running.add(fn)
fn(() => clear(fn))
return true
}
const clear = fn => {
if (!running.has(fn)) {
return false
}
const { paths, dirs } = reservations.get(fn)
const next = new Set()
paths.forEach(path => {
const q = queues.get(path)
assert.equal(q[0], fn)
if (q.length === 1) {
queues.delete(path)
} else {
q.shift()
if (typeof q[0] === 'function') {
next.add(q[0])
} else {
q[0].forEach(fn => next.add(fn))
}
}
})
dirs.forEach(dir => {
const q = queues.get(dir)
assert(q[0] instanceof Set)
if (q[0].size === 1 && q.length === 1) {
queues.delete(dir)
} else if (q[0].size === 1) {
q.shift()
// must be a function or else the Set would've been reused
next.add(q[0])
} else {
q[0].delete(fn)
}
})
running.delete(fn)
next.forEach(fn => run(fn))
return true
}
const reserve = (paths, fn) => {
// collide on matches across case and unicode normalization
// On windows, thanks to the magic of 8.3 shortnames, it is fundamentally
// impossible to determine whether two paths refer to the same thing on
// disk, without asking the kernel for a shortname.
// So, we just pretend that every path matches every other path here,
// effectively removing all parallelization on windows.
paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => {
// don't need normPath, because we skip this entirely for windows
return stripSlashes(join(normalize(p))).toLowerCase()
})
const dirs = new Set(
paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))
)
reservations.set(fn, { dirs, paths })
paths.forEach(path => {
const q = queues.get(path)
if (!q) {
queues.set(path, [fn])
} else {
q.push(fn)
}
})
dirs.forEach(dir => {
const q = queues.get(dir)
if (!q) {
queues.set(dir, [new Set([fn])])
} else if (q[q.length - 1] instanceof Set) {
q[q.length - 1].add(fn)
} else {
q.push(new Set([fn]))
}
})
return run(fn)
}
return { check, reserve }
}
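// Minimal usage sketch (paths are hypothetical): two reservations on the
// same path run strictly one after the other, never in parallel.
if (require.main === module) {
  const { reserve } = module.exports()
  reserve(['a/b'], done => {
    console.log('first holds a/b')
    setTimeout(done, 50)
  })
  reserve(['a/b'], done => {
    console.log('second runs only after the first clears')
    done()
  })
}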

107
node_modules/tar/lib/read-entry.js generated vendored

@ -1,107 +0,0 @@
'use strict'
const { Minipass } = require('minipass')
const normPath = require('./normalize-windows-path.js')
const SLURP = Symbol('slurp')
module.exports = class ReadEntry extends Minipass {
constructor (header, ex, gex) {
super()
// read entries always start life paused. this is to avoid the
// situation where Minipass's auto-ending empty streams results
// in an entry ending before we're ready for it.
this.pause()
this.extended = ex
this.globalExtended = gex
this.header = header
this.startBlockSize = 512 * Math.ceil(header.size / 512)
this.blockRemain = this.startBlockSize
this.remain = header.size
this.type = header.type
this.meta = false
this.ignore = false
switch (this.type) {
case 'File':
case 'OldFile':
case 'Link':
case 'SymbolicLink':
case 'CharacterDevice':
case 'BlockDevice':
case 'Directory':
case 'FIFO':
case 'ContiguousFile':
case 'GNUDumpDir':
break
case 'NextFileHasLongLinkpath':
case 'NextFileHasLongPath':
case 'OldGnuLongPath':
case 'GlobalExtendedHeader':
case 'ExtendedHeader':
case 'OldExtendedHeader':
this.meta = true
break
// NOTE: gnutar and bsdtar treat unrecognized types as 'File'
// it may be worth doing the same, but with a warning.
default:
this.ignore = true
}
this.path = normPath(header.path)
this.mode = header.mode
if (this.mode) {
this.mode = this.mode & 0o7777
}
this.uid = header.uid
this.gid = header.gid
this.uname = header.uname
this.gname = header.gname
this.size = header.size
this.mtime = header.mtime
this.atime = header.atime
this.ctime = header.ctime
this.linkpath = normPath(header.linkpath)
if (ex) {
this[SLURP](ex)
}
if (gex) {
this[SLURP](gex, true)
}
}
write (data) {
const writeLen = data.length
if (writeLen > this.blockRemain) {
throw new Error('writing more to entry than is appropriate')
}
const r = this.remain
const br = this.blockRemain
this.remain = Math.max(0, r - writeLen)
this.blockRemain = Math.max(0, br - writeLen)
if (this.ignore) {
return true
}
if (r >= writeLen) {
return super.write(data)
}
// r < writeLen
return super.write(data.slice(0, r))
}
[SLURP] (ex, global) {
for (const k in ex) {
// we slurp in everything except for the path attribute in
// a global extended header, because that's weird.
if (ex[k] !== null && ex[k] !== undefined &&
!(global && k === 'path')) {
this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k]
}
}
}
}
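// Minimal usage sketch (header fields are illustrative, and it is assumed
// here that ./header.js accepts a plain object): a 5-byte file still
// occupies one full 512-byte block on disk.
if (require.main === module) {
  const Header = require('./header.js')
  const entry = new module.exports(
    new Header({ path: 'hello.txt', type: 'File', size: 5 }))
  console.log(entry.remain, entry.blockRemain) // 5 512
}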

246
node_modules/tar/lib/replace.js generated vendored

@ -1,246 +0,0 @@
'use strict'
// tar -r
const hlo = require('./high-level-opt.js')
const Pack = require('./pack.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const t = require('./list.js')
const path = require('path')
// starting at the head of the file, read a Header
// If the checksum is invalid, that's our position to start writing
// If it is, jump forward by the specified size (round up to 512)
// and try again.
// Write the new Pack stream starting there.
const Header = require('./header.js')
module.exports = (opt_, files, cb) => {
const opt = hlo(opt_)
if (!opt.file) {
throw new TypeError('file is required')
}
if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
throw new TypeError('cannot append to compressed archives')
}
if (!files || !Array.isArray(files) || !files.length) {
throw new TypeError('no files or directories specified')
}
files = Array.from(files)
return opt.sync ? replaceSync(opt, files)
: replace(opt, files, cb)
}
const replaceSync = (opt, files) => {
const p = new Pack.Sync(opt)
let threw = true
let fd
let position
try {
try {
fd = fs.openSync(opt.file, 'r+')
} catch (er) {
if (er.code === 'ENOENT') {
fd = fs.openSync(opt.file, 'w+')
} else {
throw er
}
}
const st = fs.fstatSync(fd)
const headBuf = Buffer.alloc(512)
POSITION: for (position = 0; position < st.size; position += 512) {
for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
bytes = fs.readSync(
fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
)
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) {
throw new Error('cannot append to compressed archives')
}
if (!bytes) {
break POSITION
}
}
const h = new Header(headBuf)
if (!h.cksumValid) {
break
}
const entryBlockSize = 512 * Math.ceil(h.size / 512)
if (position + entryBlockSize + 512 > st.size) {
break
}
// the 512 for the header we just parsed will be added as well
// also jump ahead all the blocks for the body
position += entryBlockSize
if (opt.mtimeCache) {
opt.mtimeCache.set(h.path, h.mtime)
}
}
threw = false
streamSync(opt, p, position, fd, files)
} finally {
if (threw) {
try {
fs.closeSync(fd)
} catch (er) {}
}
}
}
const streamSync = (opt, p, position, fd, files) => {
const stream = new fsm.WriteStreamSync(opt.file, {
fd: fd,
start: position,
})
p.pipe(stream)
addFilesSync(p, files)
}
const replace = (opt, files, cb) => {
files = Array.from(files)
const p = new Pack(opt)
const getPos = (fd, size, cb_) => {
const cb = (er, pos) => {
if (er) {
fs.close(fd, _ => cb_(er))
} else {
cb_(null, pos)
}
}
let position = 0
if (size === 0) {
return cb(null, 0)
}
let bufPos = 0
const headBuf = Buffer.alloc(512)
const onread = (er, bytes) => {
if (er) {
return cb(er)
}
bufPos += bytes
if (bufPos < 512 && bytes) {
return fs.read(
fd, headBuf, bufPos, headBuf.length - bufPos,
position + bufPos, onread
)
}
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) {
return cb(new Error('cannot append to compressed archives'))
}
// truncated header
if (bufPos < 512) {
return cb(null, position)
}
const h = new Header(headBuf)
if (!h.cksumValid) {
return cb(null, position)
}
const entryBlockSize = 512 * Math.ceil(h.size / 512)
if (position + entryBlockSize + 512 > size) {
return cb(null, position)
}
position += entryBlockSize + 512
if (position >= size) {
return cb(null, position)
}
if (opt.mtimeCache) {
opt.mtimeCache.set(h.path, h.mtime)
}
bufPos = 0
fs.read(fd, headBuf, 0, 512, position, onread)
}
fs.read(fd, headBuf, 0, 512, position, onread)
}
const promise = new Promise((resolve, reject) => {
p.on('error', reject)
let flag = 'r+'
const onopen = (er, fd) => {
if (er && er.code === 'ENOENT' && flag === 'r+') {
flag = 'w+'
return fs.open(opt.file, flag, onopen)
}
if (er) {
return reject(er)
}
fs.fstat(fd, (er, st) => {
if (er) {
return fs.close(fd, () => reject(er))
}
getPos(fd, st.size, (er, position) => {
if (er) {
return reject(er)
}
const stream = new fsm.WriteStream(opt.file, {
fd: fd,
start: position,
})
p.pipe(stream)
stream.on('error', reject)
stream.on('close', resolve)
addFilesAsync(p, files)
})
})
}
fs.open(opt.file, flag, onopen)
})
return cb ? promise.then(cb, cb) : promise
}
const addFilesSync = (p, files) => {
files.forEach(file => {
if (file.charAt(0) === '@') {
t({
file: path.resolve(p.cwd, file.slice(1)),
sync: true,
noResume: true,
onentry: entry => p.add(entry),
})
} else {
p.add(file)
}
})
p.end()
}
const addFilesAsync = (p, files) => {
while (files.length) {
const file = files.shift()
if (file.charAt(0) === '@') {
return t({
file: path.resolve(p.cwd, file.slice(1)),
noResume: true,
onentry: entry => p.add(entry),
}).then(_ => addFilesAsync(p, files))
} else {
p.add(file)
}
}
p.end()
}
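
For reference, a minimal usage sketch of the replace entry point above, exposed by the tar package as tar.replace (alias tar.r). The archive and file names here are hypothetical, and the target archive must be uncompressed, per the checks at the top of the module:

// append extra.txt to an existing, uncompressed my-archive.tar
const tar = require('tar')
tar.r({ file: 'my-archive.tar', sync: true }, ['extra.txt'])
// without sync (and with no callback), the call returns a Promise instead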

923
node_modules/tar/lib/unpack.js generated vendored

@ -1,923 +0,0 @@
'use strict'
// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
// but the path reservations are required to avoid race conditions where
// parallelized unpack ops may mess with one another, due to dependencies
// (like a Link depending on its target) or destructive operations (like
// clobbering an fs object to create one of a different type.)
const assert = require('assert')
const Parser = require('./parse.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const mkdir = require('./mkdir.js')
const wc = require('./winchars.js')
const pathReservations = require('./path-reservations.js')
const stripAbsolutePath = require('./strip-absolute-path.js')
const normPath = require('./normalize-windows-path.js')
const stripSlash = require('./strip-trailing-slashes.js')
const normalize = require('./normalize-unicode.js')
const ONENTRY = Symbol('onEntry')
const CHECKFS = Symbol('checkFs')
const CHECKFS2 = Symbol('checkFs2')
const PRUNECACHE = Symbol('pruneCache')
const ISREUSABLE = Symbol('isReusable')
const MAKEFS = Symbol('makeFs')
const FILE = Symbol('file')
const DIRECTORY = Symbol('directory')
const LINK = Symbol('link')
const SYMLINK = Symbol('symlink')
const HARDLINK = Symbol('hardlink')
const UNSUPPORTED = Symbol('unsupported')
const CHECKPATH = Symbol('checkPath')
const MKDIR = Symbol('mkdir')
const ONERROR = Symbol('onError')
const PENDING = Symbol('pending')
const PEND = Symbol('pend')
const UNPEND = Symbol('unpend')
const ENDED = Symbol('ended')
const MAYBECLOSE = Symbol('maybeClose')
const SKIP = Symbol('skip')
const DOCHOWN = Symbol('doChown')
const UID = Symbol('uid')
const GID = Symbol('gid')
const CHECKED_CWD = Symbol('checkedCwd')
const crypto = require('crypto')
const getFlag = require('./get-write-flag.js')
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
const DEFAULT_MAX_DEPTH = 1024
// Unlinks on Windows are not atomic.
//
// This means that if you have a file entry, followed by another
// file entry with an identical name, and you cannot re-use the file
// (because it's a hardlink, or because unlink:true is set, or it's
// Windows, which does not have useful nlink values), then the unlink
// will be committed to the disk AFTER the new file has been written
// over the old one, deleting the new file.
//
// To work around this, on Windows systems, we rename the file and then
// delete the renamed file. It's a sloppy kludge, but frankly, I do not
// know of a better way to do this, given windows' non-atomic unlink
// semantics.
//
// See: https://github.com/npm/node-tar/issues/183
/* istanbul ignore next */
const unlinkFile = (path, cb) => {
if (!isWindows) {
return fs.unlink(path, cb)
}
const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
fs.rename(path, name, er => {
if (er) {
return cb(er)
}
fs.unlink(name, cb)
})
}
/* istanbul ignore next */
const unlinkFileSync = path => {
if (!isWindows) {
return fs.unlinkSync(path)
}
const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
fs.renameSync(path, name)
fs.unlinkSync(name)
}
// e.g. this.gid, entry.gid, this.processGid
const uint32 = (a, b, c) =>
a === a >>> 0 ? a
: b === b >>> 0 ? b
: c
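// e.g. uint32(-1, 501, 1000) === 501, since -1 fails the unsigned check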
// clear the cache if it's a case-insensitive unicode-squashing match.
// we can't know if the current file system is case-sensitive or supports
// unicode fully, so we check for similarity on the maximally compatible
// representation. Err on the side of pruning, since all it's doing is
// preventing lstats, and it's not the end of the world if we get a false
// positive.
// Note that on windows, we always drop the entire cache whenever a
// symbolic link is encountered, because 8.3 filenames are impossible
// to reason about, and collisions are hazards rather than just failures.
const cacheKeyNormalize = path => stripSlash(normPath(normalize(path)))
.toLowerCase()
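// e.g. cacheKeyNormalize('Foo/Bar/') === 'foo/bar'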
const pruneCache = (cache, abs) => {
abs = cacheKeyNormalize(abs)
for (const path of cache.keys()) {
const pnorm = cacheKeyNormalize(path)
if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
cache.delete(path)
}
}
}
const dropCache = cache => {
for (const key of cache.keys()) {
cache.delete(key)
}
}
class Unpack extends Parser {
constructor (opt) {
if (!opt) {
opt = {}
}
opt.ondone = _ => {
this[ENDED] = true
this[MAYBECLOSE]()
}
super(opt)
this[CHECKED_CWD] = false
this.reservations = pathReservations()
this.transform = typeof opt.transform === 'function' ? opt.transform : null
this.writable = true
this.readable = false
this[PENDING] = 0
this[ENDED] = false
this.dirCache = opt.dirCache || new Map()
if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
// need both or neither
if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number') {
throw new TypeError('cannot set owner without number uid and gid')
}
if (opt.preserveOwner) {
throw new TypeError(
'cannot preserve owner in archive and also set owner explicitly')
}
this.uid = opt.uid
this.gid = opt.gid
this.setOwner = true
} else {
this.uid = null
this.gid = null
this.setOwner = false
}
// default true for root
if (opt.preserveOwner === undefined && typeof opt.uid !== 'number') {
this.preserveOwner = process.getuid && process.getuid() === 0
} else {
this.preserveOwner = !!opt.preserveOwner
}
this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
process.getuid() : null
this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
process.getgid() : null
// prevent excessively deep nesting of subfolders
// set to `Infinity` to remove this restriction
this.maxDepth = typeof opt.maxDepth === 'number'
? opt.maxDepth
: DEFAULT_MAX_DEPTH
// mostly just for testing, but useful in some cases.
// Forcibly trigger a chown on every entry, no matter what
this.forceChown = opt.forceChown === true
// turn ><?| in filenames into 0xf000-higher encoded forms
this.win32 = !!opt.win32 || isWindows
// do not unpack over files that are newer than what's in the archive
this.newer = !!opt.newer
// do not unpack over ANY files
this.keep = !!opt.keep
// do not set mtime/atime of extracted entries
this.noMtime = !!opt.noMtime
// allow .., absolute path entries, and unpacking through symlinks
// without this, warn and skip .., relativize absolutes, and error
// on symlinks in extraction path
this.preservePaths = !!opt.preservePaths
// unlink files and links before writing. This breaks existing hard
// links, and removes symlink directories rather than erroring
this.unlink = !!opt.unlink
this.cwd = normPath(path.resolve(opt.cwd || process.cwd()))
this.strip = +opt.strip || 0
// if we're not chmodding, then we don't need the process umask
this.processUmask = opt.noChmod ? 0 : process.umask()
this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
// default mode for dirs created as parents
this.dmode = opt.dmode || (0o0777 & (~this.umask))
this.fmode = opt.fmode || (0o0666 & (~this.umask))
this.on('entry', entry => this[ONENTRY](entry))
}
// a bad or damaged archive is a warning for Parser, but an error
// when extracting. Mark those errors as unrecoverable, because
// the Unpack contract cannot be met.
warn (code, msg, data = {}) {
if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
data.recoverable = false
}
return super.warn(code, msg, data)
}
[MAYBECLOSE] () {
if (this[ENDED] && this[PENDING] === 0) {
this.emit('prefinish')
this.emit('finish')
this.emit('end')
}
}
[CHECKPATH] (entry) {
const p = normPath(entry.path)
const parts = p.split('/')
if (this.strip) {
if (parts.length < this.strip) {
return false
}
if (entry.type === 'Link') {
const linkparts = normPath(entry.linkpath).split('/')
if (linkparts.length >= this.strip) {
entry.linkpath = linkparts.slice(this.strip).join('/')
} else {
return false
}
}
parts.splice(0, this.strip)
entry.path = parts.join('/')
}
if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
entry,
path: p,
depth: parts.length,
maxDepth: this.maxDepth,
})
return false
}
if (!this.preservePaths) {
if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) {
this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
entry,
path: p,
})
return false
}
// strip off the root
const [root, stripped] = stripAbsolutePath(p)
if (root) {
entry.path = stripped
this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
entry,
path: p,
})
}
}
if (path.isAbsolute(entry.path)) {
entry.absolute = normPath(path.resolve(entry.path))
} else {
entry.absolute = normPath(path.resolve(this.cwd, entry.path))
}
// if we somehow ended up with a path that escapes the cwd, and we are
// not in preservePaths mode, then something is fishy! This should have
// been prevented above, so ignore this for coverage.
/* istanbul ignore if - defense in depth */
if (!this.preservePaths &&
entry.absolute.indexOf(this.cwd + '/') !== 0 &&
entry.absolute !== this.cwd) {
this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
entry,
path: normPath(entry.path),
resolvedPath: entry.absolute,
cwd: this.cwd,
})
return false
}
// an archive can set properties on the extraction directory, but it
// may not replace the cwd with a different kind of thing entirely.
if (entry.absolute === this.cwd &&
entry.type !== 'Directory' &&
entry.type !== 'GNUDumpDir') {
return false
}
// only encode : chars that aren't drive letter indicators
if (this.win32) {
const { root: aRoot } = path.win32.parse(entry.absolute)
entry.absolute = aRoot + wc.encode(entry.absolute.slice(aRoot.length))
const { root: pRoot } = path.win32.parse(entry.path)
entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length))
}
return true
}
[ONENTRY] (entry) {
if (!this[CHECKPATH](entry)) {
return entry.resume()
}
assert.equal(typeof entry.absolute, 'string')
switch (entry.type) {
case 'Directory':
case 'GNUDumpDir':
if (entry.mode) {
entry.mode = entry.mode | 0o700
}
// eslint-disable-next-line no-fallthrough
case 'File':
case 'OldFile':
case 'ContiguousFile':
case 'Link':
case 'SymbolicLink':
return this[CHECKFS](entry)
case 'CharacterDevice':
case 'BlockDevice':
case 'FIFO':
default:
return this[UNSUPPORTED](entry)
}
}
[ONERROR] (er, entry) {
// Cwd has to exist, or else nothing works. That's serious.
// Other errors are warnings, which raise the error in strict
// mode, but otherwise continue on.
if (er.name === 'CwdError') {
this.emit('error', er)
} else {
this.warn('TAR_ENTRY_ERROR', er, { entry })
this[UNPEND]()
entry.resume()
}
}
[MKDIR] (dir, mode, cb) {
mkdir(normPath(dir), {
uid: this.uid,
gid: this.gid,
processUid: this.processUid,
processGid: this.processGid,
umask: this.processUmask,
preserve: this.preservePaths,
unlink: this.unlink,
cache: this.dirCache,
cwd: this.cwd,
mode: mode,
noChmod: this.noChmod,
}, cb)
}
[DOCHOWN] (entry) {
// in preserve owner mode, chown if the entry doesn't match process
// in set owner mode, chown if setting doesn't match process
return this.forceChown ||
this.preserveOwner &&
(typeof entry.uid === 'number' && entry.uid !== this.processUid ||
typeof entry.gid === 'number' && entry.gid !== this.processGid)
||
(typeof this.uid === 'number' && this.uid !== this.processUid ||
typeof this.gid === 'number' && this.gid !== this.processGid)
}
[UID] (entry) {
return uint32(this.uid, entry.uid, this.processUid)
}
[GID] (entry) {
return uint32(this.gid, entry.gid, this.processGid)
}
[FILE] (entry, fullyDone) {
const mode = entry.mode & 0o7777 || this.fmode
const stream = new fsm.WriteStream(entry.absolute, {
flags: getFlag(entry.size),
mode: mode,
autoClose: false,
})
stream.on('error', er => {
if (stream.fd) {
fs.close(stream.fd, () => {})
}
// flush all the data out so that we aren't left hanging
// if the error wasn't actually fatal. otherwise the parse
// is blocked, and we never proceed.
stream.write = () => true
this[ONERROR](er, entry)
fullyDone()
})
let actions = 1
const done = er => {
if (er) {
/* istanbul ignore else - we should always have a fd by now */
if (stream.fd) {
fs.close(stream.fd, () => {})
}
this[ONERROR](er, entry)
fullyDone()
return
}
if (--actions === 0) {
fs.close(stream.fd, er => {
if (er) {
this[ONERROR](er, entry)
} else {
this[UNPEND]()
}
fullyDone()
})
}
}
stream.on('finish', _ => {
// if futimes fails, try utimes
// if utimes fails, fail with the original error
// same for fchown/chown
const abs = entry.absolute
const fd = stream.fd
if (entry.mtime && !this.noMtime) {
actions++
const atime = entry.atime || new Date()
const mtime = entry.mtime
fs.futimes(fd, atime, mtime, er =>
er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
: done())
}
if (this[DOCHOWN](entry)) {
actions++
const uid = this[UID](entry)
const gid = this[GID](entry)
fs.fchown(fd, uid, gid, er =>
er ? fs.chown(abs, uid, gid, er2 => done(er2 && er))
: done())
}
done()
})
const tx = this.transform ? this.transform(entry) || entry : entry
if (tx !== entry) {
tx.on('error', er => {
this[ONERROR](er, entry)
fullyDone()
})
entry.pipe(tx)
}
tx.pipe(stream)
}
[DIRECTORY] (entry, fullyDone) {
const mode = entry.mode & 0o7777 || this.dmode
this[MKDIR](entry.absolute, mode, er => {
if (er) {
this[ONERROR](er, entry)
fullyDone()
return
}
let actions = 1
const done = _ => {
if (--actions === 0) {
fullyDone()
this[UNPEND]()
entry.resume()
}
}
if (entry.mtime && !this.noMtime) {
actions++
fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done)
}
if (this[DOCHOWN](entry)) {
actions++
fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done)
}
done()
})
}
[UNSUPPORTED] (entry) {
entry.unsupported = true
this.warn('TAR_ENTRY_UNSUPPORTED',
`unsupported entry type: ${entry.type}`, { entry })
entry.resume()
}
[SYMLINK] (entry, done) {
this[LINK](entry, entry.linkpath, 'symlink', done)
}
[HARDLINK] (entry, done) {
const linkpath = normPath(path.resolve(this.cwd, entry.linkpath))
this[LINK](entry, linkpath, 'link', done)
}
[PEND] () {
this[PENDING]++
}
[UNPEND] () {
this[PENDING]--
this[MAYBECLOSE]()
}
[SKIP] (entry) {
this[UNPEND]()
entry.resume()
}
// Check if we can reuse an existing filesystem entry safely and
// overwrite it, rather than unlinking and recreating
// Windows doesn't report a useful nlink, so we just never reuse entries
[ISREUSABLE] (entry, st) {
return entry.type === 'File' &&
!this.unlink &&
st.isFile() &&
st.nlink <= 1 &&
!isWindows
}
// check if a thing is there, and if so, try to clobber it
[CHECKFS] (entry) {
this[PEND]()
const paths = [entry.path]
if (entry.linkpath) {
paths.push(entry.linkpath)
}
this.reservations.reserve(paths, done => this[CHECKFS2](entry, done))
}
[PRUNECACHE] (entry) {
// if we are not creating a directory, and the path is in the dirCache,
// then that means we are about to delete the directory we created
// previously, and it is no longer going to be a directory, and neither
// is any of its children.
// If a symbolic link is encountered, all bets are off. There is no
// reasonable way to sanitize the cache in such a way we will be able to
// avoid having filesystem collisions. If this happens with a non-symlink
// entry, it'll just fail to unpack, but a symlink to a directory, using an
// 8.3 shortname or certain unicode attacks, can evade detection and lead
// to arbitrary writes to anywhere on the system.
if (entry.type === 'SymbolicLink') {
dropCache(this.dirCache)
} else if (entry.type !== 'Directory') {
pruneCache(this.dirCache, entry.absolute)
}
}
[CHECKFS2] (entry, fullyDone) {
this[PRUNECACHE](entry)
const done = er => {
this[PRUNECACHE](entry)
fullyDone(er)
}
const checkCwd = () => {
this[MKDIR](this.cwd, this.dmode, er => {
if (er) {
this[ONERROR](er, entry)
done()
return
}
this[CHECKED_CWD] = true
start()
})
}
const start = () => {
if (entry.absolute !== this.cwd) {
const parent = normPath(path.dirname(entry.absolute))
if (parent !== this.cwd) {
return this[MKDIR](parent, this.dmode, er => {
if (er) {
this[ONERROR](er, entry)
done()
return
}
afterMakeParent()
})
}
}
afterMakeParent()
}
const afterMakeParent = () => {
fs.lstat(entry.absolute, (lstatEr, st) => {
if (st && (this.keep || this.newer && st.mtime > entry.mtime)) {
this[SKIP](entry)
done()
return
}
if (lstatEr || this[ISREUSABLE](entry, st)) {
return this[MAKEFS](null, entry, done)
}
if (st.isDirectory()) {
if (entry.type === 'Directory') {
const needChmod = !this.noChmod &&
entry.mode &&
(st.mode & 0o7777) !== entry.mode
const afterChmod = er => this[MAKEFS](er, entry, done)
if (!needChmod) {
return afterChmod()
}
return fs.chmod(entry.absolute, entry.mode, afterChmod)
}
// Not a dir entry, have to remove it.
// NB: the only way to end up with an entry that is the cwd
// itself, in such a way that == does not detect, is a
// tricky windows absolute path with UNC or 8.3 parts (and
// preservePaths:true, or else it will have been stripped).
// In that case, the user has opted out of path protections
// explicitly, so if they blow away the cwd, c'est la vie.
if (entry.absolute !== this.cwd) {
return fs.rmdir(entry.absolute, er =>
this[MAKEFS](er, entry, done))
}
}
// not a dir, and not reusable
// don't remove if the cwd, we want that error
if (entry.absolute === this.cwd) {
return this[MAKEFS](null, entry, done)
}
unlinkFile(entry.absolute, er =>
this[MAKEFS](er, entry, done))
})
}
if (this[CHECKED_CWD]) {
start()
} else {
checkCwd()
}
}
[MAKEFS] (er, entry, done) {
if (er) {
this[ONERROR](er, entry)
done()
return
}
switch (entry.type) {
case 'File':
case 'OldFile':
case 'ContiguousFile':
return this[FILE](entry, done)
case 'Link':
return this[HARDLINK](entry, done)
case 'SymbolicLink':
return this[SYMLINK](entry, done)
case 'Directory':
case 'GNUDumpDir':
return this[DIRECTORY](entry, done)
}
}
[LINK] (entry, linkpath, link, done) {
// XXX: get the type ('symlink' or 'junction') for windows
fs[link](linkpath, entry.absolute, er => {
if (er) {
this[ONERROR](er, entry)
} else {
this[UNPEND]()
entry.resume()
}
done()
})
}
}
const callSync = fn => {
try {
return [null, fn()]
} catch (er) {
return [er, null]
}
}
class UnpackSync extends Unpack {
[MAKEFS] (er, entry) {
return super[MAKEFS](er, entry, () => {})
}
[CHECKFS] (entry) {
this[PRUNECACHE](entry)
if (!this[CHECKED_CWD]) {
const er = this[MKDIR](this.cwd, this.dmode)
if (er) {
return this[ONERROR](er, entry)
}
this[CHECKED_CWD] = true
}
// don't bother to make the parent if the current entry is the cwd,
// we've already checked it.
if (entry.absolute !== this.cwd) {
const parent = normPath(path.dirname(entry.absolute))
if (parent !== this.cwd) {
const mkParent = this[MKDIR](parent, this.dmode)
if (mkParent) {
return this[ONERROR](mkParent, entry)
}
}
}
const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute))
if (st && (this.keep || this.newer && st.mtime > entry.mtime)) {
return this[SKIP](entry)
}
if (lstatEr || this[ISREUSABLE](entry, st)) {
return this[MAKEFS](null, entry)
}
if (st.isDirectory()) {
if (entry.type === 'Directory') {
const needChmod = !this.noChmod &&
entry.mode &&
(st.mode & 0o7777) !== entry.mode
const [er] = needChmod ? callSync(() => {
fs.chmodSync(entry.absolute, entry.mode)
}) : []
return this[MAKEFS](er, entry)
}
// not a dir entry, have to remove it
const [er] = callSync(() => fs.rmdirSync(entry.absolute))
return this[MAKEFS](er, entry)
}
// not a dir, and not reusable.
// don't remove if it's the cwd, since we want that error.
const [er] = entry.absolute === this.cwd ? []
: callSync(() => unlinkFileSync(entry.absolute))
this[MAKEFS](er, entry)
}
[FILE] (entry, done) {
const mode = entry.mode & 0o7777 || this.fmode
const oner = er => {
let closeError
try {
fs.closeSync(fd)
} catch (e) {
closeError = e
}
if (er || closeError) {
this[ONERROR](er || closeError, entry)
}
done()
}
let fd
try {
fd = fs.openSync(entry.absolute, getFlag(entry.size), mode)
} catch (er) {
return oner(er)
}
const tx = this.transform ? this.transform(entry) || entry : entry
if (tx !== entry) {
tx.on('error', er => this[ONERROR](er, entry))
entry.pipe(tx)
}
tx.on('data', chunk => {
try {
fs.writeSync(fd, chunk, 0, chunk.length)
} catch (er) {
oner(er)
}
})
tx.on('end', _ => {
let er = null
// try both, falling futimes back to utimes
// if either fails, handle the first error
if (entry.mtime && !this.noMtime) {
const atime = entry.atime || new Date()
const mtime = entry.mtime
try {
fs.futimesSync(fd, atime, mtime)
} catch (futimeser) {
try {
fs.utimesSync(entry.absolute, atime, mtime)
} catch (utimeser) {
er = futimeser
}
}
}
if (this[DOCHOWN](entry)) {
const uid = this[UID](entry)
const gid = this[GID](entry)
try {
fs.fchownSync(fd, uid, gid)
} catch (fchowner) {
try {
fs.chownSync(entry.absolute, uid, gid)
} catch (chowner) {
er = er || fchowner
}
}
}
oner(er)
})
}
[DIRECTORY] (entry, done) {
const mode = entry.mode & 0o7777 || this.dmode
const er = this[MKDIR](entry.absolute, mode)
if (er) {
this[ONERROR](er, entry)
done()
return
}
if (entry.mtime && !this.noMtime) {
try {
fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)
} catch (er) {}
}
if (this[DOCHOWN](entry)) {
try {
fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
} catch (er) {}
}
done()
entry.resume()
}
[MKDIR] (dir, mode) {
try {
return mkdir.sync(normPath(dir), {
uid: this.uid,
gid: this.gid,
processUid: this.processUid,
processGid: this.processGid,
umask: this.processUmask,
preserve: this.preservePaths,
unlink: this.unlink,
cache: this.dirCache,
cwd: this.cwd,
mode: mode,
})
} catch (er) {
return er
}
}
[LINK] (entry, linkpath, link, done) {
try {
fs[link + 'Sync'](linkpath, entry.absolute)
done()
entry.resume()
} catch (er) {
return this[ONERROR](er, entry)
}
}
}
Unpack.Sync = UnpackSync
module.exports = Unpack
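
For reference, a minimal sketch of driving Unpack directly rather than through the higher-level tar.extract; the Parser base class detects gzipped input on its own. The paths are hypothetical, and deep-requiring lib files is an internal detail rather than a public API:

// extract archive.tar under ./out (the cwd is created if missing)
const fs = require('fs')
const Unpack = require('tar/lib/unpack.js')
const u = new Unpack({ cwd: './out' })
u.on('end', () => console.log('extraction finished'))
fs.createReadStream('archive.tar').pipe(u)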

40
node_modules/tar/lib/update.js generated vendored

@ -1,40 +0,0 @@
'use strict'
// tar -u
const hlo = require('./high-level-opt.js')
const r = require('./replace.js')
// just call tar.r with the filter and mtimeCache
module.exports = (opt_, files, cb) => {
const opt = hlo(opt_)
if (!opt.file) {
throw new TypeError('file is required')
}
if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
throw new TypeError('cannot append to compressed archives')
}
if (!files || !Array.isArray(files) || !files.length) {
throw new TypeError('no files or directories specified')
}
files = Array.from(files)
mtimeFilter(opt)
return r(opt, files, cb)
}
const mtimeFilter = opt => {
const filter = opt.filter
if (!opt.mtimeCache) {
opt.mtimeCache = new Map()
}
opt.filter = filter ? (path, stat) =>
filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)
: (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)
}
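
A minimal usage sketch of the update entry point above, exposed as tar.update (alias tar.u): identical to replace, except the mtime filter skips files that are older than the copy already recorded in the archive. The names are hypothetical:

// re-append notes.txt only if it is not older than the archived copy
const tar = require('tar')
tar.u({ file: 'my-archive.tar', sync: true }, ['notes.txt'])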

546
node_modules/tar/lib/write-entry.js generated vendored

@ -1,546 +0,0 @@
'use strict'
const { Minipass } = require('minipass')
const Pax = require('./pax.js')
const Header = require('./header.js')
const fs = require('fs')
const path = require('path')
const normPath = require('./normalize-windows-path.js')
const stripSlash = require('./strip-trailing-slashes.js')
const prefixPath = (path, prefix) => {
if (!prefix) {
return normPath(path)
}
path = normPath(path).replace(/^\.(\/|$)/, '')
return stripSlash(prefix) + '/' + path
}
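// e.g. prefixPath('./lib/a.js', 'pkg') === 'pkg/lib/a.js'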
const maxReadSize = 16 * 1024 * 1024
const PROCESS = Symbol('process')
const FILE = Symbol('file')
const DIRECTORY = Symbol('directory')
const SYMLINK = Symbol('symlink')
const HARDLINK = Symbol('hardlink')
const HEADER = Symbol('header')
const READ = Symbol('read')
const LSTAT = Symbol('lstat')
const ONLSTAT = Symbol('onlstat')
const ONREAD = Symbol('onread')
const ONREADLINK = Symbol('onreadlink')
const OPENFILE = Symbol('openfile')
const ONOPENFILE = Symbol('onopenfile')
const CLOSE = Symbol('close')
const MODE = Symbol('mode')
const AWAITDRAIN = Symbol('awaitDrain')
const ONDRAIN = Symbol('ondrain')
const PREFIX = Symbol('prefix')
const HAD_ERROR = Symbol('hadError')
const warner = require('./warn-mixin.js')
const winchars = require('./winchars.js')
const stripAbsolutePath = require('./strip-absolute-path.js')
const modeFix = require('./mode-fix.js')
const WriteEntry = warner(class WriteEntry extends Minipass {
constructor (p, opt) {
opt = opt || {}
super(opt)
if (typeof p !== 'string') {
throw new TypeError('path is required')
}
this.path = normPath(p)
// suppress atime, ctime, uid, gid, uname, gname
this.portable = !!opt.portable
// until node has builtin getpwnam functions, this'll have to do
this.myuid = process.getuid && process.getuid() || 0
this.myuser = process.env.USER || ''
this.maxReadSize = opt.maxReadSize || maxReadSize
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.preservePaths = !!opt.preservePaths
this.cwd = normPath(opt.cwd || process.cwd())
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.noMtime = !!opt.noMtime
this.mtime = opt.mtime || null
this.prefix = opt.prefix ? normPath(opt.prefix) : null
this.fd = null
this.blockLen = null
this.blockRemain = null
this.buf = null
this.offset = null
this.length = null
this.pos = null
this.remain = null
if (typeof opt.onwarn === 'function') {
this.on('warn', opt.onwarn)
}
let pathWarn = false
if (!this.preservePaths) {
const [root, stripped] = stripAbsolutePath(this.path)
if (root) {
this.path = stripped
pathWarn = root
}
}
this.win32 = !!opt.win32 || process.platform === 'win32'
if (this.win32) {
// force the \ to / normalization, since we might not *actually*
// be on windows, but want \ to be considered a path separator.
this.path = winchars.decode(this.path.replace(/\\/g, '/'))
p = p.replace(/\\/g, '/')
}
this.absolute = normPath(opt.absolute || path.resolve(this.cwd, p))
if (this.path === '') {
this.path = './'
}
if (pathWarn) {
this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
entry: this,
path: pathWarn + this.path,
})
}
if (this.statCache.has(this.absolute)) {
this[ONLSTAT](this.statCache.get(this.absolute))
} else {
this[LSTAT]()
}
}
emit (ev, ...data) {
if (ev === 'error') {
this[HAD_ERROR] = true
}
return super.emit(ev, ...data)
}
[LSTAT] () {
fs.lstat(this.absolute, (er, stat) => {
if (er) {
return this.emit('error', er)
}
this[ONLSTAT](stat)
})
}
[ONLSTAT] (stat) {
this.statCache.set(this.absolute, stat)
this.stat = stat
if (!stat.isFile()) {
stat.size = 0
}
this.type = getType(stat)
this.emit('stat', stat)
this[PROCESS]()
}
[PROCESS] () {
switch (this.type) {
case 'File': return this[FILE]()
case 'Directory': return this[DIRECTORY]()
case 'SymbolicLink': return this[SYMLINK]()
// unsupported types are ignored.
default: return this.end()
}
}
[MODE] (mode) {
return modeFix(mode, this.type === 'Directory', this.portable)
}
[PREFIX] (path) {
return prefixPath(path, this.prefix)
}
[HEADER] () {
if (this.type === 'Directory' && this.portable) {
this.noMtime = true
}
this.header = new Header({
path: this[PREFIX](this.path),
// only apply the prefix to hard links.
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
// only the permissions and setuid/setgid/sticky bitflags
// not the higher-order bits that specify file type
mode: this[MODE](this.stat.mode),
uid: this.portable ? null : this.stat.uid,
gid: this.portable ? null : this.stat.gid,
size: this.stat.size,
mtime: this.noMtime ? null : this.mtime || this.stat.mtime,
type: this.type,
uname: this.portable ? null :
this.stat.uid === this.myuid ? this.myuser : '',
atime: this.portable ? null : this.stat.atime,
ctime: this.portable ? null : this.stat.ctime,
})
if (this.header.encode() && !this.noPax) {
super.write(new Pax({
atime: this.portable ? null : this.header.atime,
ctime: this.portable ? null : this.header.ctime,
gid: this.portable ? null : this.header.gid,
mtime: this.noMtime ? null : this.mtime || this.header.mtime,
path: this[PREFIX](this.path),
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
size: this.header.size,
uid: this.portable ? null : this.header.uid,
uname: this.portable ? null : this.header.uname,
dev: this.portable ? null : this.stat.dev,
ino: this.portable ? null : this.stat.ino,
nlink: this.portable ? null : this.stat.nlink,
}).encode())
}
super.write(this.header.block)
}
[DIRECTORY] () {
if (this.path.slice(-1) !== '/') {
this.path += '/'
}
this.stat.size = 0
this[HEADER]()
this.end()
}
[SYMLINK] () {
fs.readlink(this.absolute, (er, linkpath) => {
if (er) {
return this.emit('error', er)
}
this[ONREADLINK](linkpath)
})
}
[ONREADLINK] (linkpath) {
this.linkpath = normPath(linkpath)
this[HEADER]()
this.end()
}
[HARDLINK] (linkpath) {
this.type = 'Link'
this.linkpath = normPath(path.relative(this.cwd, linkpath))
this.stat.size = 0
this[HEADER]()
this.end()
}
[FILE] () {
if (this.stat.nlink > 1) {
const linkKey = this.stat.dev + ':' + this.stat.ino
if (this.linkCache.has(linkKey)) {
const linkpath = this.linkCache.get(linkKey)
if (linkpath.indexOf(this.cwd) === 0) {
return this[HARDLINK](linkpath)
}
}
this.linkCache.set(linkKey, this.absolute)
}
this[HEADER]()
if (this.stat.size === 0) {
return this.end()
}
this[OPENFILE]()
}
[OPENFILE] () {
fs.open(this.absolute, 'r', (er, fd) => {
if (er) {
return this.emit('error', er)
}
this[ONOPENFILE](fd)
})
}
[ONOPENFILE] (fd) {
this.fd = fd
if (this[HAD_ERROR]) {
return this[CLOSE]()
}
this.blockLen = 512 * Math.ceil(this.stat.size / 512)
this.blockRemain = this.blockLen
const bufLen = Math.min(this.blockLen, this.maxReadSize)
this.buf = Buffer.allocUnsafe(bufLen)
this.offset = 0
this.pos = 0
this.remain = this.stat.size
this.length = this.buf.length
this[READ]()
}
[READ] () {
const { fd, buf, offset, length, pos } = this
fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
if (er) {
// ignoring the error from close(2) is a bad practice, but at
// this point we already have an error, don't need another one
return this[CLOSE](() => this.emit('error', er))
}
this[ONREAD](bytesRead)
})
}
[CLOSE] (cb) {
fs.close(this.fd, cb)
}
[ONREAD] (bytesRead) {
if (bytesRead <= 0 && this.remain > 0) {
const er = new Error('encountered unexpected EOF')
er.path = this.absolute
er.syscall = 'read'
er.code = 'EOF'
return this[CLOSE](() => this.emit('error', er))
}
if (bytesRead > this.remain) {
const er = new Error('did not encounter expected EOF')
er.path = this.absolute
er.syscall = 'read'
er.code = 'EOF'
return this[CLOSE](() => this.emit('error', er))
}
// null out the rest of the buffer, if we could fit the block padding.
// at the end of this loop, bytesRead and this.remain will have been
// incremented up to the blockRemain level, as if we had expected to
// get a null-padded file and read it through to the end. then we
// decrement both remain and blockRemain by bytesRead, and know that
// we reached the expected EOF, without any null buffer to append.
if (bytesRead === this.remain) {
for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
this.buf[i + this.offset] = 0
bytesRead++
this.remain++
}
}
const writeBuf = this.offset === 0 && bytesRead === this.buf.length ?
this.buf : this.buf.slice(this.offset, this.offset + bytesRead)
const flushed = this.write(writeBuf)
if (!flushed) {
this[AWAITDRAIN](() => this[ONDRAIN]())
} else {
this[ONDRAIN]()
}
}
[AWAITDRAIN] (cb) {
this.once('drain', cb)
}
write (writeBuf) {
if (this.blockRemain < writeBuf.length) {
const er = new Error('writing more data than expected')
er.path = this.absolute
return this.emit('error', er)
}
this.remain -= writeBuf.length
this.blockRemain -= writeBuf.length
this.pos += writeBuf.length
this.offset += writeBuf.length
return super.write(writeBuf)
}
[ONDRAIN] () {
if (!this.remain) {
if (this.blockRemain) {
super.write(Buffer.alloc(this.blockRemain))
}
return this[CLOSE](er => er ? this.emit('error', er) : this.end())
}
if (this.offset >= this.length) {
// if we only have a smaller bit left to read, alloc a smaller buffer
// otherwise, keep it the same length it was before.
this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length))
this.offset = 0
}
this.length = this.buf.length - this.offset
this[READ]()
}
})
class WriteEntrySync extends WriteEntry {
[LSTAT] () {
this[ONLSTAT](fs.lstatSync(this.absolute))
}
[SYMLINK] () {
this[ONREADLINK](fs.readlinkSync(this.absolute))
}
[OPENFILE] () {
this[ONOPENFILE](fs.openSync(this.absolute, 'r'))
}
[READ] () {
let threw = true
try {
const { fd, buf, offset, length, pos } = this
const bytesRead = fs.readSync(fd, buf, offset, length, pos)
this[ONREAD](bytesRead)
threw = false
} finally {
// ignoring the error from close(2) is a bad practice, but at
// this point we already have an error, don't need another one
if (threw) {
try {
this[CLOSE](() => {})
} catch (er) {}
}
}
}
[AWAITDRAIN] (cb) {
cb()
}
[CLOSE] (cb) {
fs.closeSync(this.fd)
cb()
}
}
const WriteEntryTar = warner(class WriteEntryTar extends Minipass {
constructor (readEntry, opt) {
opt = opt || {}
super(opt)
this.preservePaths = !!opt.preservePaths
this.portable = !!opt.portable
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.noMtime = !!opt.noMtime
this.readEntry = readEntry
this.type = readEntry.type
if (this.type === 'Directory' && this.portable) {
this.noMtime = true
}
this.prefix = opt.prefix || null
this.path = normPath(readEntry.path)
this.mode = this[MODE](readEntry.mode)
this.uid = this.portable ? null : readEntry.uid
this.gid = this.portable ? null : readEntry.gid
this.uname = this.portable ? null : readEntry.uname
this.gname = this.portable ? null : readEntry.gname
this.size = readEntry.size
this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime
this.atime = this.portable ? null : readEntry.atime
this.ctime = this.portable ? null : readEntry.ctime
this.linkpath = normPath(readEntry.linkpath)
if (typeof opt.onwarn === 'function') {
this.on('warn', opt.onwarn)
}
let pathWarn = false
if (!this.preservePaths) {
const [root, stripped] = stripAbsolutePath(this.path)
if (root) {
this.path = stripped
pathWarn = root
}
}
this.remain = readEntry.size
this.blockRemain = readEntry.startBlockSize
this.header = new Header({
path: this[PREFIX](this.path),
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
// only the permissions and setuid/setgid/sticky bitflags
// not the higher-order bits that specify file type
mode: this.mode,
uid: this.portable ? null : this.uid,
gid: this.portable ? null : this.gid,
size: this.size,
mtime: this.noMtime ? null : this.mtime,
type: this.type,
uname: this.portable ? null : this.uname,
atime: this.portable ? null : this.atime,
ctime: this.portable ? null : this.ctime,
})
if (pathWarn) {
this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
entry: this,
path: pathWarn + this.path,
})
}
if (this.header.encode() && !this.noPax) {
super.write(new Pax({
atime: this.portable ? null : this.atime,
ctime: this.portable ? null : this.ctime,
gid: this.portable ? null : this.gid,
mtime: this.noMtime ? null : this.mtime,
path: this[PREFIX](this.path),
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
size: this.size,
uid: this.portable ? null : this.uid,
uname: this.portable ? null : this.uname,
dev: this.portable ? null : this.readEntry.dev,
ino: this.portable ? null : this.readEntry.ino,
nlink: this.portable ? null : this.readEntry.nlink,
}).encode())
}
super.write(this.header.block)
readEntry.pipe(this)
}
[PREFIX] (path) {
return prefixPath(path, this.prefix)
}
[MODE] (mode) {
return modeFix(mode, this.type === 'Directory', this.portable)
}
write (data) {
const writeLen = data.length
if (writeLen > this.blockRemain) {
throw new Error('writing more to entry than is appropriate')
}
this.blockRemain -= writeLen
return super.write(data)
}
end () {
if (this.blockRemain) {
super.write(Buffer.alloc(this.blockRemain))
}
return super.end()
}
})
WriteEntry.Sync = WriteEntrySync
WriteEntry.Tar = WriteEntryTar
const getType = stat =>
stat.isFile() ? 'File'
: stat.isDirectory() ? 'Directory'
: stat.isSymbolicLink() ? 'SymbolicLink'
: 'Unsupported'
module.exports = WriteEntry
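
A minimal sketch of using WriteEntry on its own; in normal use Pack constructs these internally and consumes their output. The entry emits the raw 512-byte header block followed by the null-padded file body. The path below is hypothetical:

// stream one file as raw tar blocks to stdout
const WriteEntry = require('tar/lib/write-entry.js')
const we = new WriteEntry('README.md', { cwd: process.cwd() })
we.on('data', chunk => process.stdout.write(chunk))
we.on('end', () => console.error('entry complete'))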

70
node_modules/tar/package.json generated vendored

@ -1,70 +0,0 @@
{
"author": "GitHub Inc.",
"name": "tar",
"description": "tar for node",
"version": "6.2.1",
"repository": {
"type": "git",
"url": "https://github.com/isaacs/node-tar.git"
},
"scripts": {
"genparse": "node scripts/generate-parse-fixtures.js",
"snap": "tap",
"test": "tap"
},
"dependencies": {
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
"minipass": "^5.0.0",
"minizlib": "^2.1.1",
"mkdirp": "^1.0.3",
"yallist": "^4.0.0"
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.11.0",
"chmodr": "^1.2.0",
"end-of-stream": "^1.4.3",
"events-to-array": "^2.0.3",
"mutate-fs": "^2.1.1",
"nock": "^13.2.9",
"rimraf": "^3.0.2",
"tap": "^16.0.1"
},
"license": "ISC",
"engines": {
"node": ">=10"
},
"files": [
"bin/",
"lib/",
"index.js"
],
"tap": {
"coverage-map": "map.js",
"timeout": 0,
"nyc-arg": [
"--exclude",
"tap-snapshots/**"
]
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.11.0",
"content": "scripts/template-oss",
"engines": ">=10",
"distPaths": [
"index.js"
],
"allowPaths": [
"/index.js"
],
"ciVersions": [
"10.x",
"12.x",
"14.x",
"16.x",
"18.x"
]
}
}

11708
package-lock.json generated

File diff suppressed because it is too large Load Diff