master
Varun 1 year ago
parent 33edd91c61
commit 0505300de9

17
node_modules/.bin/color-support generated vendored

@ -1 +1,16 @@
../color-support/bin.js #!/bin/sh
# NOTE(review): the line above is diff residue — the old file content (a
# symlink target) fused with the new "#!/bin/sh" shebang by the diff view.
# This is the npm-generated .bin shim for the color-support CLI.

# Directory containing this shim; sed turns backslashes into forward
# slashes so the path is usable on Windows-hosted shells.
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
  *CYGWIN*|*MINGW*|*MSYS*)
    # Convert to a native Windows path when cygpath is available.
    if command -v cygpath > /dev/null 2>&1; then
      basedir=`cygpath -w "$basedir"`
    fi
  ;;
esac

# Prefer a node binary shipped next to the shim; otherwise use PATH.
if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../color-support/bin.js" "$@"
else
  exec node "$basedir/../color-support/bin.js" "$@"
fi

17
node_modules/.bin/node-pre-gyp generated vendored

@ -1 +1,16 @@
../@mapbox/node-pre-gyp/bin/node-pre-gyp #!/bin/sh
# NOTE(review): the line above is diff residue — old symlink target fused
# with the new shebang. npm-generated .bin shim for node-pre-gyp.

# Directory containing this shim, with backslashes normalized so the
# path works from Windows-hosted shells.
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
  *CYGWIN*|*MINGW*|*MSYS*)
    # Convert to a native Windows path when cygpath is available.
    if command -v cygpath > /dev/null 2>&1; then
      basedir=`cygpath -w "$basedir"`
    fi
  ;;
esac

# Prefer a node binary shipped next to the shim; otherwise use PATH.
if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../@mapbox/node-pre-gyp/bin/node-pre-gyp" "$@"
else
  exec node "$basedir/../@mapbox/node-pre-gyp/bin/node-pre-gyp" "$@"
fi

17
node_modules/.bin/nopt generated vendored

@ -1 +1,16 @@
../nopt/bin/nopt.js #!/bin/sh
# NOTE(review): the line above is diff residue — old symlink target fused
# with the new shebang. npm-generated .bin shim for the nopt CLI.

# Directory containing this shim, with backslashes normalized so the
# path works from Windows-hosted shells.
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
  *CYGWIN*|*MINGW*|*MSYS*)
    # Convert to a native Windows path when cygpath is available.
    if command -v cygpath > /dev/null 2>&1; then
      basedir=`cygpath -w "$basedir"`
    fi
  ;;
esac

# Prefer a node binary shipped next to the shim; otherwise use PATH.
if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../nopt/bin/nopt.js" "$@"
else
  exec node "$basedir/../nopt/bin/nopt.js" "$@"
fi

126
node_modules/.package-lock.json generated vendored

@ -2634,9 +2634,10 @@
} }
}, },
"node_modules/@mapbox/node-pre-gyp": { "node_modules/@mapbox/node-pre-gyp": {
"version": "1.0.10", "version": "1.0.11",
"resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.10.tgz", "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz",
"integrity": "sha512-4ySo4CjzStuprMwk35H5pPbkymjv1SF3jGLj6rAHp/xT/RF7TL7bd9CTm1xDY49K2qF7jmR/g7k+SkLETP6opA==", "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==",
"license": "BSD-3-Clause",
"dependencies": { "dependencies": {
"detect-libc": "^2.0.0", "detect-libc": "^2.0.0",
"https-proxy-agent": "^5.0.0", "https-proxy-agent": "^5.0.0",
@ -2652,21 +2653,11 @@
"node-pre-gyp": "bin/node-pre-gyp" "node-pre-gyp": "bin/node-pre-gyp"
} }
}, },
"node_modules/@mapbox/node-pre-gyp/node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/@mapbox/node-pre-gyp/node_modules/make-dir": { "node_modules/@mapbox/node-pre-gyp/node_modules/make-dir": {
"version": "3.1.0", "version": "3.1.0",
"resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz",
"integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==",
"license": "MIT",
"dependencies": { "dependencies": {
"semver": "^6.0.0" "semver": "^6.0.0"
}, },
@ -2678,20 +2669,19 @@
} }
}, },
"node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver": { "node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver": {
"version": "6.3.0", "version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"license": "ISC",
"bin": { "bin": {
"semver": "bin/semver.js" "semver": "bin/semver.js"
} }
}, },
"node_modules/@mapbox/node-pre-gyp/node_modules/semver": { "node_modules/@mapbox/node-pre-gyp/node_modules/semver": {
"version": "7.3.8", "version": "7.6.3",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==",
"dependencies": { "license": "ISC",
"lru-cache": "^6.0.0"
},
"bin": { "bin": {
"semver": "bin/semver.js" "semver": "bin/semver.js"
}, },
@ -2699,11 +2689,6 @@
"node": ">=10" "node": ">=10"
} }
}, },
"node_modules/@mapbox/node-pre-gyp/node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/@popperjs/core": { "node_modules/@popperjs/core": {
"version": "2.11.6", "version": "2.11.6",
"resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.6.tgz", "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.6.tgz",
@ -3941,7 +3926,8 @@
"node_modules/aproba": { "node_modules/aproba": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz",
"integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==",
"license": "ISC"
}, },
"node_modules/archiver": { "node_modules/archiver": {
"version": "5.3.1", "version": "5.3.1",
@ -4021,6 +4007,8 @@
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz",
"integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==",
"deprecated": "This package is no longer supported.",
"license": "ISC",
"dependencies": { "dependencies": {
"delegates": "^1.0.0", "delegates": "^1.0.0",
"readable-stream": "^3.6.0" "readable-stream": "^3.6.0"
@ -4255,12 +4243,13 @@
] ]
}, },
"node_modules/bcrypt": { "node_modules/bcrypt": {
"version": "5.1.0", "version": "5.1.1",
"resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.0.tgz", "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz",
"integrity": "sha512-RHBS7HI5N5tEnGTmtR/pppX0mmDSBpQ4aCBsj7CEQfYXDcO74A8sIBYcJMuCsis2E81zDxeENYhv66oZwLiA+Q==", "integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==",
"hasInstallScript": true, "hasInstallScript": true,
"license": "MIT",
"dependencies": { "dependencies": {
"@mapbox/node-pre-gyp": "^1.0.10", "@mapbox/node-pre-gyp": "^1.0.11",
"node-addon-api": "^5.0.0" "node-addon-api": "^5.0.0"
}, },
"engines": { "engines": {
@ -4660,6 +4649,7 @@
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz",
"integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==",
"license": "ISC",
"engines": { "engines": {
"node": ">=10" "node": ">=10"
} }
@ -4798,6 +4788,7 @@
"version": "1.1.3", "version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
"integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==",
"license": "ISC",
"bin": { "bin": {
"color-support": "bin.js" "color-support": "bin.js"
} }
@ -4873,7 +4864,8 @@
"node_modules/console-control-strings": { "node_modules/console-control-strings": {
"version": "1.1.0", "version": "1.1.0",
"resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
"integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==" "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==",
"license": "ISC"
}, },
"node_modules/content-disposition": { "node_modules/content-disposition": {
"version": "0.5.4", "version": "0.5.4",
@ -5207,7 +5199,8 @@
"node_modules/delegates": { "node_modules/delegates": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
"integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==" "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==",
"license": "MIT"
}, },
"node_modules/denque": { "node_modules/denque": {
"version": "1.5.1", "version": "1.5.1",
@ -5240,9 +5233,10 @@
} }
}, },
"node_modules/detect-libc": { "node_modules/detect-libc": {
"version": "2.0.1", "version": "2.0.3",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz",
"integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==", "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==",
"license": "Apache-2.0",
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
@ -6735,6 +6729,7 @@
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz",
"integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==",
"license": "ISC",
"dependencies": { "dependencies": {
"minipass": "^3.0.0" "minipass": "^3.0.0"
}, },
@ -6746,6 +6741,7 @@
"version": "3.3.6", "version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"license": "ISC",
"dependencies": { "dependencies": {
"yallist": "^4.0.0" "yallist": "^4.0.0"
}, },
@ -6756,7 +6752,8 @@
"node_modules/fs-minipass/node_modules/yallist": { "node_modules/fs-minipass/node_modules/yallist": {
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"license": "ISC"
}, },
"node_modules/fs.realpath": { "node_modules/fs.realpath": {
"version": "1.0.0", "version": "1.0.0",
@ -6847,6 +6844,8 @@
"version": "3.0.2", "version": "3.0.2",
"resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz",
"integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==",
"deprecated": "This package is no longer supported.",
"license": "ISC",
"dependencies": { "dependencies": {
"aproba": "^1.0.3 || ^2.0.0", "aproba": "^1.0.3 || ^2.0.0",
"color-support": "^1.1.2", "color-support": "^1.1.2",
@ -7443,7 +7442,8 @@
"node_modules/has-unicode": { "node_modules/has-unicode": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
"integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==" "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==",
"license": "ISC"
}, },
"node_modules/hashlru": { "node_modules/hashlru": {
"version": "2.3.0", "version": "2.3.0",
@ -8618,25 +8618,19 @@
} }
}, },
"node_modules/minipass": { "node_modules/minipass": {
"version": "4.0.0", "version": "5.0.0",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-4.0.0.tgz", "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
"integrity": "sha512-g2Uuh2jEKoht+zvO6vJqXmYpflPqzRBT+Th2h01DKh5z7wbY/AZ2gCQ78cP70YoHPyFdY30YBV5WxgLOEwOykw==", "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==",
"dependencies": { "license": "ISC",
"yallist": "^4.0.0"
},
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
}, },
"node_modules/minipass/node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/minizlib": { "node_modules/minizlib": {
"version": "2.1.2", "version": "2.1.2",
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz",
"integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==",
"license": "MIT",
"dependencies": { "dependencies": {
"minipass": "^3.0.0", "minipass": "^3.0.0",
"yallist": "^4.0.0" "yallist": "^4.0.0"
@ -8649,6 +8643,7 @@
"version": "3.3.6", "version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"license": "ISC",
"dependencies": { "dependencies": {
"yallist": "^4.0.0" "yallist": "^4.0.0"
}, },
@ -8659,7 +8654,8 @@
"node_modules/minizlib/node_modules/yallist": { "node_modules/minizlib/node_modules/yallist": {
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"license": "ISC"
}, },
"node_modules/mkdirp": { "node_modules/mkdirp": {
"version": "1.0.4", "version": "1.0.4",
@ -9061,9 +9057,10 @@
} }
}, },
"node_modules/node-addon-api": { "node_modules/node-addon-api": {
"version": "5.0.0", "version": "5.1.0",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.0.0.tgz", "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
"integrity": "sha512-CvkDw2OEnme7ybCykJpVcKH+uAOLV2qLqiyla128dN9TkEWfrYmxG6C2boDe5KcNQqZF3orkqzGgOMvZ/JNekA==" "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==",
"license": "MIT"
}, },
"node_modules/node-cron": { "node_modules/node-cron": {
"version": "3.0.2", "version": "3.0.2",
@ -9196,6 +9193,7 @@
"version": "5.0.0", "version": "5.0.0",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz",
"integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==",
"license": "ISC",
"dependencies": { "dependencies": {
"abbrev": "1" "abbrev": "1"
}, },
@ -9218,6 +9216,8 @@
"version": "5.0.1", "version": "5.0.1",
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz",
"integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==",
"deprecated": "This package is no longer supported.",
"license": "ISC",
"dependencies": { "dependencies": {
"are-we-there-yet": "^2.0.0", "are-we-there-yet": "^2.0.0",
"console-control-strings": "^1.1.0", "console-control-strings": "^1.1.0",
@ -10913,7 +10913,8 @@
"node_modules/set-blocking": { "node_modules/set-blocking": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
"integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==",
"license": "ISC"
}, },
"node_modules/set-cookie-parser": { "node_modules/set-cookie-parser": {
"version": "2.5.1", "version": "2.5.1",
@ -11395,13 +11396,14 @@
} }
}, },
"node_modules/tar": { "node_modules/tar": {
"version": "6.1.13", "version": "6.2.1",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz",
"integrity": "sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==", "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==",
"license": "ISC",
"dependencies": { "dependencies": {
"chownr": "^2.0.0", "chownr": "^2.0.0",
"fs-minipass": "^2.0.0", "fs-minipass": "^2.0.0",
"minipass": "^4.0.0", "minipass": "^5.0.0",
"minizlib": "^2.1.1", "minizlib": "^2.1.1",
"mkdirp": "^1.0.3", "mkdirp": "^1.0.3",
"yallist": "^4.0.0" "yallist": "^4.0.0"
@ -11438,7 +11440,8 @@
"node_modules/tar/node_modules/yallist": { "node_modules/tar/node_modules/yallist": {
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"license": "ISC"
}, },
"node_modules/teeny-request": { "node_modules/teeny-request": {
"version": "8.0.3", "version": "8.0.3",
@ -12373,6 +12376,7 @@
"version": "1.1.5", "version": "1.1.5",
"resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz",
"integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==",
"license": "ISC",
"dependencies": { "dependencies": {
"string-width": "^1.0.2 || 2 || 3 || 4" "string-width": "^1.0.2 || 2 || 3 || 4"
} }

@ -1,5 +1,8 @@
# node-pre-gyp changelog # node-pre-gyp changelog
## 1.0.11
- Fixes dependabot alert [CVE-2021-44906](https://nvd.nist.gov/vuln/detail/CVE-2021-44906)
## 1.0.10 ## 1.0.10
- Upgraded minimist to 1.2.6 to address dependabot alert [CVE-2021-44906](https://nvd.nist.gov/vuln/detail/CVE-2021-44906) - Upgraded minimist to 1.2.6 to address dependabot alert [CVE-2021-44906](https://nvd.nist.gov/vuln/detail/CVE-2021-44906)

@ -1 +1,16 @@
../semver/bin/semver.js #!/bin/sh
# NOTE(review): the line above is diff residue — old symlink target fused
# with the new shebang. npm-generated .bin shim for the semver CLI.

# Directory containing this shim, with backslashes normalized so the
# path works from Windows-hosted shells.
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
  *CYGWIN*|*MINGW*|*MSYS*)
    # Convert to a native Windows path when cygpath is available.
    if command -v cygpath > /dev/null 2>&1; then
      basedir=`cygpath -w "$basedir"`
    fi
  ;;
esac

# Prefer a node binary shipped next to the shim; otherwise use PATH.
if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@"
else
  exec node "$basedir/../semver/bin/semver.js" "$@"
fi

@ -1,15 +0,0 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

@ -1,166 +0,0 @@
# lru cache
A cache object that deletes the least-recently-used items.
[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache)
## Installation:
```javascript
npm install lru-cache --save
```
## Usage:
```javascript
var LRU = require("lru-cache")
, options = { max: 500
, length: function (n, key) { return n * 2 + key.length }
, dispose: function (key, n) { n.close() }
, maxAge: 1000 * 60 * 60 }
, cache = new LRU(options)
, otherCache = new LRU(50) // sets just the max size
cache.set("key", "value")
cache.get("key") // "value"
// non-string keys ARE fully supported
// but note that it must be THE SAME object, not
// just a JSON-equivalent object.
var someObject = { a: 1 }
cache.set(someObject, 'a value')
// Object keys are not toString()-ed
cache.set('[object Object]', 'a different value')
assert.equal(cache.get(someObject), 'a value')
// A similar object with same keys/values won't work,
// because it's a different object identity
assert.equal(cache.get({ a: 1 }), undefined)
cache.reset() // empty the cache
```
If you put more stuff in it, then items will fall out.
If you try to put an oversized thing in it, then it'll fall out right
away.
## Options
* `max` The maximum size of the cache, checked by applying the length
function to all values in the cache. Not setting this is kind of
silly, since that's the whole purpose of this lib, but it defaults
to `Infinity`. Setting it to a non-number or negative number will
throw a `TypeError`. Setting it to 0 makes it be `Infinity`.
* `maxAge` Maximum age in ms. Items are not pro-actively pruned out
as they age, but if you try to get an item that is too old, it'll
drop it and return undefined instead of giving it to you.
Setting this to a negative value will make everything seem old!
Setting it to a non-number will throw a `TypeError`.
* `length` Function that is used to calculate the length of stored
items. If you're storing strings or buffers, then you probably want
to do something like `function(n, key){return n.length}`. The default is
`function(){return 1}`, which is fine if you want to store `max`
like-sized things. The item is passed as the first argument, and
the key is passed as the second argument.
* `dispose` Function that is called on items when they are dropped
from the cache. This can be handy if you want to close file
descriptors or do other cleanup tasks when items are no longer
accessible. Called with `key, value`. It's called *before*
actually removing the item from the internal cache, so if you want
to immediately put it back in, you'll have to do that in a
`nextTick` or `setTimeout` callback or it won't do anything.
* `stale` By default, if you set a `maxAge`, it'll only actually pull
stale items out of the cache when you `get(key)`. (That is, it's
not pre-emptively doing a `setTimeout` or anything.) If you set
`stale:true`, it'll return the stale value before deleting it. If
you don't set this, then it'll return `undefined` when you try to
get a stale entry, as if it had already been deleted.
* `noDisposeOnSet` By default, if you set a `dispose()` method, then
it'll be called whenever a `set()` operation overwrites an existing
key. If you set this option, `dispose()` will only be called when a
key falls out of the cache, not when it is overwritten.
* `updateAgeOnGet` When using time-expiring entries with `maxAge`,
setting this to `true` will make each item's effective time update
to the current time whenever it is retrieved from cache, causing it
to not expire. (It can still fall out of cache based on recency of
use, of course.)
## API
* `set(key, value, maxAge)`
* `get(key) => value`
Both of these will update the "recently used"-ness of the key.
They do what you think. `maxAge` is optional and overrides the
cache `maxAge` option if provided.
If the key is not found, `get()` will return `undefined`.
The key and val can be any value.
* `peek(key)`
Returns the key value (or `undefined` if not found) without
updating the "recently used"-ness of the key.
(If you find yourself using this a lot, you *might* be using the
wrong sort of data structure, but there are some use cases where
it's handy.)
* `del(key)`
Deletes a key out of the cache.
* `reset()`
Clear the cache entirely, throwing away all values.
* `has(key)`
Check if a key is in the cache, without updating the recent-ness
or deleting it for being stale.
* `forEach(function(value,key,cache), [thisp])`
Just like `Array.prototype.forEach`. Iterates over all the keys
in the cache, in order of recent-ness. (Ie, more recently used
items are iterated over first.)
* `rforEach(function(value,key,cache), [thisp])`
The same as `cache.forEach(...)` but items are iterated over in
reverse order. (ie, less recently used items are iterated over
first.)
* `keys()`
Return an array of the keys in the cache.
* `values()`
Return an array of the values in the cache.
* `length`
Return total length of objects in cache taking into account
`length` options function.
* `itemCount`
Return total quantity of objects currently in cache. Note, that
`stale` (see options) items are returned as part of this item
count.
* `dump()`
Return an array of the cache entries ready for serialization and usage
with `destinationCache.load(arr)`.
* `load(cacheEntriesArray)`
Loads another cache entries array, obtained with `sourceCache.dump()`,
into the cache. The destination cache is reset before loading new entries.
* `prune()`
Manually iterates over the entire cache proactively pruning old entries

@ -1,334 +0,0 @@
'use strict'
// A linked list to keep track of recently-used-ness
const Yallist = require('yallist')
const MAX = Symbol('max')
const LENGTH = Symbol('length')
const LENGTH_CALCULATOR = Symbol('lengthCalculator')
const ALLOW_STALE = Symbol('allowStale')
const MAX_AGE = Symbol('maxAge')
const DISPOSE = Symbol('dispose')
const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')
const LRU_LIST = Symbol('lruList')
const CACHE = Symbol('cache')
const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')
const naiveLength = () => 1
// lruList is a yallist where the head is the youngest
// item, and the tail is the oldest. the list contains the Hit
// objects as the entries.
// Each Hit object has a reference to its Yallist.Node. This
// never changes.
//
// cache is a Map (or PseudoMap) that matches the keys to
// the Yallist.Node object.
class LRUCache {
constructor (options) {
if (typeof options === 'number')
options = { max: options }
if (!options)
options = {}
if (options.max && (typeof options.max !== 'number' || options.max < 0))
throw new TypeError('max must be a non-negative number')
// Kind of weird to have a default max of Infinity, but oh well.
const max = this[MAX] = options.max || Infinity
const lc = options.length || naiveLength
this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc
this[ALLOW_STALE] = options.stale || false
if (options.maxAge && typeof options.maxAge !== 'number')
throw new TypeError('maxAge must be a number')
this[MAX_AGE] = options.maxAge || 0
this[DISPOSE] = options.dispose
this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false
this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false
this.reset()
}
// resize the cache when the max changes.
set max (mL) {
if (typeof mL !== 'number' || mL < 0)
throw new TypeError('max must be a non-negative number')
this[MAX] = mL || Infinity
trim(this)
}
get max () {
return this[MAX]
}
set allowStale (allowStale) {
this[ALLOW_STALE] = !!allowStale
}
get allowStale () {
return this[ALLOW_STALE]
}
set maxAge (mA) {
if (typeof mA !== 'number')
throw new TypeError('maxAge must be a non-negative number')
this[MAX_AGE] = mA
trim(this)
}
get maxAge () {
return this[MAX_AGE]
}
// resize the cache when the lengthCalculator changes.
set lengthCalculator (lC) {
if (typeof lC !== 'function')
lC = naiveLength
if (lC !== this[LENGTH_CALCULATOR]) {
this[LENGTH_CALCULATOR] = lC
this[LENGTH] = 0
this[LRU_LIST].forEach(hit => {
hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key)
this[LENGTH] += hit.length
})
}
trim(this)
}
get lengthCalculator () { return this[LENGTH_CALCULATOR] }
get length () { return this[LENGTH] }
get itemCount () { return this[LRU_LIST].length }
rforEach (fn, thisp) {
thisp = thisp || this
for (let walker = this[LRU_LIST].tail; walker !== null;) {
const prev = walker.prev
forEachStep(this, fn, walker, thisp)
walker = prev
}
}
forEach (fn, thisp) {
thisp = thisp || this
for (let walker = this[LRU_LIST].head; walker !== null;) {
const next = walker.next
forEachStep(this, fn, walker, thisp)
walker = next
}
}
keys () {
return this[LRU_LIST].toArray().map(k => k.key)
}
values () {
return this[LRU_LIST].toArray().map(k => k.value)
}
reset () {
if (this[DISPOSE] &&
this[LRU_LIST] &&
this[LRU_LIST].length) {
this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value))
}
this[CACHE] = new Map() // hash of items by key
this[LRU_LIST] = new Yallist() // list of items in order of use recency
this[LENGTH] = 0 // length of items in the list
}
dump () {
return this[LRU_LIST].map(hit =>
isStale(this, hit) ? false : {
k: hit.key,
v: hit.value,
e: hit.now + (hit.maxAge || 0)
}).toArray().filter(h => h)
}
dumpLru () {
return this[LRU_LIST]
}
// Add or update an entry. Returns true if stored, false if rejected
// because the computed length exceeds the cache max.
// `maxAge` (ms) overrides the cache-wide default for this entry only.
set (key, value, maxAge) {
  maxAge = maxAge || this[MAX_AGE]
  if (maxAge && typeof maxAge !== 'number')
    throw new TypeError('maxAge must be a number')
  // Only take a timestamp when aging is in play.
  const now = maxAge ? Date.now() : 0
  const len = this[LENGTH_CALCULATOR](value, key)
  if (this[CACHE].has(key)) {
    // Updating an existing key: if the new value alone exceeds the max,
    // drop the old entry entirely and reject the new value.
    if (len > this[MAX]) {
      del(this, this[CACHE].get(key))
      return false
    }
    const node = this[CACHE].get(key)
    const item = node.value
    // dispose of the old one before overwriting
    // split out into 2 ifs for better coverage tracking
    if (this[DISPOSE]) {
      if (!this[NO_DISPOSE_ON_SET])
        this[DISPOSE](key, item.value)
    }
    item.now = now
    item.maxAge = maxAge
    item.value = value
    // Adjust the running total by the size delta, then record new size.
    this[LENGTH] += len - item.length
    item.length = len
    // get() promotes the updated entry to most-recently-used.
    this.get(key)
    trim(this)
    return true
  }
  const hit = new Entry(key, value, len, now, maxAge)
  // oversized objects fall out of cache automatically.
  if (hit.length > this[MAX]) {
    if (this[DISPOSE])
      this[DISPOSE](key, value)
    return false
  }
  this[LENGTH] += hit.length
  this[LRU_LIST].unshift(hit)
  this[CACHE].set(key, this[LRU_LIST].head)
  trim(this)
  return true
}
has (key) {
if (!this[CACHE].has(key)) return false
const hit = this[CACHE].get(key).value
return !isStale(this, hit)
}
// Look up a value and promote it to most-recently used.
get (key) {
  return get(this, key, true)
}
// Look up a value WITHOUT updating recency order.
peek (key) {
  return get(this, key, false)
}
pop () {
const node = this[LRU_LIST].tail
if (!node)
return null
del(this, node)
return node.value
}
// Remove an entry by key (no-op when absent). Note: `del` here calls
// the module-level del(self, node) helper, not this method recursively.
del (key) {
  del(this, this[CACHE].get(key))
}
load (arr) {
// reset the cache
this.reset()
const now = Date.now()
// A previous serialized cache has the most recent items first
for (let l = arr.length - 1; l >= 0; l--) {
const hit = arr[l]
const expiresAt = hit.e || 0
if (expiresAt === 0)
// the item was created without expiration in a non aged cache
this.set(hit.k, hit.v)
else {
const maxAge = expiresAt - now
// dont add already expired items
if (maxAge > 0) {
this.set(hit.k, hit.v, maxAge)
}
}
}
}
// Evict every stale entry now. Works by peeking each key with the
// non-promoting get(); the stale check inside get() deletes as it goes.
prune () {
  this[CACHE].forEach((value, key) => get(this, key, false))
}
}
// Shared lookup behind get()/peek(). When doUse is true, a fresh hit is
// promoted to most-recently-used (and its timestamp refreshed if
// updateAgeOnGet is set). A stale hit is deleted; its value is still
// returned when the cache allows stale reads, otherwise this returns
// undefined (implicitly on a full miss, explicitly on a stale one).
const get = (self, key, doUse) => {
  const node = self[CACHE].get(key)
  if (node) {
    const hit = node.value
    if (isStale(self, hit)) {
      del(self, node)
      // Without allowStale, a stale hit behaves like a miss.
      if (!self[ALLOW_STALE])
        return undefined
    } else {
      if (doUse) {
        if (self[UPDATE_AGE_ON_GET])
          node.value.now = Date.now()
        self[LRU_LIST].unshiftNode(node)
      }
    }
    return hit.value
  }
}
// Whether an entry has outlived its maxAge (its own, or the cache-wide
// default when the entry has none). Entries with no applicable maxAge
// never go stale.
const isStale = (self, hit) => {
  if (!hit) return false
  if (!hit.maxAge && !self[MAX_AGE]) return false
  const age = Date.now() - hit.now
  // Per-entry maxAge wins; otherwise fall back to the cache default.
  return hit.maxAge ? age > hit.maxAge
    : self[MAX_AGE] && (age > self[MAX_AGE])
}
// Evict from the tail (least-recently used) until total length fits
// within the cache max. The prev pointer is captured before deletion
// since del() unlinks the current node.
const trim = self => {
  let walker = self[LRU_LIST].tail
  while (self[LENGTH] > self[MAX] && walker !== null) {
    const prev = walker.prev
    del(self, walker)
    walker = prev
  }
}
// Unlink a list node and remove its entry from the cache, notifying the
// dispose callback and adjusting the running length. No-op for a falsy
// node (e.g. key not found).
const del = (self, node) => {
  if (!node) return
  const hit = node.value
  if (self[DISPOSE])
    self[DISPOSE](hit.key, hit.value)
  self[LENGTH] -= hit.length
  self[CACHE].delete(hit.key)
  self[LRU_LIST].removeNode(node)
}
// Internal record stored in the recency list: a key/value pair plus the
// bookkeeping needed for size accounting (length) and staleness checks
// (now, maxAge). A falsy maxAge is normalized to 0 (no per-entry age).
class Entry {
  constructor (key, value, length, now, maxAge) {
    Object.assign(this, {
      key,
      value,
      length,
      now,
      maxAge: maxAge || 0
    })
  }
}
// Visit one node on behalf of forEach/rforEach. A stale entry is
// deleted in passing and only surfaced to the callback when the cache
// allows stale reads.
const forEachStep = (self, fn, node, thisp) => {
  let hit = node.value
  if (isStale(self, hit)) {
    del(self, node)
    hit = self[ALLOW_STALE] ? hit : undefined
  }
  if (hit)
    fn.call(thisp, hit.value, hit.key, self)
}
module.exports = LRUCache

@ -1,34 +0,0 @@
{
"name": "lru-cache",
"description": "A cache object that deletes the least-recently-used items.",
"version": "6.0.0",
"author": "Isaac Z. Schlueter <i@izs.me>",
"keywords": [
"mru",
"lru",
"cache"
],
"scripts": {
"test": "tap",
"snap": "tap",
"preversion": "npm test",
"postversion": "npm publish",
"prepublishOnly": "git push origin --follow-tags"
},
"main": "index.js",
"repository": "git://github.com/isaacs/node-lru-cache.git",
"devDependencies": {
"benchmark": "^2.1.4",
"tap": "^14.10.7"
},
"license": "ISC",
"dependencies": {
"yallist": "^4.0.0"
},
"files": [
"index.js"
],
"engines": {
"node": ">=10"
}
}

@ -1 +1,16 @@
../semver/bin/semver.js #!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*)
if command -v cygpath > /dev/null 2>&1; then
basedir=`cygpath -w "$basedir"`
fi
;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@"
else
exec node "$basedir/../semver/bin/semver.js" "$@"
fi

@ -1,70 +0,0 @@
# changes log
## 6.2.0
* Coerce numbers to strings when passed to semver.coerce()
* Add `rtl` option to coerce from right to left
## 6.1.3
* Handle X-ranges properly in includePrerelease mode
## 6.1.2
* Do not throw when testing invalid version strings
## 6.1.1
* Add options support for semver.coerce()
* Handle undefined version passed to Range.test
## 6.1.0
* Add semver.compareBuild function
* Support `*` in semver.intersects
## 6.0
* Fix `intersects` logic.
This is technically a bug fix, but since it is also a change to behavior
that may require users updating their code, it is marked as a major
version increment.
## 5.7
* Add `minVersion` method
## 5.6
* Move boolean `loose` param to an options object, with
backwards-compatibility protection.
* Add ability to opt out of special prerelease version handling with
the `includePrerelease` option flag.
## 5.5
* Add version coercion capabilities
## 5.4
* Add intersection checking
## 5.3
* Add `minSatisfying` method
## 5.2
* Add `prerelease(v)` that returns prerelease components
## 5.1
* Add Backus-Naur for ranges
* Remove excessively cute inspection methods
## 5.0
* Remove AMD/Browserified build artifacts
* Fix ltr and gtr when using the `*` range
* Fix for range `*` with a prerelease identifier

@ -1,19 +1,26 @@
{ {
"name": "semver", "name": "semver",
"version": "6.3.0", "version": "6.3.1",
"description": "The semantic version parser used by npm.", "description": "The semantic version parser used by npm.",
"main": "semver.js", "main": "semver.js",
"scripts": { "scripts": {
"test": "tap", "test": "tap test/ --100 --timeout=30",
"preversion": "npm test", "lint": "echo linting disabled",
"postversion": "npm publish", "postlint": "template-oss-check",
"postpublish": "git push origin --follow-tags" "template-oss-apply": "template-oss-apply --force",
"lintfix": "npm run lint -- --fix",
"snap": "tap test/ --100 --timeout=30",
"posttest": "npm run lint"
}, },
"devDependencies": { "devDependencies": {
"tap": "^14.3.1" "@npmcli/template-oss": "4.17.0",
"tap": "^12.7.0"
}, },
"license": "ISC", "license": "ISC",
"repository": "https://github.com/npm/node-semver", "repository": {
"type": "git",
"url": "https://github.com/npm/node-semver.git"
},
"bin": { "bin": {
"semver": "./bin/semver.js" "semver": "./bin/semver.js"
}, },
@ -22,7 +29,10 @@
"range.bnf", "range.bnf",
"semver.js" "semver.js"
], ],
"tap": { "author": "GitHub Inc.",
"check-coverage": true "templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"content": "./scripts/template-oss",
"version": "4.17.0"
} }
} }

@ -26,8 +26,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion. // Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16 var MAX_SAFE_COMPONENT_LENGTH = 16
var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6
// The actual regexps go on exports.re // The actual regexps go on exports.re
var re = exports.re = [] var re = exports.re = []
var safeRe = exports.safeRe = []
var src = exports.src = [] var src = exports.src = []
var t = exports.tokens = {} var t = exports.tokens = {}
var R = 0 var R = 0
@ -36,6 +39,31 @@ function tok (n) {
t[n] = R++ t[n] = R++
} }
var LETTERDASHNUMBER = '[a-zA-Z0-9-]'
// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]
function makeSafeRe (value) {
for (var i = 0; i < safeRegexReplacements.length; i++) {
var token = safeRegexReplacements[i][0]
var max = safeRegexReplacements[i][1]
value = value
.split(token + '*').join(token + '{0,' + max + '}')
.split(token + '+').join(token + '{1,' + max + '}')
}
return value
}
// The following Regular Expressions can be used for tokenizing, // The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings. // validating, and parsing SemVer version strings.
@ -45,14 +73,14 @@ function tok (n) {
tok('NUMERICIDENTIFIER') tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE') tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+' src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'
// ## Non-numeric Identifier // ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or // Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens. // more letters, digits, or hyphens.
tok('NONNUMERICIDENTIFIER') tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'
// ## Main Version // ## Main Version
// Three dot-separated numeric identifiers. // Three dot-separated numeric identifiers.
@ -94,7 +122,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens. // Any combination of digits, letters, or hyphens.
tok('BUILDIDENTIFIER') tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+' src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'
// ## Build Metadata // ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata // Plus sign, followed by one or more period-separated build metadata
@ -174,6 +202,7 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])' '(?:$|[^\\d])'
tok('COERCERTL') tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')
// Tilde ranges. // Tilde ranges.
// Meaning is "reasonably at or greater than" // Meaning is "reasonably at or greater than"
@ -183,6 +212,7 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM') tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~' var tildeTrimReplace = '$1~'
tok('TILDE') tok('TILDE')
@ -198,6 +228,7 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM') tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^' var caretTrimReplace = '$1^'
tok('CARET') tok('CARET')
@ -219,6 +250,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +
// this one has to use the /g flag // this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3' var comparatorTrimReplace = '$1$2$3'
// Something like `1.2.3 - 1.2.4` // Something like `1.2.3 - 1.2.4`
@ -247,6 +279,14 @@ for (var i = 0; i < R; i++) {
debug(i, src[i]) debug(i, src[i])
if (!re[i]) { if (!re[i]) {
re[i] = new RegExp(src[i]) re[i] = new RegExp(src[i])
// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
} }
} }
@ -271,7 +311,7 @@ function parse (version, options) {
return null return null
} }
var r = options.loose ? re[t.LOOSE] : re[t.FULL] var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
if (!r.test(version)) { if (!r.test(version)) {
return null return null
} }
@ -326,7 +366,7 @@ function SemVer (version, options) {
this.options = options this.options = options
this.loose = !!options.loose this.loose = !!options.loose
var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])
if (!m) { if (!m) {
throw new TypeError('Invalid Version: ' + version) throw new TypeError('Invalid Version: ' + version)
@ -771,6 +811,7 @@ function Comparator (comp, options) {
return new Comparator(comp, options) return new Comparator(comp, options)
} }
comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options) debug('comparator', comp, options)
this.options = options this.options = options
this.loose = !!options.loose this.loose = !!options.loose
@ -787,7 +828,7 @@ function Comparator (comp, options) {
var ANY = {} var ANY = {}
Comparator.prototype.parse = function (comp) { Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var m = comp.match(r) var m = comp.match(r)
if (!m) { if (!m) {
@ -911,9 +952,16 @@ function Range (range, options) {
this.loose = !!options.loose this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease this.includePrerelease = !!options.includePrerelease
// First, split based on boolean or || // First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) { .trim()
.split(/\s+/)
.join(' ')
// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
return this.parseRange(range.trim()) return this.parseRange(range.trim())
}, this).filter(function (c) { }, this).filter(function (c) {
// throw out any that are not relevant for whatever reason // throw out any that are not relevant for whatever reason
@ -921,7 +969,7 @@ function Range (range, options) {
}) })
if (!this.set.length) { if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + range) throw new TypeError('Invalid SemVer Range: ' + this.raw)
} }
this.format() this.format()
@ -940,20 +988,19 @@ Range.prototype.toString = function () {
Range.prototype.parseRange = function (range) { Range.prototype.parseRange = function (range) {
var loose = this.options.loose var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace) range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range) debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM]) debug('comparator trim', range, safeRe[t.COMPARATORTRIM])
// `~ 1.2.3` => `~1.2.3` // `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace) range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)
// `^ 1.2.3` => `^1.2.3` // `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace) range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)
// normalize spaces // normalize spaces
range = range.split(/\s+/).join(' ') range = range.split(/\s+/).join(' ')
@ -961,7 +1008,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and // At this point, the range is completely trimmed and
// ready to be split into comparators. // ready to be split into comparators.
var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var set = range.split(' ').map(function (comp) { var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options) return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/) }, this).join(' ').split(/\s+/)
@ -1061,7 +1108,7 @@ function replaceTildes (comp, options) {
} }
function replaceTilde (comp, options) { function replaceTilde (comp, options) {
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) { return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr) debug('tilde', comp, _, M, m, p, pr)
var ret var ret
@ -1102,7 +1149,7 @@ function replaceCarets (comp, options) {
function replaceCaret (comp, options) { function replaceCaret (comp, options) {
debug('caret', comp, options) debug('caret', comp, options)
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) { return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr) debug('caret', comp, _, M, m, p, pr)
var ret var ret
@ -1161,7 +1208,7 @@ function replaceXRanges (comp, options) {
function replaceXRange (comp, options) { function replaceXRange (comp, options) {
comp = comp.trim() comp = comp.trim()
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) { return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr) debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M) var xM = isX(M)
@ -1236,7 +1283,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) { function replaceStars (comp, options) {
debug('replaceStars', comp, options) debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets! // Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(re[t.STAR], '') return comp.trim().replace(safeRe[t.STAR], '')
} }
// This function is passed to string.replace(re[t.HYPHENRANGE]) // This function is passed to string.replace(re[t.HYPHENRANGE])
@ -1562,7 +1609,7 @@ function coerce (version, options) {
var match = null var match = null
if (!options.rtl) { if (!options.rtl) {
match = version.match(re[t.COERCE]) match = version.match(safeRe[t.COERCE])
} else { } else {
// Find the right-most coercible string that does not share // Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string. // a terminus with a more left-ward coercible string.
@ -1573,17 +1620,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no // Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus. // coercible string can be more right-ward without the same terminus.
var next var next
while ((next = re[t.COERCERTL].exec(version)) && while ((next = safeRe[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length) (!match || match.index + match[0].length !== version.length)
) { ) {
if (!match || if (!match ||
next.index + next[0].length !== match.index + match[0].length) { next.index + next[0].length !== match.index + match[0].length) {
match = next match = next
} }
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
} }
// leave it in a clean state // leave it in a clean state
re[t.COERCERTL].lastIndex = -1 safeRe[t.COERCERTL].lastIndex = -1
} }
if (match === null) { if (match === null) {

@ -25,7 +25,7 @@ semver.valid(semver.coerce('v2')) // '2.0.0'
semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7'
``` ```
You can also just load the module for the function that you care about, if You can also just load the module for the function that you care about if
you'd like to minimize your footprint. you'd like to minimize your footprint.
```js ```js
@ -78,8 +78,8 @@ const semverOutside = require('semver/ranges/outside')
const semverGtr = require('semver/ranges/gtr') const semverGtr = require('semver/ranges/gtr')
const semverLtr = require('semver/ranges/ltr') const semverLtr = require('semver/ranges/ltr')
const semverIntersects = require('semver/ranges/intersects') const semverIntersects = require('semver/ranges/intersects')
const simplifyRange = require('semver/ranges/simplify') const semverSimplifyRange = require('semver/ranges/simplify')
const rangeSubset = require('semver/ranges/subset') const semverRangeSubset = require('semver/ranges/subset')
``` ```
As a command-line utility: As a command-line utility:
@ -110,6 +110,9 @@ Options:
-l --loose -l --loose
Interpret versions and ranges loosely Interpret versions and ranges loosely
-n <0|1>
This is the base to be used for the prerelease identifier.
-p --include-prerelease -p --include-prerelease
Always include prerelease versions in range matching Always include prerelease versions in range matching
@ -141,7 +144,7 @@ A leading `"="` or `"v"` character is stripped off and ignored.
## Ranges ## Ranges
A `version range` is a set of `comparators` which specify versions A `version range` is a set of `comparators` that specify versions
that satisfy the range. that satisfy the range.
A `comparator` is composed of an `operator` and a `version`. The set A `comparator` is composed of an `operator` and a `version`. The set
@ -152,11 +155,13 @@ of primitive `operators` is:
* `>` Greater than * `>` Greater than
* `>=` Greater than or equal to * `>=` Greater than or equal to
* `=` Equal. If no operator is specified, then equality is assumed, * `=` Equal. If no operator is specified, then equality is assumed,
so this operator is optional, but MAY be included. so this operator is optional but MAY be included.
For example, the comparator `>=1.2.7` would match the versions For example, the comparator `>=1.2.7` would match the versions
`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` `1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
or `1.1.0`. or `1.1.0`. The comparator `>1` is equivalent to `>=2.0.0` and
would match the versions `2.0.0` and `3.1.0`, but not the versions
`1.0.1` or `1.1.0`.
Comparators can be joined by whitespace to form a `comparator set`, Comparators can be joined by whitespace to form a `comparator set`,
which is satisfied by the **intersection** of all of the comparators which is satisfied by the **intersection** of all of the comparators
@ -184,26 +189,26 @@ For example, the range `>1.2.3-alpha.3` would be allowed to match the
version `1.2.3-alpha.7`, but it would *not* be satisfied by version `1.2.3-alpha.7`, but it would *not* be satisfied by
`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater `3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater
than" `1.2.3-alpha.3` according to the SemVer sort rules. The version than" `1.2.3-alpha.3` according to the SemVer sort rules. The version
range only accepts prerelease tags on the `1.2.3` version. The range only accepts prerelease tags on the `1.2.3` version.
version `3.4.5` *would* satisfy the range, because it does not have a Version `3.4.5` *would* satisfy the range because it does not have a
prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.
The purpose for this behavior is twofold. First, prerelease versions The purpose of this behavior is twofold. First, prerelease versions
frequently are updated very quickly, and contain many breaking changes frequently are updated very quickly, and contain many breaking changes
that are (by the author's design) not yet fit for public consumption. that are (by the author's design) not yet fit for public consumption.
Therefore, by default, they are excluded from range matching Therefore, by default, they are excluded from range-matching
semantics. semantics.
Second, a user who has opted into using a prerelease version has Second, a user who has opted into using a prerelease version has
clearly indicated the intent to use *that specific* set of indicated the intent to use *that specific* set of
alpha/beta/rc versions. By including a prerelease tag in the range, alpha/beta/rc versions. By including a prerelease tag in the range,
the user is indicating that they are aware of the risk. However, it the user is indicating that they are aware of the risk. However, it
is still not appropriate to assume that they have opted into taking a is still not appropriate to assume that they have opted into taking a
similar risk on the *next* set of prerelease versions. similar risk on the *next* set of prerelease versions.
Note that this behavior can be suppressed (treating all prerelease Note that this behavior can be suppressed (treating all prerelease
versions as if they were normal versions, for the purpose of range versions as if they were normal versions, for range-matching)
matching) by setting the `includePrerelease` flag on the options by setting the `includePrerelease` flag on the options
object to any object to any
[functions](https://github.com/npm/node-semver#functions) that do [functions](https://github.com/npm/node-semver#functions) that do
range matching. range matching.
@ -232,6 +237,35 @@ $ semver 1.2.4-beta.0 -i prerelease
1.2.4-beta.1 1.2.4-beta.1
``` ```
#### Prerelease Identifier Base
The method `.inc` takes an optional string parameter, `identifierBase`,
that lets you treat your prerelease number as zero-based or one-based.
Set to `false` to omit the prerelease number altogether.
If you do not specify this parameter, it will default to zero-based.
```javascript
semver.inc('1.2.3', 'prerelease', 'beta', '1')
// '1.2.4-beta.1'
```
```javascript
semver.inc('1.2.3', 'prerelease', 'beta', false)
// '1.2.4-beta'
```
command-line example:
```bash
$ semver 1.2.3 -i prerelease --preid beta -n 1
1.2.4-beta.1
```
```bash
$ semver 1.2.3 -i prerelease --preid beta -n false
1.2.4-beta
```
### Advanced Range Syntax ### Advanced Range Syntax
Advanced range syntax desugars to primitive comparators in Advanced range syntax desugars to primitive comparators in
@ -367,12 +401,12 @@ All methods and classes take a final `options` object argument. All
options in this object are `false` by default. The options supported options in this object are `false` by default. The options supported
are: are:
- `loose` Be more forgiving about not-quite-valid semver strings. - `loose`: Be more forgiving about not-quite-valid semver strings.
(Any resulting output will always be 100% strict compliant, of (Any resulting output will always be 100% strict compliant, of
course.) For backwards compatibility reasons, if the `options` course.) For backwards compatibility reasons, if the `options`
argument is a boolean value instead of an object, it is interpreted argument is a boolean value instead of an object, it is interpreted
to be the `loose` param. to be the `loose` param.
- `includePrerelease` Set to suppress the [default - `includePrerelease`: Set to suppress the [default
behavior](https://github.com/npm/node-semver#prerelease-tags) of behavior](https://github.com/npm/node-semver#prerelease-tags) of
excluding prerelease tagged versions from ranges unless they are excluding prerelease tagged versions from ranges unless they are
explicitly opted into. explicitly opted into.
@ -381,16 +415,20 @@ Strict-mode Comparators and Ranges will be strict about the SemVer
strings that they parse. strings that they parse.
* `valid(v)`: Return the parsed version, or null if it's not valid. * `valid(v)`: Return the parsed version, or null if it's not valid.
* `inc(v, release)`: Return the version incremented by the release * `inc(v, release, options, identifier, identifierBase)`:
Return the version incremented by the release
type (`major`, `premajor`, `minor`, `preminor`, `patch`, type (`major`, `premajor`, `minor`, `preminor`, `patch`,
`prepatch`, or `prerelease`), or null if it's not valid `prepatch`, or `prerelease`), or null if it's not valid
* `premajor` in one call will bump the version up to the next major * `premajor` in one call will bump the version up to the next major
version and down to a prerelease of that major version. version and down to a prerelease of that major version.
`preminor`, and `prepatch` work the same way. `preminor`, and `prepatch` work the same way.
* If called from a non-prerelease version, the `prerelease` will work the * If called from a non-prerelease version, `prerelease` will work the
same as `prepatch`. It increments the patch version, then makes a same as `prepatch`. It increments the patch version and then makes a
prerelease. If the input version is already a prerelease it simply prerelease. If the input version is already a prerelease it simply
increments it. increments it.
* `identifier` can be used to prefix `premajor`, `preminor`,
`prepatch`, or `prerelease` version increments. `identifierBase`
is the base to be used for the `prerelease` identifier.
* `prerelease(v)`: Returns an array of prerelease components, or null * `prerelease(v)`: Returns an array of prerelease components, or null
if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]`
* `major(v)`: Return the major version number. * `major(v)`: Return the major version number.
@ -408,7 +446,7 @@ strings that they parse.
* `lt(v1, v2)`: `v1 < v2` * `lt(v1, v2)`: `v1 < v2`
* `lte(v1, v2)`: `v1 <= v2` * `lte(v1, v2)`: `v1 <= v2`
* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, * `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,
even if they're not the exact same string. You already know how to even if they're not the same string. You already know how to
compare strings. compare strings.
* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. * `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.
* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call * `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call
@ -417,15 +455,22 @@ strings that they parse.
invalid comparison string is provided. invalid comparison string is provided.
* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if * `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if
`v2` is greater. Sorts in ascending order if passed to `Array.sort()`. `v2` is greater. Sorts in ascending order if passed to `Array.sort()`.
* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions * `rcompare(v1, v2)`: The reverse of `compare`. Sorts an array of versions
in descending order when passed to `Array.sort()`. in descending order when passed to `Array.sort()`.
* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions * `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions
are equal. Sorts in ascending order if passed to `Array.sort()`. are equal. Sorts in ascending order if passed to `Array.sort()`.
`v2` is greater. Sorts in ascending order if passed to `Array.sort()`. * `compareLoose(v1, v2)`: Short for `compare(v1, v2, { loose: true })`.
* `diff(v1, v2)`: Returns difference between two versions by the release type * `diff(v1, v2)`: Returns the difference between two versions by the release type
(`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
or null if the versions are the same. or null if the versions are the same.
### Sorting
* `sort(versions)`: Returns a sorted array of versions based on the `compareBuild`
function.
* `rsort(versions)`: The reverse of `sort`. Returns an array of versions based on
the `compareBuild` function in descending order.
### Comparators ### Comparators
* `intersects(comparator)`: Return true if the comparators intersect * `intersects(comparator)`: Return true if the comparators intersect
@ -439,19 +484,19 @@ strings that they parse.
that satisfies the range, or `null` if none of them do. that satisfies the range, or `null` if none of them do.
* `minSatisfying(versions, range)`: Return the lowest version in the list * `minSatisfying(versions, range)`: Return the lowest version in the list
that satisfies the range, or `null` if none of them do. that satisfies the range, or `null` if none of them do.
* `minVersion(range)`: Return the lowest version that can possibly match * `minVersion(range)`: Return the lowest version that can match
the given range. the given range.
* `gtr(version, range)`: Return `true` if version is greater than all the * `gtr(version, range)`: Return `true` if the version is greater than all the
versions possible in the range. versions possible in the range.
* `ltr(version, range)`: Return `true` if version is less than all the * `ltr(version, range)`: Return `true` if the version is less than all the
versions possible in the range. versions possible in the range.
* `outside(version, range, hilo)`: Return true if the version is outside * `outside(version, range, hilo)`: Return true if the version is outside
the bounds of the range in either the high or low direction. The the bounds of the range in either the high or low direction. The
`hilo` argument must be either the string `'>'` or `'<'`. (This is `hilo` argument must be either the string `'>'` or `'<'`. (This is
the function called by `gtr` and `ltr`.) the function called by `gtr` and `ltr`.)
* `intersects(range)`: Return true if any of the ranges comparators intersect * `intersects(range)`: Return true if any of the range comparators intersect.
* `simplifyRange(versions, range)`: Return a "simplified" range that * `simplifyRange(versions, range)`: Return a "simplified" range that
matches the same items in `versions` list as the range specified. Note matches the same items in the `versions` list as the range specified. Note
that it does *not* guarantee that it would match the same versions in all that it does *not* guarantee that it would match the same versions in all
cases, only for the set of versions provided. This is useful when cases, only for the set of versions provided. This is useful when
generating ranges by joining together multiple versions with `||` generating ranges by joining together multiple versions with `||`
@ -464,7 +509,7 @@ strings that they parse.
Note that, since ranges may be non-contiguous, a version might not be Note that, since ranges may be non-contiguous, a version might not be
greater than a range, less than a range, *or* satisfy a range! For greater than a range, less than a range, *or* satisfy a range! For
example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`
until `2.0.0`, so the version `1.2.10` would not be greater than the until `2.0.0`, so version `1.2.10` would not be greater than the
range (because `2.0.1` satisfies, which is higher), nor less than the range (because `2.0.1` satisfies, which is higher), nor less than the
range (since `1.2.8` satisfies, which is lower), and it also does not range (since `1.2.8` satisfies, which is lower), and it also does not
satisfy the range. satisfy the range.
@ -477,7 +522,7 @@ range, use the `satisfies(version, range)` function.
* `coerce(version, options)`: Coerces a string to semver if possible * `coerce(version, options)`: Coerces a string to semver if possible
This aims to provide a very forgiving translation of a non-semver string to This aims to provide a very forgiving translation of a non-semver string to
semver. It looks for the first digit in a string, and consumes all semver. It looks for the first digit in a string and consumes all
remaining characters which satisfy at least a partial semver (e.g., `1`, remaining characters which satisfy at least a partial semver (e.g., `1`,
`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer `1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer
versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All
@ -495,6 +540,10 @@ tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not
`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of `4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of
any other overlapping SemVer tuple. any other overlapping SemVer tuple.
If the `options.includePrerelease` flag is set, then the `coerce` result will contain
prerelease and build parts of a version. For example, `1.2.3.4-rc.1+rev.2`
will preserve prerelease `rc.1` and build `rev.2` in the result.
### Clean ### Clean
* `clean(version)`: Clean a string to be a valid semver if possible * `clean(version)`: Clean a string to be a valid semver if possible
@ -509,10 +558,44 @@ ex.
* `s.clean(' = v 2.1.5-foo')`: `null` * `s.clean(' = v 2.1.5-foo')`: `null`
* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'` * `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'`
* `s.clean('=v2.1.5')`: `'2.1.5'` * `s.clean('=v2.1.5')`: `'2.1.5'`
* `s.clean(' =v2.1.5')`: `2.1.5` * `s.clean(' =v2.1.5')`: `'2.1.5'`
* `s.clean(' 2.1.5 ')`: `'2.1.5'` * `s.clean(' 2.1.5 ')`: `'2.1.5'`
* `s.clean('~1.0.0')`: `null` * `s.clean('~1.0.0')`: `null`
## Constants
As a convenience, helper constants are exported to provide information about what `node-semver` supports:
### `RELEASE_TYPES`
- major
- premajor
- minor
- preminor
- patch
- prepatch
- prerelease
```
const semver = require('semver');
if (semver.RELEASE_TYPES.includes(arbitraryUserInput)) {
console.log('This is a valid release type!');
} else {
console.warn('This is NOT a valid release type!');
}
```
### `SEMVER_SPEC_VERSION`
2.0.0
```
const semver = require('semver');
console.log('We are currently using the semver specification version:', semver.SEMVER_SPEC_VERSION);
```
## Exported Modules ## Exported Modules
<!-- <!--
@ -521,7 +604,7 @@ eg), and then pull the module name into the documentation for that specific
thing. thing.
--> -->
You may pull in just the part of this semver utility that you need, if you You may pull in just the part of this semver utility that you need if you
are sensitive to packing and tree-shaking concerns. The main are sensitive to packing and tree-shaking concerns. The main
`require('semver')` export uses getter functions to lazily load the parts `require('semver')` export uses getter functions to lazily load the parts
of the API that are used. of the API that are used.
@ -564,5 +647,8 @@ The following modules are available:
* `require('semver/ranges/min-satisfying')` * `require('semver/ranges/min-satisfying')`
* `require('semver/ranges/min-version')` * `require('semver/ranges/min-version')`
* `require('semver/ranges/outside')` * `require('semver/ranges/outside')`
* `require('semver/ranges/simplify')`
* `require('semver/ranges/subset')`
* `require('semver/ranges/to-comparators')` * `require('semver/ranges/to-comparators')`
* `require('semver/ranges/valid')` * `require('semver/ranges/valid')`

@ -23,7 +23,10 @@ let rtl = false
let identifier let identifier
let identifierBase
const semver = require('../') const semver = require('../')
const parseOptions = require('../internal/parse-options')
let reverse = false let reverse = false
@ -71,6 +74,12 @@ const main = () => {
case '-r': case '--range': case '-r': case '--range':
range.push(argv.shift()) range.push(argv.shift())
break break
case '-n':
identifierBase = argv.shift()
if (identifierBase === 'false') {
identifierBase = false
}
break
case '-c': case '--coerce': case '-c': case '--coerce':
coerce = true coerce = true
break break
@ -88,7 +97,7 @@ const main = () => {
} }
} }
options = { loose: loose, includePrerelease: includePrerelease, rtl: rtl } options = parseOptions({ loose, includePrerelease, rtl })
versions = versions.map((v) => { versions = versions.map((v) => {
return coerce ? (semver.coerce(v, options) || { version: v }).version : v return coerce ? (semver.coerce(v, options) || { version: v }).version : v
@ -110,7 +119,11 @@ const main = () => {
return fail() return fail()
} }
} }
return success(versions) versions
.sort((a, b) => semver[reverse ? 'rcompare' : 'compare'](a, b, options))
.map(v => semver.clean(v, options))
.map(v => inc ? semver.inc(v, inc, options, identifier, identifierBase) : v)
.forEach(v => console.log(v))
} }
const failInc = () => { const failInc = () => {
@ -120,19 +133,6 @@ const failInc = () => {
const fail = () => process.exit(1) const fail = () => process.exit(1)
const success = () => {
const compare = reverse ? 'rcompare' : 'compare'
versions.sort((a, b) => {
return semver[compare](a, b, options)
}).map((v) => {
return semver.clean(v, options)
}).map((v) => {
return inc ? semver.inc(v, inc, options, identifier) : v
}).forEach((v, i, _) => {
console.log(v)
})
}
const help = () => console.log( const help = () => console.log(
`SemVer ${version} `SemVer ${version}
@ -172,6 +172,11 @@ Options:
--ltr --ltr
Coerce version strings left to right (default) Coerce version strings left to right (default)
-n <base>
Base number to be used for the prerelease identifier.
Can be either 0 or 1, or false to omit the number altogether.
Defaults to 0.
Program exits successfully if any valid version satisfies Program exits successfully if any valid version satisfies
all supplied ranges, and prints all satisfying versions. all supplied ranges, and prints all satisfying versions.

@ -16,6 +16,7 @@ class Comparator {
} }
} }
comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options) debug('comparator', comp, options)
this.options = options this.options = options
this.loose = !!options.loose this.loose = !!options.loose
@ -78,13 +79,6 @@ class Comparator {
throw new TypeError('a Comparator is required') throw new TypeError('a Comparator is required')
} }
if (!options || typeof options !== 'object') {
options = {
loose: !!options,
includePrerelease: false,
}
}
if (this.operator === '') { if (this.operator === '') {
if (this.value === '') { if (this.value === '') {
return true return true
@ -97,39 +91,50 @@ class Comparator {
return new Range(this.value, options).test(comp.semver) return new Range(this.value, options).test(comp.semver)
} }
const sameDirectionIncreasing = options = parseOptions(options)
(this.operator === '>=' || this.operator === '>') &&
(comp.operator === '>=' || comp.operator === '>') // Special cases where nothing can possibly be lower
const sameDirectionDecreasing = if (options.includePrerelease &&
(this.operator === '<=' || this.operator === '<') && (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) {
(comp.operator === '<=' || comp.operator === '<') return false
const sameSemVer = this.semver.version === comp.semver.version }
const differentDirectionsInclusive = if (!options.includePrerelease &&
(this.operator === '>=' || this.operator === '<=') && (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) {
(comp.operator === '>=' || comp.operator === '<=') return false
const oppositeDirectionsLessThan = }
cmp(this.semver, '<', comp.semver, options) &&
(this.operator === '>=' || this.operator === '>') && // Same direction increasing (> or >=)
(comp.operator === '<=' || comp.operator === '<') if (this.operator.startsWith('>') && comp.operator.startsWith('>')) {
const oppositeDirectionsGreaterThan = return true
cmp(this.semver, '>', comp.semver, options) && }
(this.operator === '<=' || this.operator === '<') && // Same direction decreasing (< or <=)
(comp.operator === '>=' || comp.operator === '>') if (this.operator.startsWith('<') && comp.operator.startsWith('<')) {
return true
return ( }
sameDirectionIncreasing || // same SemVer and both sides are inclusive (<= or >=)
sameDirectionDecreasing || if (
(sameSemVer && differentDirectionsInclusive) || (this.semver.version === comp.semver.version) &&
oppositeDirectionsLessThan || this.operator.includes('=') && comp.operator.includes('=')) {
oppositeDirectionsGreaterThan return true
) }
// opposite directions less than
if (cmp(this.semver, '<', comp.semver, options) &&
this.operator.startsWith('>') && comp.operator.startsWith('<')) {
return true
}
// opposite directions greater than
if (cmp(this.semver, '>', comp.semver, options) &&
this.operator.startsWith('<') && comp.operator.startsWith('>')) {
return true
}
return false
} }
} }
module.exports = Comparator module.exports = Comparator
const parseOptions = require('../internal/parse-options') const parseOptions = require('../internal/parse-options')
const { re, t } = require('../internal/re') const { safeRe: re, t } = require('../internal/re')
const cmp = require('../functions/cmp') const cmp = require('../functions/cmp')
const debug = require('../internal/debug') const debug = require('../internal/debug')
const SemVer = require('./semver') const SemVer = require('./semver')

@ -1,3 +1,5 @@
const SPACE_CHARACTERS = /\s+/g
// hoisted class for cyclic dependency // hoisted class for cyclic dependency
class Range { class Range {
constructor (range, options) { constructor (range, options) {
@ -18,7 +20,7 @@ class Range {
// just put it in the set and return // just put it in the set and return
this.raw = range.value this.raw = range.value
this.set = [[range]] this.set = [[range]]
this.format() this.formatted = undefined
return this return this
} }
@ -26,9 +28,13 @@ class Range {
this.loose = !!options.loose this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease this.includePrerelease = !!options.includePrerelease
// First, split based on boolean or || // First reduce all whitespace as much as possible so we do not have to rely
this.raw = range // on potentially slow regexes like \s*. This is then stored and used for
this.set = range // future error messages as well.
this.raw = range.trim().replace(SPACE_CHARACTERS, ' ')
// First, split on ||
this.set = this.raw
.split('||') .split('||')
// map the range to a 2d array of comparators // map the range to a 2d array of comparators
.map(r => this.parseRange(r.trim())) .map(r => this.parseRange(r.trim()))
@ -38,7 +44,7 @@ class Range {
.filter(c => c.length) .filter(c => c.length)
if (!this.set.length) { if (!this.set.length) {
throw new TypeError(`Invalid SemVer Range: ${range}`) throw new TypeError(`Invalid SemVer Range: ${this.raw}`)
} }
// if we have any that are not the null set, throw out null sets. // if we have any that are not the null set, throw out null sets.
@ -59,16 +65,29 @@ class Range {
} }
} }
this.format() this.formatted = undefined
}
get range () {
if (this.formatted === undefined) {
this.formatted = ''
for (let i = 0; i < this.set.length; i++) {
if (i > 0) {
this.formatted += '||'
}
const comps = this.set[i]
for (let k = 0; k < comps.length; k++) {
if (k > 0) {
this.formatted += ' '
}
this.formatted += comps[k].toString().trim()
}
}
}
return this.formatted
} }
format () { format () {
this.range = this.set
.map((comps) => {
return comps.join(' ').trim()
})
.join('||')
.trim()
return this.range return this.range
} }
@ -77,12 +96,12 @@ class Range {
} }
parseRange (range) { parseRange (range) {
range = range.trim()
// memoize range parsing for performance. // memoize range parsing for performance.
// this is a very hot path, and fully deterministic. // this is a very hot path, and fully deterministic.
const memoOpts = Object.keys(this.options).join(',') const memoOpts =
const memoKey = `parseRange:${memoOpts}:${range}` (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) |
(this.options.loose && FLAG_LOOSE)
const memoKey = memoOpts + ':' + range
const cached = cache.get(memoKey) const cached = cache.get(memoKey)
if (cached) { if (cached) {
return cached return cached
@ -93,18 +112,18 @@ class Range {
const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) range = range.replace(hr, hyphenReplace(this.options.includePrerelease))
debug('hyphen replace', range) debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range) debug('comparator trim', range)
// `~ 1.2.3` => `~1.2.3` // `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace) range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
debug('tilde trim', range)
// `^ 1.2.3` => `^1.2.3` // `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace) range = range.replace(re[t.CARETTRIM], caretTrimReplace)
debug('caret trim', range)
// normalize spaces
range = range.split(/\s+/).join(' ')
// At this point, the range is completely trimmed and // At this point, the range is completely trimmed and
// ready to be split into comparators. // ready to be split into comparators.
@ -190,22 +209,24 @@ class Range {
return false return false
} }
} }
module.exports = Range module.exports = Range
const LRU = require('lru-cache') const LRU = require('../internal/lrucache')
const cache = new LRU({ max: 1000 }) const cache = new LRU()
const parseOptions = require('../internal/parse-options') const parseOptions = require('../internal/parse-options')
const Comparator = require('./comparator') const Comparator = require('./comparator')
const debug = require('../internal/debug') const debug = require('../internal/debug')
const SemVer = require('./semver') const SemVer = require('./semver')
const { const {
re, safeRe: re,
t, t,
comparatorTrimReplace, comparatorTrimReplace,
tildeTrimReplace, tildeTrimReplace,
caretTrimReplace, caretTrimReplace,
} = require('../internal/re') } = require('../internal/re')
const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require('../internal/constants')
const isNullSet = c => c.value === '<0.0.0-0' const isNullSet = c => c.value === '<0.0.0-0'
const isAny = c => c.value === '' const isAny = c => c.value === ''
@ -253,10 +274,13 @@ const isX = id => !id || id.toLowerCase() === 'x' || id === '*'
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 // ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 // ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0
// ~0.0.1 --> >=0.0.1 <0.1.0-0 // ~0.0.1 --> >=0.0.1 <0.1.0-0
const replaceTildes = (comp, options) => const replaceTildes = (comp, options) => {
comp.trim().split(/\s+/).map((c) => { return comp
return replaceTilde(c, options) .trim()
}).join(' ') .split(/\s+/)
.map((c) => replaceTilde(c, options))
.join(' ')
}
const replaceTilde = (comp, options) => { const replaceTilde = (comp, options) => {
const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
@ -294,10 +318,13 @@ const replaceTilde = (comp, options) => {
// ^1.2.0 --> >=1.2.0 <2.0.0-0 // ^1.2.0 --> >=1.2.0 <2.0.0-0
// ^0.0.1 --> >=0.0.1 <0.0.2-0 // ^0.0.1 --> >=0.0.1 <0.0.2-0
// ^0.1.0 --> >=0.1.0 <0.2.0-0 // ^0.1.0 --> >=0.1.0 <0.2.0-0
const replaceCarets = (comp, options) => const replaceCarets = (comp, options) => {
comp.trim().split(/\s+/).map((c) => { return comp
return replaceCaret(c, options) .trim()
}).join(' ') .split(/\s+/)
.map((c) => replaceCaret(c, options))
.join(' ')
}
const replaceCaret = (comp, options) => { const replaceCaret = (comp, options) => {
debug('caret', comp, options) debug('caret', comp, options)
@ -354,9 +381,10 @@ const replaceCaret = (comp, options) => {
const replaceXRanges = (comp, options) => { const replaceXRanges = (comp, options) => {
debug('replaceXRanges', comp, options) debug('replaceXRanges', comp, options)
return comp.split(/\s+/).map((c) => { return comp
return replaceXRange(c, options) .split(/\s+/)
}).join(' ') .map((c) => replaceXRange(c, options))
.join(' ')
} }
const replaceXRange = (comp, options) => { const replaceXRange = (comp, options) => {
@ -439,12 +467,15 @@ const replaceXRange = (comp, options) => {
const replaceStars = (comp, options) => { const replaceStars = (comp, options) => {
debug('replaceStars', comp, options) debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets! // Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(re[t.STAR], '') return comp
.trim()
.replace(re[t.STAR], '')
} }
const replaceGTE0 = (comp, options) => { const replaceGTE0 = (comp, options) => {
debug('replaceGTE0', comp, options) debug('replaceGTE0', comp, options)
return comp.trim() return comp
.trim()
.replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '')
} }
@ -453,9 +484,10 @@ const replaceGTE0 = (comp, options) => {
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 // 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do // 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 // 1.2 - 3.4 => >=1.2.0 <3.5.0-0
// TODO build?
const hyphenReplace = incPr => ($0, const hyphenReplace = incPr => ($0,
from, fM, fm, fp, fpr, fb, from, fM, fm, fp, fpr, fb,
to, tM, tm, tp, tpr, tb) => { to, tM, tm, tp, tpr) => {
if (isX(fM)) { if (isX(fM)) {
from = '' from = ''
} else if (isX(fm)) { } else if (isX(fm)) {
@ -482,7 +514,7 @@ const hyphenReplace = incPr => ($0,
to = `<=${to}` to = `<=${to}`
} }
return (`${from} ${to}`).trim() return `${from} ${to}`.trim()
} }
const testSet = (set, version, options) => { const testSet = (set, version, options) => {

@ -1,6 +1,6 @@
const debug = require('../internal/debug') const debug = require('../internal/debug')
const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants') const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants')
const { re, t } = require('../internal/re') const { safeRe: re, t } = require('../internal/re')
const parseOptions = require('../internal/parse-options') const parseOptions = require('../internal/parse-options')
const { compareIdentifiers } = require('../internal/identifiers') const { compareIdentifiers } = require('../internal/identifiers')
@ -16,7 +16,7 @@ class SemVer {
version = version.version version = version.version
} }
} else if (typeof version !== 'string') { } else if (typeof version !== 'string') {
throw new TypeError(`Invalid Version: ${version}`) throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`)
} }
if (version.length > MAX_LENGTH) { if (version.length > MAX_LENGTH) {
@ -158,7 +158,7 @@ class SemVer {
do { do {
const a = this.build[i] const a = this.build[i]
const b = other.build[i] const b = other.build[i]
debug('prerelease compare', i, a, b) debug('build compare', i, a, b)
if (a === undefined && b === undefined) { if (a === undefined && b === undefined) {
return 0 return 0
} else if (b === undefined) { } else if (b === undefined) {
@ -175,36 +175,36 @@ class SemVer {
// preminor will bump the version up to the next minor release, and immediately // preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way. // down to pre-release. premajor and prepatch work the same way.
inc (release, identifier) { inc (release, identifier, identifierBase) {
switch (release) { switch (release) {
case 'premajor': case 'premajor':
this.prerelease.length = 0 this.prerelease.length = 0
this.patch = 0 this.patch = 0
this.minor = 0 this.minor = 0
this.major++ this.major++
this.inc('pre', identifier) this.inc('pre', identifier, identifierBase)
break break
case 'preminor': case 'preminor':
this.prerelease.length = 0 this.prerelease.length = 0
this.patch = 0 this.patch = 0
this.minor++ this.minor++
this.inc('pre', identifier) this.inc('pre', identifier, identifierBase)
break break
case 'prepatch': case 'prepatch':
// If this is already a prerelease, it will bump to the next version // If this is already a prerelease, it will bump to the next version
// drop any prereleases that might already exist, since they are not // drop any prereleases that might already exist, since they are not
// relevant at this point. // relevant at this point.
this.prerelease.length = 0 this.prerelease.length = 0
this.inc('patch', identifier) this.inc('patch', identifier, identifierBase)
this.inc('pre', identifier) this.inc('pre', identifier, identifierBase)
break break
// If the input is a non-prerelease version, this acts the same as // If the input is a non-prerelease version, this acts the same as
// prepatch. // prepatch.
case 'prerelease': case 'prerelease':
if (this.prerelease.length === 0) { if (this.prerelease.length === 0) {
this.inc('patch', identifier) this.inc('patch', identifier, identifierBase)
} }
this.inc('pre', identifier) this.inc('pre', identifier, identifierBase)
break break
case 'major': case 'major':
@ -246,9 +246,15 @@ class SemVer {
break break
// This probably shouldn't be used publicly. // This probably shouldn't be used publicly.
// 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction.
case 'pre': case 'pre': {
const base = Number(identifierBase) ? 1 : 0
if (!identifier && identifierBase === false) {
throw new Error('invalid increment argument: identifier is empty')
}
if (this.prerelease.length === 0) { if (this.prerelease.length === 0) {
this.prerelease = [0] this.prerelease = [base]
} else { } else {
let i = this.prerelease.length let i = this.prerelease.length
while (--i >= 0) { while (--i >= 0) {
@ -259,27 +265,36 @@ class SemVer {
} }
if (i === -1) { if (i === -1) {
// didn't increment anything // didn't increment anything
this.prerelease.push(0) if (identifier === this.prerelease.join('.') && identifierBase === false) {
throw new Error('invalid increment argument: identifier already exists')
}
this.prerelease.push(base)
} }
} }
if (identifier) { if (identifier) {
// 1.2.0-beta.1 bumps to 1.2.0-beta.2, // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
// 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
let prerelease = [identifier, base]
if (identifierBase === false) {
prerelease = [identifier]
}
if (compareIdentifiers(this.prerelease[0], identifier) === 0) { if (compareIdentifiers(this.prerelease[0], identifier) === 0) {
if (isNaN(this.prerelease[1])) { if (isNaN(this.prerelease[1])) {
this.prerelease = [identifier, 0] this.prerelease = prerelease
} }
} else { } else {
this.prerelease = [identifier, 0] this.prerelease = prerelease
} }
} }
break break
}
default: default:
throw new Error(`invalid increment argument: ${release}`) throw new Error(`invalid increment argument: ${release}`)
} }
this.format() this.raw = this.format()
this.raw = this.version if (this.build.length) {
this.raw += `+${this.build.join('.')}`
}
return this return this
} }
} }

@ -1,6 +1,6 @@
const SemVer = require('../classes/semver') const SemVer = require('../classes/semver')
const parse = require('./parse') const parse = require('./parse')
const { re, t } = require('../internal/re') const { safeRe: re, t } = require('../internal/re')
const coerce = (version, options) => { const coerce = (version, options) => {
if (version instanceof SemVer) { if (version instanceof SemVer) {
@ -19,34 +19,42 @@ const coerce = (version, options) => {
let match = null let match = null
if (!options.rtl) { if (!options.rtl) {
match = version.match(re[t.COERCE]) match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE])
} else { } else {
// Find the right-most coercible string that does not share // Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string. // a terminus with a more left-ward coercible string.
// Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
// With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4'
// //
// Walk through the string checking with a /g regexp // Walk through the string checking with a /g regexp
// Manually set the index so as to pick up overlapping matches. // Manually set the index so as to pick up overlapping matches.
// Stop when we get a match that ends at the string end, since no // Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus. // coercible string can be more right-ward without the same terminus.
const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL]
let next let next
while ((next = re[t.COERCERTL].exec(version)) && while ((next = coerceRtlRegex.exec(version)) &&
(!match || match.index + match[0].length !== version.length) (!match || match.index + match[0].length !== version.length)
) { ) {
if (!match || if (!match ||
next.index + next[0].length !== match.index + match[0].length) { next.index + next[0].length !== match.index + match[0].length) {
match = next match = next
} }
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length
} }
// leave it in a clean state // leave it in a clean state
re[t.COERCERTL].lastIndex = -1 coerceRtlRegex.lastIndex = -1
} }
if (match === null) { if (match === null) {
return null return null
} }
return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options) const major = match[2]
const minor = match[3] || '0'
const patch = match[4] || '0'
const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : ''
const build = options.includePrerelease && match[6] ? `+${match[6]}` : ''
return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options)
} }
module.exports = coerce module.exports = coerce

@ -1,23 +1,65 @@
const parse = require('./parse') const parse = require('./parse.js')
const eq = require('./eq')
const diff = (version1, version2) => { const diff = (version1, version2) => {
if (eq(version1, version2)) { const v1 = parse(version1, null, true)
const v2 = parse(version2, null, true)
const comparison = v1.compare(v2)
if (comparison === 0) {
return null return null
} else {
const v1 = parse(version1)
const v2 = parse(version2)
const hasPre = v1.prerelease.length || v2.prerelease.length
const prefix = hasPre ? 'pre' : ''
const defaultResult = hasPre ? 'prerelease' : ''
for (const key in v1) {
if (key === 'major' || key === 'minor' || key === 'patch') {
if (v1[key] !== v2[key]) {
return prefix + key
} }
const v1Higher = comparison > 0
const highVersion = v1Higher ? v1 : v2
const lowVersion = v1Higher ? v2 : v1
const highHasPre = !!highVersion.prerelease.length
const lowHasPre = !!lowVersion.prerelease.length
if (lowHasPre && !highHasPre) {
// Going from prerelease -> no prerelease requires some special casing
// If the low version has only a major, then it will always be a major
// Some examples:
// 1.0.0-1 -> 1.0.0
// 1.0.0-1 -> 1.1.1
// 1.0.0-1 -> 2.0.0
if (!lowVersion.patch && !lowVersion.minor) {
return 'major'
}
// Otherwise it can be determined by checking the high version
if (highVersion.patch) {
// anything higher than a patch bump would result in the wrong version
return 'patch'
}
if (highVersion.minor) {
// anything higher than a minor bump would result in the wrong version
return 'minor'
}
// bumping major/minor/patch all have same result
return 'major'
} }
// add the `pre` prefix if we are going to a prerelease version
const prefix = highHasPre ? 'pre' : ''
if (v1.major !== v2.major) {
return prefix + 'major'
} }
return defaultResult // may be undefined
if (v1.minor !== v2.minor) {
return prefix + 'minor'
} }
if (v1.patch !== v2.patch) {
return prefix + 'patch'
}
// high and low are preleases
return 'prerelease'
} }
module.exports = diff module.exports = diff

@ -1,7 +1,8 @@
const SemVer = require('../classes/semver') const SemVer = require('../classes/semver')
const inc = (version, release, options, identifier) => { const inc = (version, release, options, identifier, identifierBase) => {
if (typeof (options) === 'string') { if (typeof (options) === 'string') {
identifierBase = identifier
identifier = options identifier = options
options = undefined options = undefined
} }
@ -10,7 +11,7 @@ const inc = (version, release, options, identifier) => {
return new SemVer( return new SemVer(
version instanceof SemVer ? version.version : version, version instanceof SemVer ? version.version : version,
options options
).inc(release, identifier).version ).inc(release, identifier, identifierBase).version
} catch (er) { } catch (er) {
return null return null
} }

@ -1,33 +1,16 @@
const { MAX_LENGTH } = require('../internal/constants')
const { re, t } = require('../internal/re')
const SemVer = require('../classes/semver') const SemVer = require('../classes/semver')
const parse = (version, options, throwErrors = false) => {
const parseOptions = require('../internal/parse-options')
const parse = (version, options) => {
options = parseOptions(options)
if (version instanceof SemVer) { if (version instanceof SemVer) {
return version return version
} }
if (typeof version !== 'string') {
return null
}
if (version.length > MAX_LENGTH) {
return null
}
const r = options.loose ? re[t.LOOSE] : re[t.FULL]
if (!r.test(version)) {
return null
}
try { try {
return new SemVer(version, options) return new SemVer(version, options)
} catch (er) { } catch (er) {
if (!throwErrors) {
return null return null
} }
throw er
}
} }
module.exports = parse module.exports = parse

@ -83,6 +83,7 @@ module.exports = {
src: internalRe.src, src: internalRe.src,
tokens: internalRe.t, tokens: internalRe.t,
SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION,
RELEASE_TYPES: constants.RELEASE_TYPES,
compareIdentifiers: identifiers.compareIdentifiers, compareIdentifiers: identifiers.compareIdentifiers,
rcompareIdentifiers: identifiers.rcompareIdentifiers, rcompareIdentifiers: identifiers.rcompareIdentifiers,
} }

@ -9,9 +9,27 @@ const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion. // Max safe segment length for coercion.
const MAX_SAFE_COMPONENT_LENGTH = 16 const MAX_SAFE_COMPONENT_LENGTH = 16
// Max safe length for a build identifier. The max length minus 6 characters for
// the shortest version with a build 0.0.0+BUILD.
const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6
const RELEASE_TYPES = [
'major',
'premajor',
'minor',
'preminor',
'patch',
'prepatch',
'prerelease',
]
module.exports = { module.exports = {
SEMVER_SPEC_VERSION,
MAX_LENGTH, MAX_LENGTH,
MAX_SAFE_INTEGER,
MAX_SAFE_COMPONENT_LENGTH, MAX_SAFE_COMPONENT_LENGTH,
MAX_SAFE_BUILD_LENGTH,
MAX_SAFE_INTEGER,
RELEASE_TYPES,
SEMVER_SPEC_VERSION,
FLAG_INCLUDE_PRERELEASE: 0b001,
FLAG_LOOSE: 0b010,
} }

@ -1,11 +1,15 @@
// parse out just the options we care about so we always get a consistent // parse out just the options we care about
// obj with keys in a consistent order. const looseOption = Object.freeze({ loose: true })
const opts = ['includePrerelease', 'loose', 'rtl'] const emptyOpts = Object.freeze({ })
const parseOptions = options => const parseOptions = options => {
!options ? {} if (!options) {
: typeof options !== 'object' ? { loose: true } return emptyOpts
: opts.filter(k => options[k]).reduce((o, k) => { }
o[k] = true
return o if (typeof options !== 'object') {
}, {}) return looseOption
}
return options
}
module.exports = parseOptions module.exports = parseOptions

@ -1,19 +1,49 @@
const { MAX_SAFE_COMPONENT_LENGTH } = require('./constants') const {
MAX_SAFE_COMPONENT_LENGTH,
MAX_SAFE_BUILD_LENGTH,
MAX_LENGTH,
} = require('./constants')
const debug = require('./debug') const debug = require('./debug')
exports = module.exports = {} exports = module.exports = {}
// The actual regexps go on exports.re // The actual regexps go on exports.re
const re = exports.re = [] const re = exports.re = []
const safeRe = exports.safeRe = []
const src = exports.src = [] const src = exports.src = []
const t = exports.t = {} const t = exports.t = {}
let R = 0 let R = 0
const LETTERDASHNUMBER = '[a-zA-Z0-9-]'
// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
const safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]
const makeSafeRegex = (value) => {
for (const [token, max] of safeRegexReplacements) {
value = value
.split(`${token}*`).join(`${token}{0,${max}}`)
.split(`${token}+`).join(`${token}{1,${max}}`)
}
return value
}
const createToken = (name, value, isGlobal) => { const createToken = (name, value, isGlobal) => {
const safe = makeSafeRegex(value)
const index = R++ const index = R++
debug(name, index, value) debug(name, index, value)
t[name] = index t[name] = index
src[index] = value src[index] = value
re[index] = new RegExp(value, isGlobal ? 'g' : undefined) re[index] = new RegExp(value, isGlobal ? 'g' : undefined)
safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined)
} }
// The following Regular Expressions can be used for tokenizing, // The following Regular Expressions can be used for tokenizing,
@ -23,13 +53,13 @@ const createToken = (name, value, isGlobal) => {
// A single `0`, or a non-zero digit followed by zero or more digits. // A single `0`, or a non-zero digit followed by zero or more digits.
createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*')
createToken('NUMERICIDENTIFIERLOOSE', '[0-9]+') createToken('NUMERICIDENTIFIERLOOSE', '\\d+')
// ## Non-numeric Identifier // ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or // Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens. // more letters, digits, or hyphens.
createToken('NONNUMERICIDENTIFIER', '\\d*[a-zA-Z-][a-zA-Z0-9-]*') createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`)
// ## Main Version // ## Main Version
// Three dot-separated numeric identifiers. // Three dot-separated numeric identifiers.
@ -64,7 +94,7 @@ createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE]
// ## Build Metadata Identifier // ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens. // Any combination of digits, letters, or hyphens.
createToken('BUILDIDENTIFIER', '[0-9A-Za-z-]+') createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`)
// ## Build Metadata // ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata // Plus sign, followed by one or more period-separated build metadata
@ -124,12 +154,17 @@ createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`)
// Coercion. // Coercion.
// Extract anything that could conceivably be a part of a valid semver // Extract anything that could conceivably be a part of a valid semver
createToken('COERCE', `${'(^|[^\\d])' + createToken('COERCEPLAIN', `${'(^|[^\\d])' +
'(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` +
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`)
createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`)
createToken('COERCEFULL', src[t.COERCEPLAIN] +
`(?:${src[t.PRERELEASE]})?` +
`(?:${src[t.BUILD]})?` +
`(?:$|[^\\d])`) `(?:$|[^\\d])`)
createToken('COERCERTL', src[t.COERCE], true) createToken('COERCERTL', src[t.COERCE], true)
createToken('COERCERTLFULL', src[t.COERCEFULL], true)
// Tilde ranges. // Tilde ranges.
// Meaning is "reasonably at or greater than" // Meaning is "reasonably at or greater than"

@ -1,26 +1,27 @@
{ {
"name": "semver", "name": "semver",
"version": "7.3.8", "version": "7.6.3",
"description": "The semantic version parser used by npm.", "description": "The semantic version parser used by npm.",
"main": "index.js", "main": "index.js",
"scripts": { "scripts": {
"test": "tap", "test": "tap",
"snap": "tap", "snap": "tap",
"lint": "eslint \"**/*.js\"", "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"postlint": "template-oss-check", "postlint": "template-oss-check",
"lintfix": "npm run lint -- --fix", "lintfix": "npm run lint -- --fix",
"posttest": "npm run lint", "posttest": "npm run lint",
"template-oss-apply": "template-oss-apply --force" "template-oss-apply": "template-oss-apply --force"
}, },
"devDependencies": { "devDependencies": {
"@npmcli/eslint-config": "^3.0.1", "@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.4.4", "@npmcli/template-oss": "4.22.0",
"benchmark": "^2.1.4",
"tap": "^16.0.0" "tap": "^16.0.0"
}, },
"license": "ISC", "license": "ISC",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://github.com/npm/node-semver.git" "url": "git+https://github.com/npm/node-semver.git"
}, },
"bin": { "bin": {
"semver": "bin/semver.js" "semver": "bin/semver.js"
@ -37,7 +38,7 @@
"range.bnf" "range.bnf"
], ],
"tap": { "tap": {
"check-coverage": true, "timeout": 30,
"coverage-map": "map.js", "coverage-map": "map.js",
"nyc-arg": [ "nyc-arg": [
"--exclude", "--exclude",
@ -47,23 +48,11 @@
"engines": { "engines": {
"node": ">=10" "node": ">=10"
}, },
"dependencies": {
"lru-cache": "^6.0.0"
},
"author": "GitHub Inc.", "author": "GitHub Inc.",
"templateOSS": { "templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.4.4", "version": "4.22.0",
"engines": ">=10", "engines": ">=10",
"content": "./scripts",
"ciVersions": [
"10.0.0",
"10.x",
"12.x",
"14.x",
"16.x",
"18.x"
],
"distPaths": [ "distPaths": [
"classes/", "classes/",
"functions/", "functions/",
@ -80,7 +69,9 @@
"/ranges/", "/ranges/",
"/index.js", "/index.js",
"/preload.js", "/preload.js",
"/range.bnf" "/range.bnf",
] "/benchmarks"
],
"publish": "true"
} }
} }

@ -2,6 +2,6 @@ const Range = require('../classes/range')
const intersects = (r1, r2, options) => { const intersects = (r1, r2, options) => {
r1 = new Range(r1, options) r1 = new Range(r1, options)
r2 = new Range(r2, options) r2 = new Range(r2, options)
return r1.intersects(r2) return r1.intersects(r2, options)
} }
module.exports = intersects module.exports = intersects

@ -68,6 +68,9 @@ const subset = (sub, dom, options = {}) => {
return true return true
} }
const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')]
const minimumVersion = [new Comparator('>=0.0.0')]
const simpleSubset = (sub, dom, options) => { const simpleSubset = (sub, dom, options) => {
if (sub === dom) { if (sub === dom) {
return true return true
@ -77,9 +80,9 @@ const simpleSubset = (sub, dom, options) => {
if (dom.length === 1 && dom[0].semver === ANY) { if (dom.length === 1 && dom[0].semver === ANY) {
return true return true
} else if (options.includePrerelease) { } else if (options.includePrerelease) {
sub = [new Comparator('>=0.0.0-0')] sub = minimumVersionWithPreRelease
} else { } else {
sub = [new Comparator('>=0.0.0')] sub = minimumVersion
} }
} }
@ -87,7 +90,7 @@ const simpleSubset = (sub, dom, options) => {
if (options.includePrerelease) { if (options.includePrerelease) {
return true return true
} else { } else {
dom = [new Comparator('>=0.0.0')] dom = minimumVersion
} }
} }

@ -1,15 +0,0 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

@ -1,204 +0,0 @@
# yallist
Yet Another Linked List
There are many doubly-linked list implementations like it, but this
one is mine.
For when an array would be too big, and a Map can't be iterated in
reverse order.
[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist)
## basic usage
```javascript
var yallist = require('yallist')
var myList = yallist.create([1, 2, 3])
myList.push('foo')
myList.unshift('bar')
// of course pop() and shift() are there, too
console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo']
myList.forEach(function (k) {
// walk the list head to tail
})
myList.forEachReverse(function (k, index, list) {
// walk the list tail to head
})
var myDoubledList = myList.map(function (k) {
return k + k
})
// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo']
// mapReverse is also a thing
var myDoubledListReverse = myList.mapReverse(function (k) {
return k + k
}) // ['foofoo', 6, 4, 2, 'barbar']
var reduced = myList.reduce(function (set, entry) {
set += entry
return set
}, 'start')
console.log(reduced) // 'startfoo123bar'
```
## api
The whole API is considered "public".
Functions with the same name as an Array method work more or less the
same way.
There are reverse versions of most things because that's the point.
### Yallist
Default export, the class that holds and manages a list.
Call it with either a forEach-able (like an array) or a set of
arguments, to initialize the list.
The Array-ish methods all act like you'd expect. No magic length,
though, so if you change that it won't automatically prune or add
empty spots.
### Yallist.create(..)
Alias for Yallist function. Some people like factories.
#### yallist.head
The first node in the list
#### yallist.tail
The last node in the list
#### yallist.length
The number of nodes in the list. (Change this at your peril. It is
not magic like Array length.)
#### yallist.toArray()
Convert the list to an array.
#### yallist.forEach(fn, [thisp])
Call a function on each item in the list.
#### yallist.forEachReverse(fn, [thisp])
Call a function on each item in the list, in reverse order.
#### yallist.get(n)
Get the data at position `n` in the list. If you use this a lot,
probably better off just using an Array.
#### yallist.getReverse(n)
Get the data at position `n`, counting from the tail.
#### yallist.map(fn, thisp)
Create a new Yallist with the result of calling the function on each
item.
#### yallist.mapReverse(fn, thisp)
Same as `map`, but in reverse.
#### yallist.pop()
Get the data from the list tail, and remove the tail from the list.
#### yallist.push(item, ...)
Insert one or more items to the tail of the list.
#### yallist.reduce(fn, initialValue)
Like Array.reduce.
#### yallist.reduceReverse
Like Array.reduce, but in reverse.
#### yallist.reverse
Reverse the list in place.
#### yallist.shift()
Get the data from the list head, and remove the head from the list.
#### yallist.slice([from], [to])
Just like Array.slice, but returns a new Yallist.
#### yallist.sliceReverse([from], [to])
Just like yallist.slice, but the result is returned in reverse.
#### yallist.toArray()
Create an array representation of the list.
#### yallist.toArrayReverse()
Create a reversed array representation of the list.
#### yallist.unshift(item, ...)
Insert one or more items to the head of the list.
#### yallist.unshiftNode(node)
Move a Node object to the front of the list. (That is, pull it out of
wherever it lives, and make it the new head.)
If the node belongs to a different list, then that list will remove it
first.
#### yallist.pushNode(node)
Move a Node object to the end of the list. (That is, pull it out of
wherever it lives, and make it the new tail.)
If the node belongs to a list already, then that list will remove it
first.
#### yallist.removeNode(node)
Remove a node from the list, preserving referential integrity of head
and tail and other nodes.
Will throw an error if you try to have a list remove a node that
doesn't belong to it.
### Yallist.Node
The class for a single list node: it holds the data and the links to its neighbors.
Call with `var n = new Node(value, previousNode, nextNode)`
Note that if you do direct operations on Nodes themselves, it's very
easy to get into weird states where the list is broken. Be careful :)
#### node.next
The next node in the list.
#### node.prev
The previous node in the list.
#### node.value
The data the node contains.
#### node.list
The list to which this node belongs. (Null if it does not belong to
any list.)

@ -1,8 +0,0 @@
'use strict'
module.exports = function (Yallist) {
Yallist.prototype[Symbol.iterator] = function* () {
for (let walker = this.head; walker; walker = walker.next) {
yield walker.value
}
}
}

@ -1,29 +0,0 @@
{
"name": "yallist",
"version": "4.0.0",
"description": "Yet Another Linked List",
"main": "yallist.js",
"directories": {
"test": "test"
},
"files": [
"yallist.js",
"iterator.js"
],
"dependencies": {},
"devDependencies": {
"tap": "^12.1.0"
},
"scripts": {
"test": "tap test/*.js --100",
"preversion": "npm test",
"postversion": "npm publish",
"postpublish": "git push origin --all; git push origin --tags"
},
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/yallist.git"
},
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"license": "ISC"
}

@ -1,426 +0,0 @@
'use strict'
// The module exports the Yallist constructor itself; the Node class and
// a `create` factory alias are attached as static properties.
module.exports = Yallist

Yallist.Node = Node
Yallist.create = Yallist
// List constructor. Works with or without `new`. Accepts either a single
// forEach-able (e.g. an Array) whose items seed the list, or a plain
// argument list of values.
function Yallist (list) {
  var self = this
  // Support plain-function invocation: Yallist(...) === new Yallist(...)
  if (!(self instanceof Yallist)) {
    self = new Yallist()
  }
  self.tail = null
  self.head = null
  self.length = 0
  if (list && typeof list.forEach === 'function') {
    // Seed from any forEach-able collection
    list.forEach(function (item) {
      self.push(item)
    })
  } else if (arguments.length > 0) {
    // Otherwise every argument becomes a list item
    for (var i = 0, l = arguments.length; i < l; i++) {
      self.push(arguments[i])
    }
  }
  return self
}
// Detach `node` from this list, re-linking its neighbours and fixing up
// head/tail. Throws if the node belongs to a different (or no) list.
// Returns the node that followed the removed one (or null).
Yallist.prototype.removeNode = function (node) {
  if (node.list !== this) {
    throw new Error('removing node which does not belong to this list')
  }
  var next = node.next
  var prev = node.prev
  // Bridge the neighbours around the removed node
  if (next) {
    next.prev = prev
  }
  if (prev) {
    prev.next = next
  }
  // Fix list endpoints if the head or tail was removed
  if (node === this.head) {
    this.head = next
  }
  if (node === this.tail) {
    this.tail = prev
  }
  node.list.length--
  // Fully orphan the node so it can be reused safely
  node.next = null
  node.prev = null
  node.list = null
  return next
}
// Move an existing Node to the front of this list. If the node lives in
// another list it is removed from there first. No-op when it is already
// this list's head.
// NOTE(review): assumes node.prev is already null, which holds for nodes
// detached via removeNode or freshly constructed — verify for hand-built nodes.
Yallist.prototype.unshiftNode = function (node) {
  if (node === this.head) {
    return
  }
  if (node.list) {
    node.list.removeNode(node)
  }
  var head = this.head
  node.list = this
  node.next = head
  if (head) {
    head.prev = node
  }
  this.head = node
  // If the list was empty, the new head is also the tail
  if (!this.tail) {
    this.tail = node
  }
  this.length++
}
// Move an existing Node to the end of this list. If the node lives in
// another list it is removed from there first. No-op when it is already
// this list's tail.
// NOTE(review): assumes node.next is already null, which holds for nodes
// detached via removeNode or freshly constructed — verify for hand-built nodes.
Yallist.prototype.pushNode = function (node) {
  if (node === this.tail) {
    return
  }
  if (node.list) {
    node.list.removeNode(node)
  }
  var tail = this.tail
  node.list = this
  node.prev = tail
  if (tail) {
    tail.next = node
  }
  this.tail = node
  // If the list was empty, the new tail is also the head
  if (!this.head) {
    this.head = node
  }
  this.length++
}
/**
 * Append every argument to the tail of the list.
 * @returns {number} the new list length
 */
Yallist.prototype.push = function () {
  var count = arguments.length
  var idx = 0
  while (idx < count) {
    push(this, arguments[idx])
    idx += 1
  }
  return this.length
}
/**
 * Prepend every argument to the head of the list (so the last argument
 * ends up closest to the original head).
 * @returns {number} the new list length
 */
Yallist.prototype.unshift = function () {
  var count = arguments.length
  var idx = 0
  while (idx < count) {
    unshift(this, arguments[idx])
    idx += 1
  }
  return this.length
}
// Remove the tail node and return its value (undefined on an empty list).
Yallist.prototype.pop = function () {
  var oldTail = this.tail
  if (!oldTail) {
    return undefined
  }
  var newTail = oldTail.prev
  this.tail = newTail
  if (newTail) {
    newTail.next = null
  } else {
    // the list just became empty
    this.head = null
  }
  this.length--
  return oldTail.value
}
// Remove the head node and return its value (undefined on an empty list).
Yallist.prototype.shift = function () {
  var oldHead = this.head
  if (!oldHead) {
    return undefined
  }
  var newHead = oldHead.next
  this.head = newHead
  if (newHead) {
    newHead.prev = null
  } else {
    // the list just became empty
    this.tail = null
  }
  this.length--
  return oldHead.value
}
// Invoke fn(value, index, list) for each item, head to tail. `thisp`
// (defaulting to the list itself) is used as the callback's `this`.
Yallist.prototype.forEach = function (fn, thisp) {
  var ctx = thisp || this
  var idx = 0
  var node = this.head
  while (node !== null) {
    fn.call(ctx, node.value, idx, this)
    node = node.next
    idx += 1
  }
}
// Invoke fn(value, index, list) for each item, tail to head, with the
// index counting down from length - 1.
Yallist.prototype.forEachReverse = function (fn, thisp) {
  var ctx = thisp || this
  var idx = this.length - 1
  var node = this.tail
  while (node !== null) {
    fn.call(ctx, node.value, idx, this)
    node = node.prev
    idx -= 1
  }
}
// Return the value at position n from the head, or undefined when n is
// out of range. O(n) walk — heavy random access suits an Array better.
Yallist.prototype.get = function (n) {
  var node = this.head
  var idx = 0
  while (node !== null && idx < n) {
    node = node.next
    idx += 1
  }
  if (idx === n && node !== null) {
    return node.value
  }
}
// Return the value at position n counting from the tail, or undefined
// when n is out of range.
Yallist.prototype.getReverse = function (n) {
  var node = this.tail
  var idx = 0
  while (node !== null && idx < n) {
    node = node.prev
    idx += 1
  }
  if (idx === n && node !== null) {
    return node.value
  }
}
// Build a new Yallist whose items are fn.call(thisp, value, list) for
// each item, walking head to tail.
Yallist.prototype.map = function (fn, thisp) {
  var ctx = thisp || this
  var out = new Yallist()
  var node = this.head
  while (node !== null) {
    out.push(fn.call(ctx, node.value, this))
    node = node.next
  }
  return out
}
// Same as map, but walks tail to head, so the result is reversed
// relative to this list.
Yallist.prototype.mapReverse = function (fn, thisp) {
  var ctx = thisp || this
  var out = new Yallist()
  var node = this.tail
  while (node !== null) {
    out.push(fn.call(ctx, node.value, this))
    node = node.prev
  }
  return out
}
// Fold the list head-to-tail, like Array#reduce. When no initial value is
// given the first item seeds the accumulator; reducing an empty list with
// no initial value throws, matching Array behaviour.
// NOTE(review): unlike Array#reduce, the callback index restarts at 0 even
// when the first element was consumed as the seed — confirm callers don't
// rely on Array-compatible indexes.
Yallist.prototype.reduce = function (fn, initial) {
  var acc
  var walker = this.head
  if (arguments.length > 1) {
    acc = initial
  } else if (this.head) {
    // No seed supplied: start from the second node, seeded with the first value
    walker = this.head.next
    acc = this.head.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }
  for (var i = 0; walker !== null; i++) {
    acc = fn(acc, walker.value, i)
    walker = walker.next
  }
  return acc
}
// Fold the list tail-to-head. When no initial value is given the tail
// value seeds the accumulator; an empty list with no initial value throws.
// NOTE(review): when the tail seeded the accumulator, the index still
// starts at length - 1 for the next (second-to-last) element — one higher
// than Array#reduceRight would report. Confirm callers don't depend on it.
Yallist.prototype.reduceReverse = function (fn, initial) {
  var acc
  var walker = this.tail
  if (arguments.length > 1) {
    acc = initial
  } else if (this.tail) {
    // No seed supplied: start from the second-to-last node
    walker = this.tail.prev
    acc = this.tail.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }
  for (var i = this.length - 1; walker !== null; i--) {
    acc = fn(acc, walker.value, i)
    walker = walker.prev
  }
  return acc
}
// Copy the list values into a fresh Array, head to tail.
Yallist.prototype.toArray = function () {
  var out = new Array(this.length)
  var node = this.head
  for (var idx = 0; node !== null; idx += 1) {
    out[idx] = node.value
    node = node.next
  }
  return out
}
// Copy the list values into a fresh Array, tail to head (reversed order).
Yallist.prototype.toArrayReverse = function () {
  var out = new Array(this.length)
  var node = this.tail
  for (var idx = 0; node !== null; idx += 1) {
    out[idx] = node.value
    node = node.prev
  }
  return out
}
// Like Array#slice: return a new Yallist of items from index `from`
// (inclusive) to `to` (exclusive). Negative indexes count from the end.
// NOTE(review): `to = to || this.length` treats an explicit `to` of 0 as
// "through the end" (falsy fallback) — confirm this quirk is intended.
Yallist.prototype.slice = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  // Empty result for inverted or fully-negative ranges
  if (to < from || to < 0) {
    return ret
  }
  // Clamp to the valid index range
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }
  // Walk to the start position, then collect until `to`
  for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
    walker = walker.next
  }
  for (; walker !== null && i < to; i++, walker = walker.next) {
    ret.push(walker.value)
  }
  return ret
}
// Like slice, but the selected range is returned in reverse order
// (walked tail to head). Negative indexes count from the end.
// NOTE(review): as in slice, `to = to || this.length` treats an explicit
// `to` of 0 as "through the end" — confirm this quirk is intended.
Yallist.prototype.sliceReverse = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  // Empty result for inverted or fully-negative ranges
  if (to < from || to < 0) {
    return ret
  }
  // Clamp to the valid index range
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }
  // Walk backwards to index `to`, then collect down to `from`
  for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
    walker = walker.prev
  }
  for (; walker !== null && i > from; i--, walker = walker.prev) {
    ret.push(walker.value)
  }
  return ret
}
/**
 * Like Array#splice: remove `deleteCount` items starting at position
 * `start`, then insert `nodes` at that position. Negative `start`
 * counts from the end.
 *
 * Fixes an insertion bug in the previous implementation: when the
 * insertion point was adjacent to the head (or tail), the step-back
 * plus the head special-case in `insert` linked new values on the
 * wrong side of the neighbouring node — e.g.
 * Yallist(1, 2, 3).splice(1, 0, 'x') produced ['x', 1, 2, 3] instead
 * of [1, 'x', 2, 3], and splice(2, 0, 'x') appended after the tail.
 *
 * @param {number} start index of the first item to remove/insert at
 * @param {number} deleteCount how many items to remove
 * @param {...*} nodes values to insert at `start`
 * @returns {Array} the removed values, in list order
 */
Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
  // NOTE(review): clamps out-of-range start to length - 1 (not length),
  // preserved from the original — Array#splice would clamp to length.
  if (start > this.length) {
    start = this.length - 1
  }
  if (start < 0) {
    start = this.length + start;
  }
  // Walk to the node currently at position `start`
  for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
    walker = walker.next
  }
  // Remove `deleteCount` nodes, collecting their values
  var ret = []
  for (var i = 0; walker && i < deleteCount; i++) {
    ret.push(walker.value)
    walker = this.removeNode(walker)
  }
  // `walker` now sits at the insertion point (null means append at the
  // end). Link each new value immediately before `walker`, i.e. after
  // `prev`, preserving the order of `nodes`.
  var prev = walker === null ? this.tail : walker.prev
  for (var i = 0; i < nodes.length; i++) {
    // Node's constructor wires prev.next and next.prev for us
    var inserted = new Node(nodes[i], prev, walker, this)
    if (inserted.prev === null) {
      this.head = inserted
    }
    if (inserted.next === null) {
      this.tail = inserted
    }
    this.length++
    prev = inserted
  }
  return ret;
}
// Reverse the list in place by swapping prev/next on every node, then
// swapping head and tail. Returns the list for chaining.
Yallist.prototype.reverse = function () {
  var head = this.head
  var tail = this.tail
  // After the swap, walker.prev holds the node's ORIGINAL successor, so
  // advancing via .prev walks the original head-to-tail order.
  for (var walker = head; walker !== null; walker = walker.prev) {
    var p = walker.prev
    walker.prev = walker.next
    walker.next = p
  }
  this.head = tail
  this.tail = head
  return this
}
// Link a new Node carrying `value` into `self`, positioned relative to
// `node`, and return the freshly created node.
// NOTE(review): when `node` is the current head the value is inserted
// BEFORE it (becoming the new head); otherwise it is inserted AFTER
// `node`. Callers must account for this asymmetry.
function insert (self, node, value) {
  var inserted = node === self.head ?
    new Node(value, null, node, self) :
    new Node(value, node, node.next, self)

  // Fix endpoints if the new node landed at either end
  if (inserted.next === null) {
    self.tail = inserted
  }
  if (inserted.prev === null) {
    self.head = inserted
  }

  self.length++

  return inserted
}
// Append a single value to `self` as its new tail node.
function push (self, item) {
  var node = new Node(item, self.tail, null, self)
  self.tail = node
  if (!self.head) {
    // first item: it is head and tail at once
    self.head = node
  }
  self.length++
}
// Prepend a single value to `self` as its new head node.
function unshift (self, item) {
  var node = new Node(item, null, self.head, self)
  self.head = node
  if (!self.tail) {
    // first item: it is head and tail at once
    self.tail = node
  }
  self.length++
}
/**
 * A single doubly-linked list cell. Usable with or without `new`.
 * When `prev`/`next` are supplied, the new node wires itself between
 * them (updating their next/prev pointers).
 */
function Node (value, prev, next, list) {
  // Guard against plain-function invocation
  if (!(this instanceof Node)) {
    return new Node(value, prev, next, list)
  }

  this.list = list
  this.value = value

  this.prev = prev || null
  if (prev) {
    prev.next = this
  }

  this.next = next || null
  if (next) {
    next.prev = this
  }
}
// Optionally decorate the class with a Symbol.iterator implementation;
// engines without Symbol support simply skip it.
try {
  // add if support for Symbol.iterator is present
  require('./iterator.js')(Yallist)
} catch (er) {}

@ -1,7 +1,7 @@
{ {
"name": "@mapbox/node-pre-gyp", "name": "@mapbox/node-pre-gyp",
"description": "Node.js native addon binary install tool", "description": "Node.js native addon binary install tool",
"version": "1.0.10", "version": "1.0.11",
"keywords": [ "keywords": [
"native", "native",
"addon", "addon",

@ -10,34 +10,50 @@ on:
jobs: jobs:
build: build:
runs-on: ubuntu-22.04
strategy: strategy:
matrix: matrix:
node-version: [14.x, 16.x, 18.x] os: [ubuntu-20.04, macos-11.0, windows-2019]
nodeVersion: [14, 16, 18, 20]
runs-on: ${{ matrix.os }}
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node-version }} - name: Use Node.js ${{ matrix.nodeVersion }}
uses: actions/setup-node@v1 uses: actions/setup-node@v3
with: with:
node-version: ${{ matrix.node-version }} node-version: ${{ matrix.nodeVersion }}
- name: Install dependencies
run: |
sudo apt-get install -y python3 make g++
- name: Test - name: Test
run: npm test run: npm test
- name: Package
if: startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master')
run: npx node-pre-gyp package
- name: Upload
uses: actions/upload-artifact@v3
if: matrix.nodeVersion == '14' && (startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master'))
with:
name: bcrypt-lib-${{ matrix.os }}-${{ matrix.nodeVersion }}
path: build/stage/**/bcrypt_lib*.tar.gz
build-alpine: build-alpine:
runs-on: ubuntu-22.04 runs-on: ubuntu-latest
strategy: strategy:
matrix: matrix:
node-version: [14, 16, 18] nodeVersion: [14, 16, 18, 20]
container: container:
image: node:${{ matrix.node-version }}-alpine image: node:${{ matrix.nodeVersion }}-alpine
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v3
- name: Install dependencies - name: Install dependencies
run: | run: |
apk add make g++ python3 apk add make g++ python3
- name: Test - name: Test
run: | run: |
npm test --unsafe-perm npm test --unsafe-perm
- name: Package
if: startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master')
run: npx node-pre-gyp package --unsafe-perm
- name: Upload
if: matrix.nodeVersion == '14' && (startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master'))
uses: actions/upload-artifact@v3
with:
name: bcrypt-lib-alpine-${{ matrix.nodeVersion }}
path: build/stage/**/bcrypt_lib*.tar.gz

3
node_modules/bcrypt/CHANGELOG.md generated vendored

@ -1,3 +1,6 @@
# 5.1.0 (2022-10-06)
* Update `node-pre-gyp` to 1.0.11
# 5.1.0 (2022-10-06) # 5.1.0 (2022-10-06)
* Update `node-pre-gyp` to 1.0.10 * Update `node-pre-gyp` to 1.0.10
* Replace `nodeunit` with `jest` as the testing library * Replace `nodeunit` with `jest` as the testing library

Binary file not shown.

6
node_modules/bcrypt/binding.gyp generated vendored

@ -1,4 +1,7 @@
{ {
"variables": {
"NODE_VERSION%":"<!(node -p \"process.versions.node.split(\\\".\\\")[0]\")"
},
'targets': [ 'targets': [
{ {
'target_name': 'bcrypt_lib', 'target_name': 'bcrypt_lib',
@ -35,10 +38,11 @@
'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES', # -fvisibility=hidden 'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES', # -fvisibility=hidden
} }
}], }],
[ 'OS=="zos"', { ['OS=="zos" and NODE_VERSION <= 16',{
'cflags': [ 'cflags': [
'-qascii', '-qascii',
], ],
'defines': ["NAPI_DISABLE_CPP_EXCEPTIONS"],
}], }],
], ],
}, },

@ -1,21 +1,28 @@
var bcrypt = require('../bcrypt'); var bcrypt = require('../bcrypt');
var start = Date.now(); (async () => {
bcrypt.genSalt(10, function(err, salt) { const start = Date.now();
// genSalt
const salt = await bcrypt.genSalt(10)
console.log('salt: ' + salt); console.log('salt: ' + salt);
console.log('salt cb end: ' + (Date.now() - start) + 'ms'); console.log('salt cb end: ' + (Date.now() - start) + 'ms');
bcrypt.hash('test', salt, function(err, crypted) {
// hash
const crypted = await bcrypt.hash('test', salt)
console.log('crypted: ' + crypted); console.log('crypted: ' + crypted);
console.log('crypted cb end: ' + (Date.now() - start) + 'ms'); console.log('crypted cb end: ' + (Date.now() - start) + 'ms');
console.log('rounds used from hash:', bcrypt.getRounds(crypted)); console.log('rounds used from hash:', bcrypt.getRounds(crypted));
bcrypt.compare('test', crypted, function(err, res) {
// compare
const res = await bcrypt.compare('test', crypted)
console.log('compared true: ' + res); console.log('compared true: ' + res);
console.log('compared true cb end: ' + (Date.now() - start) + 'ms'); console.log('compared true cb end: ' + (Date.now() - start) + 'ms');
});
bcrypt.compare('bacon', crypted, function(err, res) { // compare
const res = await bcrypt.compare('bacon', crypted)
console.log('compared false: ' + res); console.log('compared false: ' + res);
console.log('compared false cb end: ' + (Date.now() - start) + 'ms'); console.log('compared false cb end: ' + (Date.now() - start) + 'ms');
});
});
})
console.log('end: ' + (Date.now() - start) + 'ms'); console.log('end: ' + (Date.now() - start) + 'ms');
})();

Binary file not shown.

6
node_modules/bcrypt/package.json generated vendored

@ -11,7 +11,7 @@
"crypto" "crypto"
], ],
"main": "./bcrypt", "main": "./bcrypt",
"version": "5.1.0", "version": "5.1.1",
"author": "Nick Campbell (https://github.com/ncb000gt)", "author": "Nick Campbell (https://github.com/ncb000gt)",
"engines": { "engines": {
"node": ">= 10.0.0" "node": ">= 10.0.0"
@ -29,11 +29,11 @@
"install": "node-pre-gyp install --fallback-to-build" "install": "node-pre-gyp install --fallback-to-build"
}, },
"dependencies": { "dependencies": {
"@mapbox/node-pre-gyp": "^1.0.10", "@mapbox/node-pre-gyp": "^1.0.11",
"node-addon-api": "^5.0.0" "node-addon-api": "^5.0.0"
}, },
"devDependencies": { "devDependencies": {
"jest": "^29.1.2" "jest": "^29.6.2"
}, },
"contributors": [ "contributors": [
"Antonio Salazar Cardozo <savedfastcool@gmail.com> (https://github.com/Shadowfiend)", "Antonio Salazar Cardozo <savedfastcool@gmail.com> (https://github.com/Shadowfiend)",

@ -1,12 +0,0 @@
#!/bin/sh
# CI helper for Alpine Linux containers: install the build toolchain,
# run the test suite, then build a prebuilt binary via node-pre-gyp.
# -x: echo commands; -e: abort on the first failure.
set -xe
echo "Running on $(node -v)"
# musl images ship without a compiler; python is needed by node-gyp
apk add make g++ python
npm test --unsafe-perm
./node_modules/.bin/node-pre-gyp configure
./node_modules/.bin/node-pre-gyp build
./node_modules/.bin/node-pre-gyp package

@ -8,6 +8,9 @@ Currently supports detection of GNU glibc and MUSL libc.
Provides asychronous and synchronous functions for the Provides asychronous and synchronous functions for the
family (e.g. `glibc`, `musl`) and version (e.g. `1.23`, `1.2.3`). family (e.g. `glibc`, `musl`) and version (e.g. `1.23`, `1.2.3`).
The version numbers of libc implementations
are not guaranteed to be semver-compliant.
For previous v1.x releases, please see the For previous v1.x releases, please see the
[v1](https://github.com/lovell/detect-libc/tree/v1) branch. [v1](https://github.com/lovell/detect-libc/tree/v1) branch.
@ -147,7 +150,7 @@ if (isNonGlibcLinuxSync()) { ... }
## Licensing ## Licensing
Copyright 2017, 2022 Lovell Fuller Copyright 2017 Lovell Fuller and others.
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License. you may not use this file except in compliance with the License.

@ -1,3 +1,6 @@
// Copyright 2017 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0
export const GLIBC: 'glibc'; export const GLIBC: 'glibc';
export const MUSL: 'musl'; export const MUSL: 'musl';

@ -1,7 +1,14 @@
// Copyright 2017 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0
'use strict'; 'use strict';
const childProcess = require('child_process'); const childProcess = require('child_process');
const { isLinux, getReport } = require('./process'); const { isLinux, getReport } = require('./process');
const { LDD_PATH, readFile, readFileSync } = require('./filesystem');
let cachedFamilyFilesystem;
let cachedVersionFilesystem;
const command = 'getconf GNU_LIBC_VERSION 2>&1 || true; ldd --version 2>&1 || true'; const command = 'getconf GNU_LIBC_VERSION 2>&1 || true; ldd --version 2>&1 || true';
let commandOut = ''; let commandOut = '';
@ -36,6 +43,12 @@ const safeCommandSync = () => {
*/ */
const GLIBC = 'glibc'; const GLIBC = 'glibc';
/**
* A Regexp constant to get the GLIBC Version.
* @type {string}
*/
const RE_GLIBC_VERSION = /LIBC[a-z0-9 \-).]*?(\d+\.\d+)/i;
/** /**
* A String constant containing the value `musl`. * A String constant containing the value `musl`.
* @type {string} * @type {string}
@ -69,6 +82,40 @@ const familyFromCommand = (out) => {
return null; return null;
}; };
const getFamilyFromLddContent = (content) => {
if (content.includes('musl')) {
return MUSL;
}
if (content.includes('GNU C Library')) {
return GLIBC;
}
return null;
};
const familyFromFilesystem = async () => {
if (cachedFamilyFilesystem !== undefined) {
return cachedFamilyFilesystem;
}
cachedFamilyFilesystem = null;
try {
const lddContent = await readFile(LDD_PATH);
cachedFamilyFilesystem = getFamilyFromLddContent(lddContent);
} catch (e) {}
return cachedFamilyFilesystem;
};
const familyFromFilesystemSync = () => {
if (cachedFamilyFilesystem !== undefined) {
return cachedFamilyFilesystem;
}
cachedFamilyFilesystem = null;
try {
const lddContent = readFileSync(LDD_PATH);
cachedFamilyFilesystem = getFamilyFromLddContent(lddContent);
} catch (e) {}
return cachedFamilyFilesystem;
};
/** /**
* Resolves with the libc family when it can be determined, `null` otherwise. * Resolves with the libc family when it can be determined, `null` otherwise.
* @returns {Promise<?string>} * @returns {Promise<?string>}
@ -76,7 +123,10 @@ const familyFromCommand = (out) => {
const family = async () => { const family = async () => {
let family = null; let family = null;
if (isLinux()) { if (isLinux()) {
family = await familyFromFilesystem();
if (!family) {
family = familyFromReport(); family = familyFromReport();
}
if (!family) { if (!family) {
const out = await safeCommand(); const out = await safeCommand();
family = familyFromCommand(out); family = familyFromCommand(out);
@ -92,7 +142,10 @@ const family = async () => {
const familySync = () => { const familySync = () => {
let family = null; let family = null;
if (isLinux()) { if (isLinux()) {
family = familyFromFilesystemSync();
if (!family) {
family = familyFromReport(); family = familyFromReport();
}
if (!family) { if (!family) {
const out = safeCommandSync(); const out = safeCommandSync();
family = familyFromCommand(out); family = familyFromCommand(out);
@ -113,6 +166,36 @@ const isNonGlibcLinux = async () => isLinux() && await family() !== GLIBC;
*/ */
const isNonGlibcLinuxSync = () => isLinux() && familySync() !== GLIBC; const isNonGlibcLinuxSync = () => isLinux() && familySync() !== GLIBC;
const versionFromFilesystem = async () => {
if (cachedVersionFilesystem !== undefined) {
return cachedVersionFilesystem;
}
cachedVersionFilesystem = null;
try {
const lddContent = await readFile(LDD_PATH);
const versionMatch = lddContent.match(RE_GLIBC_VERSION);
if (versionMatch) {
cachedVersionFilesystem = versionMatch[1];
}
} catch (e) {}
return cachedVersionFilesystem;
};
const versionFromFilesystemSync = () => {
if (cachedVersionFilesystem !== undefined) {
return cachedVersionFilesystem;
}
cachedVersionFilesystem = null;
try {
const lddContent = readFileSync(LDD_PATH);
const versionMatch = lddContent.match(RE_GLIBC_VERSION);
if (versionMatch) {
cachedVersionFilesystem = versionMatch[1];
}
} catch (e) {}
return cachedVersionFilesystem;
};
const versionFromReport = () => { const versionFromReport = () => {
const report = getReport(); const report = getReport();
if (report.header && report.header.glibcVersionRuntime) { if (report.header && report.header.glibcVersionRuntime) {
@ -141,7 +224,10 @@ const versionFromCommand = (out) => {
const version = async () => { const version = async () => {
let version = null; let version = null;
if (isLinux()) { if (isLinux()) {
version = await versionFromFilesystem();
if (!version) {
version = versionFromReport(); version = versionFromReport();
}
if (!version) { if (!version) {
const out = await safeCommand(); const out = await safeCommand();
version = versionFromCommand(out); version = versionFromCommand(out);
@ -157,7 +243,10 @@ const version = async () => {
const versionSync = () => { const versionSync = () => {
let version = null; let version = null;
if (isLinux()) { if (isLinux()) {
version = versionFromFilesystemSync();
if (!version) {
version = versionFromReport(); version = versionFromReport();
}
if (!version) { if (!version) {
const out = safeCommandSync(); const out = safeCommandSync();
version = versionFromCommand(out); version = versionFromCommand(out);

@ -1,3 +1,6 @@
// Copyright 2017 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0
'use strict'; 'use strict';
const isLinux = () => process.platform === 'linux'; const isLinux = () => process.platform === 'linux';
@ -6,9 +9,14 @@ let report = null;
const getReport = () => { const getReport = () => {
if (!report) { if (!report) {
/* istanbul ignore next */ /* istanbul ignore next */
report = isLinux() && process.report if (isLinux() && process.report) {
? process.report.getReport() const orig = process.report.excludeNetwork;
: {}; process.report.excludeNetwork = true;
report = process.report.getReport();
process.report.excludeNetwork = orig;
} else {
report = {};
}
} }
return report; return report;
}; };

@ -1,6 +1,6 @@
{ {
"name": "detect-libc", "name": "detect-libc",
"version": "2.0.1", "version": "2.0.3",
"description": "Node.js module to detect the C standard library (libc) implementation family and version", "description": "Node.js module to detect the C standard library (libc) implementation family and version",
"main": "lib/detect-libc.js", "main": "lib/detect-libc.js",
"files": [ "files": [
@ -8,7 +8,9 @@
"index.d.ts" "index.d.ts"
], ],
"scripts": { "scripts": {
"test": "semistandard && nyc --reporter=lcov --check-coverage --branches=100 ava test/unit.js" "test": "semistandard && nyc --reporter=text --check-coverage --branches=100 ava test/unit.js",
"bench": "node benchmark/detect-libc",
"bench:calls": "node benchmark/call-familySync.js && sleep 1 && node benchmark/call-isNonGlibcLinuxSync.js && sleep 1 && node benchmark/call-versionSync.js"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
@ -21,11 +23,13 @@
], ],
"author": "Lovell Fuller <npm@lovell.info>", "author": "Lovell Fuller <npm@lovell.info>",
"contributors": [ "contributors": [
"Niklas Salmoukas <niklas@salmoukas.com>" "Niklas Salmoukas <niklas@salmoukas.com>",
"Vinícius Lourenço <vinyygamerlol@gmail.com>"
], ],
"license": "Apache-2.0", "license": "Apache-2.0",
"devDependencies": { "devDependencies": {
"ava": "^2.4.0", "ava": "^2.4.0",
"benchmark": "^2.1.4",
"nyc": "^15.1.0", "nyc": "^15.1.0",
"proxyquire": "^2.1.3", "proxyquire": "^2.1.3",
"semistandard": "^14.2.3" "semistandard": "^14.2.3"

2
node_modules/minipass/LICENSE generated vendored

@ -1,6 +1,6 @@
The ISC License The ISC License
Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
Permission to use, copy, modify, and/or distribute this software for any Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above purpose with or without fee is hereby granted, provided that the above

495
node_modules/minipass/README.md generated vendored

@ -4,35 +4,37 @@ A _very_ minimal implementation of a [PassThrough
stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough) stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough)
[It's very [It's very
fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0) fast](https://docs.google.com/spreadsheets/d/1K_HR5oh3r80b8WVMWCPPjfuWXUgfkmhlX7FGI6JJ8tY/edit?usp=sharing)
for objects, strings, and buffers. for objects, strings, and buffers.
Supports `pipe()`ing (including multi-`pipe()` and backpressure transmission), Supports `pipe()`ing (including multi-`pipe()` and backpressure
buffering data until either a `data` event handler or `pipe()` is added (so transmission), buffering data until either a `data` event handler
you don't lose the first chunk), and most other cases where PassThrough is or `pipe()` is added (so you don't lose the first chunk), and
a good idea. most other cases where PassThrough is a good idea.
There is a `read()` method, but it's much more efficient to consume data There is a `read()` method, but it's much more efficient to
from this stream via `'data'` events or by calling `pipe()` into some other consume data from this stream via `'data'` events or by calling
stream. Calling `read()` requires the buffer to be flattened in some `pipe()` into some other stream. Calling `read()` requires the
cases, which requires copying memory. buffer to be flattened in some cases, which requires copying
memory.
If you set `objectMode: true` in the options, then whatever is written will If you set `objectMode: true` in the options, then whatever is
be emitted. Otherwise, it'll do a minimal amount of Buffer copying to written will be emitted. Otherwise, it'll do a minimal amount of
ensure proper Streams semantics when `read(n)` is called. Buffer copying to ensure proper Streams semantics when `read(n)`
is called.
`objectMode` can also be set by doing `stream.objectMode = true`, or by `objectMode` can also be set by doing `stream.objectMode = true`,
writing any non-string/non-buffer data. `objectMode` cannot be set to or by writing any non-string/non-buffer data. `objectMode` cannot
false once it is set. be set to false once it is set.
This is not a `through` or `through2` stream. It doesn't transform the This is not a `through` or `through2` stream. It doesn't
data, it just passes it right through. If you want to transform the data, transform the data, it just passes it right through. If you want
extend the class, and override the `write()` method. Once you're done to transform the data, extend the class, and override the
transforming the data however you want, call `super.write()` with the `write()` method. Once you're done transforming the data however
transform output. you want, call `super.write()` with the transform output.
For some examples of streams that extend Minipass in various ways, check For some examples of streams that extend Minipass in various
out: ways, check out:
- [minizlib](http://npm.im/minizlib) - [minizlib](http://npm.im/minizlib)
- [fs-minipass](http://npm.im/fs-minipass) - [fs-minipass](http://npm.im/fs-minipass)
@ -54,11 +56,11 @@ out:
## Differences from Node.js Streams ## Differences from Node.js Streams
There are several things that make Minipass streams different from (and in There are several things that make Minipass streams different
some ways superior to) Node.js core streams. from (and in some ways superior to) Node.js core streams.
Please read these caveats if you are familiar with node-core streams and Please read these caveats if you are familiar with node-core
intend to use Minipass streams in your programs. streams and intend to use Minipass streams in your programs.
You can avoid most of these differences entirely (for a very You can avoid most of these differences entirely (for a very
small performance penalty) by setting `{async: true}` in the small performance penalty) by setting `{async: true}` in the
@ -66,28 +68,35 @@ constructor options.
### Timing ### Timing
Minipass streams are designed to support synchronous use-cases. Thus, data Minipass streams are designed to support synchronous use-cases.
is emitted as soon as it is available, always. It is buffered until read, Thus, data is emitted as soon as it is available, always. It is
but no longer. Another way to look at it is that Minipass streams are buffered until read, but no longer. Another way to look at it is
exactly as synchronous as the logic that writes into them. that Minipass streams are exactly as synchronous as the logic
that writes into them.
This can be surprising if your code relies on `PassThrough.write()` always This can be surprising if your code relies on
providing data on the next tick rather than the current one, or being able `PassThrough.write()` always providing data on the next tick
to call `resume()` and not have the entire buffer disappear immediately. rather than the current one, or being able to call `resume()` and
not have the entire buffer disappear immediately.
However, without this synchronicity guarantee, there would be no way for However, without this synchronicity guarantee, there would be no
Minipass to achieve the speeds it does, or support the synchronous use way for Minipass to achieve the speeds it does, or support the
cases that it does. Simply put, waiting takes time. synchronous use cases that it does. Simply put, waiting takes
time.
This non-deferring approach makes Minipass streams much easier to reason This non-deferring approach makes Minipass streams much easier to
about, especially in the context of Promises and other flow-control reason about, especially in the context of Promises and other
mechanisms. flow-control mechanisms.
Example: Example:
```js ```js
const Minipass = require('minipass') // hybrid module, either works
const stream = new Minipass({ async: true }) import { Minipass } from 'minipass'
// or:
const { Minipass } = require('minipass')
const stream = new Minipass()
stream.on('data', () => console.log('data event')) stream.on('data', () => console.log('data event'))
console.log('before write') console.log('before write')
stream.write('hello') stream.write('hello')
@ -106,7 +115,11 @@ async mode either by setting `async: true` in the constructor
options, or by setting `stream.async = true` later on. options, or by setting `stream.async = true` later on.
```js ```js
const Minipass = require('minipass') // hybrid module, either works
import { Minipass } from 'minipass'
// or:
const { Minipass } = require('minipass')
const asyncStream = new Minipass({ async: true }) const asyncStream = new Minipass({ async: true })
asyncStream.on('data', () => console.log('data event')) asyncStream.on('data', () => console.log('data event'))
console.log('before write') console.log('before write')
@ -122,7 +135,7 @@ Switching _out_ of async mode is unsafe, as it could cause data
corruption, and so is not enabled. Example: corruption, and so is not enabled. Example:
```js ```js
const Minipass = require('minipass') import { Minipass } from 'minipass'
const stream = new Minipass({ encoding: 'utf8' }) const stream = new Minipass({ encoding: 'utf8' })
stream.on('data', chunk => console.log(chunk)) stream.on('data', chunk => console.log(chunk))
stream.async = true stream.async = true
@ -143,7 +156,7 @@ To avoid this problem, once set into async mode, any attempt to
make the stream sync again will be ignored. make the stream sync again will be ignored.
```js ```js
const Minipass = require('minipass') const { Minipass } = require('minipass')
const stream = new Minipass({ encoding: 'utf8' }) const stream = new Minipass({ encoding: 'utf8' })
stream.on('data', chunk => console.log(chunk)) stream.on('data', chunk => console.log(chunk))
stream.async = true stream.async = true
@ -161,28 +174,30 @@ console.log('after writes')
### No High/Low Water Marks ### No High/Low Water Marks
Node.js core streams will optimistically fill up a buffer, returning `true` Node.js core streams will optimistically fill up a buffer,
on all writes until the limit is hit, even if the data has nowhere to go. returning `true` on all writes until the limit is hit, even if
Then, they will not attempt to draw more data in until the buffer size dips the data has nowhere to go. Then, they will not attempt to draw
below a minimum value. more data in until the buffer size dips below a minimum value.
Minipass streams are much simpler. The `write()` method will return `true` Minipass streams are much simpler. The `write()` method will
if the data has somewhere to go (which is to say, given the timing return `true` if the data has somewhere to go (which is to say,
guarantees, that the data is already there by the time `write()` returns). given the timing guarantees, that the data is already there by
the time `write()` returns).
If the data has nowhere to go, then `write()` returns false, and the data If the data has nowhere to go, then `write()` returns false, and
sits in a buffer, to be drained out immediately as soon as anyone consumes the data sits in a buffer, to be drained out immediately as soon
it. as anyone consumes it.
Since nothing is ever buffered unnecessarily, there is much less Since nothing is ever buffered unnecessarily, there is much less
copying data, and less bookkeeping about buffer capacity levels. copying data, and less bookkeeping about buffer capacity levels.
### Hazards of Buffering (or: Why Minipass Is So Fast) ### Hazards of Buffering (or: Why Minipass Is So Fast)
Since data written to a Minipass stream is immediately written all the way Since data written to a Minipass stream is immediately written
through the pipeline, and `write()` always returns true/false based on all the way through the pipeline, and `write()` always returns
whether the data was fully flushed, backpressure is communicated true/false based on whether the data was fully flushed,
immediately to the upstream caller. This minimizes buffering. backpressure is communicated immediately to the upstream caller.
This minimizes buffering.
Consider this case: Consider this case:
@ -210,14 +225,15 @@ p4.on('data', () => console.log('made it through'))
p1.write(Buffer.alloc(2048)) // returns false p1.write(Buffer.alloc(2048)) // returns false
``` ```
Along the way, the data was buffered and deferred at each stage, and Along the way, the data was buffered and deferred at each stage,
multiple event deferrals happened, for an unblocked pipeline where it was and multiple event deferrals happened, for an unblocked pipeline
perfectly safe to write all the way through! where it was perfectly safe to write all the way through!
Furthermore, setting a `highWaterMark` of `1024` might lead someone reading Furthermore, setting a `highWaterMark` of `1024` might lead
the code to think an advisory maximum of 1KiB is being set for the someone reading the code to think an advisory maximum of 1KiB is
pipeline. However, the actual advisory buffering level is the _sum_ of being set for the pipeline. However, the actual advisory
`highWaterMark` values, since each one has its own bucket. buffering level is the _sum_ of `highWaterMark` values, since
each one has its own bucket.
Consider the Minipass case: Consider the Minipass case:
@ -242,42 +258,44 @@ m4.on('data', () => console.log('made it through'))
m1.write(Buffer.alloc(2048)) // returns true m1.write(Buffer.alloc(2048)) // returns true
``` ```
It is extremely unlikely that you _don't_ want to buffer any data written, It is extremely unlikely that you _don't_ want to buffer any data
or _ever_ buffer data that can be flushed all the way through. Neither written, or _ever_ buffer data that can be flushed all the way
node-core streams nor Minipass ever fail to buffer written data, but through. Neither node-core streams nor Minipass ever fail to
node-core streams do a lot of unnecessary buffering and pausing. buffer written data, but node-core streams do a lot of
unnecessary buffering and pausing.
As always, the faster implementation is the one that does less stuff and As always, the faster implementation is the one that does less
waits less time to do it. stuff and waits less time to do it.
### Immediately emit `end` for empty streams (when not paused) ### Immediately emit `end` for empty streams (when not paused)
If a stream is not paused, and `end()` is called before writing any data If a stream is not paused, and `end()` is called before writing
into it, then it will emit `end` immediately. any data into it, then it will emit `end` immediately.
If you have logic that occurs on the `end` event which you don't want to If you have logic that occurs on the `end` event which you don't
potentially happen immediately (for example, closing file descriptors, want to potentially happen immediately (for example, closing file
moving on to the next entry in an archive parse stream, etc.) then be sure descriptors, moving on to the next entry in an archive parse
to call `stream.pause()` on creation, and then `stream.resume()` once you stream, etc.) then be sure to call `stream.pause()` on creation,
are ready to respond to the `end` event. and then `stream.resume()` once you are ready to respond to the
`end` event.
However, this is _usually_ not a problem because: However, this is _usually_ not a problem because:
### Emit `end` When Asked ### Emit `end` When Asked
One hazard of immediately emitting `'end'` is that you may not yet have had One hazard of immediately emitting `'end'` is that you may not
a chance to add a listener. In order to avoid this hazard, Minipass yet have had a chance to add a listener. In order to avoid this
streams safely re-emit the `'end'` event if a new listener is added after hazard, Minipass streams safely re-emit the `'end'` event if a
`'end'` has been emitted. new listener is added after `'end'` has been emitted.
Ie, if you do `stream.on('end', someFunction)`, and the stream has already Ie, if you do `stream.on('end', someFunction)`, and the stream
emitted `end`, then it will call the handler right away. (You can think of has already emitted `end`, then it will call the handler right
this somewhat like attaching a new `.then(fn)` to a previously-resolved away. (You can think of this somewhat like attaching a new
Promise.) `.then(fn)` to a previously-resolved Promise.)
To prevent calling handlers multiple times who would not expect multiple To prevent calling handlers multiple times who would not expect
ends to occur, all listeners are removed from the `'end'` event whenever it multiple ends to occur, all listeners are removed from the
is emitted. `'end'` event whenever it is emitted.
### Emit `error` When Asked ### Emit `error` When Asked
@ -302,10 +320,11 @@ t.pipe(dest2)
t.write('foo') // goes to both destinations t.write('foo') // goes to both destinations
``` ```
Since Minipass streams _immediately_ process any pending data through the Since Minipass streams _immediately_ process any pending data
pipeline when a new pipe destination is added, this can have surprising through the pipeline when a new pipe destination is added, this
effects, especially when a stream comes in from some other function and may can have surprising effects, especially when a stream comes in
or may not have data in its buffer. from some other function and may or may not have data in its
buffer.
```js ```js
// WARNING! WILL LOSE DATA! // WARNING! WILL LOSE DATA!
@ -315,8 +334,8 @@ src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone
src.pipe(dest2) // gets nothing! src.pipe(dest2) // gets nothing!
``` ```
One solution is to create a dedicated tee-stream junction that pipes to One solution is to create a dedicated tee-stream junction that
both locations, and then pipe to _that_ instead. pipes to both locations, and then pipe to _that_ instead.
```js ```js
// Safe example: tee to both places // Safe example: tee to both places
@ -328,9 +347,9 @@ tee.pipe(dest2)
src.pipe(tee) // tee gets 'foo', pipes to both locations src.pipe(tee) // tee gets 'foo', pipes to both locations
``` ```
The same caveat applies to `on('data')` event listeners. The first one The same caveat applies to `on('data')` event listeners. The
added will _immediately_ receive all of the data, leaving nothing for the first one added will _immediately_ receive all of the data,
second: leaving nothing for the second:
```js ```js
// WARNING! WILL LOSE DATA! // WARNING! WILL LOSE DATA!
@ -361,11 +380,11 @@ program logic.
## USAGE ## USAGE
It's a stream! Use it like a stream and it'll most likely do what you It's a stream! Use it like a stream and it'll most likely do what
want. you want.
```js ```js
const Minipass = require('minipass') import { Minipass } from 'minipass'
const mp = new Minipass(options) // optional: { encoding, objectMode } const mp = new Minipass(options) // optional: { encoding, objectMode }
mp.write('foo') mp.write('foo')
mp.pipe(someOtherStream) mp.pipe(someOtherStream)
@ -374,145 +393,165 @@ mp.end('bar')
### OPTIONS ### OPTIONS
* `encoding` How would you like the data coming _out_ of the stream to be - `encoding` How would you like the data coming _out_ of the
encoded? Accepts any values that can be passed to `Buffer.toString()`. stream to be encoded? Accepts any values that can be passed to
* `objectMode` Emit data exactly as it comes in. This will be flipped on `Buffer.toString()`.
by default if you write() something other than a string or Buffer at any - `objectMode` Emit data exactly as it comes in. This will be
point. Setting `objectMode: true` will prevent setting any encoding flipped on by default if you write() something other than a
value. string or Buffer at any point. Setting `objectMode: true` will
* `async` Defaults to `false`. Set to `true` to defer data prevent setting any encoding value.
- `async` Defaults to `false`. Set to `true` to defer data
emission until next tick. This reduces performance slightly, emission until next tick. This reduces performance slightly,
but makes Minipass streams use timing behavior closer to Node but makes Minipass streams use timing behavior closer to Node
core streams. See [Timing](#timing) for more details. core streams. See [Timing](#timing) for more details.
- `signal` An `AbortSignal` that will cause the stream to unhook
itself from everything and become as inert as possible. Note
that providing a `signal` parameter will make `'error'` events
no longer throw if they are unhandled, but they will still be
emitted to handlers if any are attached.
### API ### API
Implements the user-facing portions of Node.js's `Readable` and `Writable` Implements the user-facing portions of Node.js's `Readable` and
streams. `Writable` streams.
### Methods ### Methods
* `write(chunk, [encoding], [callback])` - Put data in. (Note that, in the - `write(chunk, [encoding], [callback])` - Put data in. (Note
base Minipass class, the same data will come out.) Returns `false` if that, in the base Minipass class, the same data will come out.)
the stream will buffer the next write, or true if it's still in "flowing" Returns `false` if the stream will buffer the next write, or
mode. true if it's still in "flowing" mode.
* `end([chunk, [encoding]], [callback])` - Signal that you have no more - `end([chunk, [encoding]], [callback])` - Signal that you have
data to write. This will queue an `end` event to be fired when all the no more data to write. This will queue an `end` event to be
data has been consumed. fired when all the data has been consumed.
* `setEncoding(encoding)` - Set the encoding for data coming of the stream. - `setEncoding(encoding)` - Set the encoding for data coming of
This can only be done once. the stream. This can only be done once.
* `pause()` - No more data for a while, please. This also prevents `end` - `pause()` - No more data for a while, please. This also
from being emitted for empty streams until the stream is resumed. prevents `end` from being emitted for empty streams until the
* `resume()` - Resume the stream. If there's data in the buffer, it is all stream is resumed.
discarded. Any buffered events are immediately emitted. - `resume()` - Resume the stream. If there's data in the buffer,
* `pipe(dest)` - Send all output to the stream provided. When it is all discarded. Any buffered events are immediately
emitted.
- `pipe(dest)` - Send all output to the stream provided. When
data is emitted, it is immediately written to any and all pipe data is emitted, it is immediately written to any and all pipe
destinations. (Or written on next tick in `async` mode.) destinations. (Or written on next tick in `async` mode.)
* `unpipe(dest)` - Stop piping to the destination stream. This - `unpipe(dest)` - Stop piping to the destination stream. This is
is immediate, meaning that any asynchronously queued data will immediate, meaning that any asynchronously queued data will
_not_ make it to the destination when running in `async` mode. _not_ make it to the destination when running in `async` mode.
* `options.end` - Boolean, end the destination stream when - `options.end` - Boolean, end the destination stream when the
the source stream ends. Default `true`. source stream ends. Default `true`.
* `options.proxyErrors` - Boolean, proxy `error` events from - `options.proxyErrors` - Boolean, proxy `error` events from
the source stream to the destination stream. Note that the source stream to the destination stream. Note that errors
errors are _not_ proxied after the pipeline terminates, are _not_ proxied after the pipeline terminates, either due
either due to the source emitting `'end'` or manually to the source emitting `'end'` or manually unpiping with
unpiping with `src.unpipe(dest)`. Default `false`. `src.unpipe(dest)`. Default `false`.
* `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are EventEmitters. Some - `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are
events are given special treatment, however. (See below under "events".) EventEmitters. Some events are given special treatment,
* `promise()` - Returns a Promise that resolves when the stream emits however. (See below under "events".)
`end`, or rejects if the stream emits `error`. - `promise()` - Returns a Promise that resolves when the stream
* `collect()` - Return a Promise that resolves on `end` with an array emits `end`, or rejects if the stream emits `error`.
containing each chunk of data that was emitted, or rejects if the stream - `collect()` - Return a Promise that resolves on `end` with an
emits `error`. Note that this consumes the stream data. array containing each chunk of data that was emitted, or
* `concat()` - Same as `collect()`, but concatenates the data into a single rejects if the stream emits `error`. Note that this consumes
Buffer object. Will reject the returned promise if the stream is in the stream data.
objectMode, or if it goes into objectMode by the end of the data. - `concat()` - Same as `collect()`, but concatenates the data
* `read(n)` - Consume `n` bytes of data out of the buffer. If `n` is not into a single Buffer object. Will reject the returned promise
provided, then consume all of it. If `n` bytes are not available, then if the stream is in objectMode, or if it goes into objectMode
it returns null. **Note** consuming streams in this way is less by the end of the data.
efficient, and can lead to unnecessary Buffer copying. - `read(n)` - Consume `n` bytes of data out of the buffer. If `n`
* `destroy([er])` - Destroy the stream. If an error is provided, then an is not provided, then consume all of it. If `n` bytes are not
`'error'` event is emitted. If the stream has a `close()` method, and available, then it returns null. **Note** consuming streams in
has not emitted a `'close'` event yet, then `stream.close()` will be this way is less efficient, and can lead to unnecessary Buffer
called. Any Promises returned by `.promise()`, `.collect()` or copying.
`.concat()` will be rejected. After being destroyed, writing to the - `destroy([er])` - Destroy the stream. If an error is provided,
stream will emit an error. No more data will be emitted if the stream is then an `'error'` event is emitted. If the stream has a
destroyed, even if it was previously buffered. `close()` method, and has not emitted a `'close'` event yet,
then `stream.close()` will be called. Any Promises returned by
`.promise()`, `.collect()` or `.concat()` will be rejected.
After being destroyed, writing to the stream will emit an
error. No more data will be emitted if the stream is destroyed,
even if it was previously buffered.
### Properties ### Properties
* `bufferLength` Read-only. Total number of bytes buffered, or in the case - `bufferLength` Read-only. Total number of bytes buffered, or in
of objectMode, the total number of objects. the case of objectMode, the total number of objects.
* `encoding` The encoding that has been set. (Setting this is equivalent - `encoding` The encoding that has been set. (Setting this is
to calling `setEncoding(enc)` and has the same prohibition against equivalent to calling `setEncoding(enc)` and has the same
setting multiple times.) prohibition against setting multiple times.)
* `flowing` Read-only. Boolean indicating whether a chunk written to the - `flowing` Read-only. Boolean indicating whether a chunk written
stream will be immediately emitted. to the stream will be immediately emitted.
* `emittedEnd` Read-only. Boolean indicating whether the end-ish events - `emittedEnd` Read-only. Boolean indicating whether the end-ish
(ie, `end`, `prefinish`, `finish`) have been emitted. Note that events (ie, `end`, `prefinish`, `finish`) have been emitted.
listening on any end-ish event will immediateyl re-emit it if it has Note that listening on any end-ish event will immediateyl
already been emitted. re-emit it if it has already been emitted.
* `writable` Whether the stream is writable. Default `true`. Set to - `writable` Whether the stream is writable. Default `true`. Set
`false` when `end()` to `false` when `end()`
* `readable` Whether the stream is readable. Default `true`. - `readable` Whether the stream is readable. Default `true`.
* `buffer` A [yallist](http://npm.im/yallist) linked list of chunks written - `pipes` An array of Pipe objects referencing streams that this
to the stream that have not yet been emitted. (It's probably a bad idea stream is piping into.
to mess with this.) - `destroyed` A getter that indicates whether the stream was
* `pipes` A [yallist](http://npm.im/yallist) linked list of streams that destroyed.
this stream is piping into. (It's probably a bad idea to mess with - `paused` True if the stream has been explicitly paused,
this.) otherwise false.
* `destroyed` A getter that indicates whether the stream was destroyed. - `objectMode` Indicates whether the stream is in `objectMode`.
* `paused` True if the stream has been explicitly paused, otherwise false. Once set to `true`, it cannot be set to `false`.
* `objectMode` Indicates whether the stream is in `objectMode`. Once set - `aborted` Readonly property set when the `AbortSignal`
to `true`, it cannot be set to `false`. dispatches an `abort` event.
### Events ### Events
* `data` Emitted when there's data to read. Argument is the data to read. - `data` Emitted when there's data to read. Argument is the data
This is never emitted while not flowing. If a listener is attached, that to read. This is never emitted while not flowing. If a listener
will resume the stream. is attached, that will resume the stream.
* `end` Emitted when there's no more data to read. This will be emitted - `end` Emitted when there's no more data to read. This will be
immediately for empty streams when `end()` is called. If a listener is emitted immediately for empty streams when `end()` is called.
attached, and `end` was already emitted, then it will be emitted again. If a listener is attached, and `end` was already emitted, then
All listeners are removed when `end` is emitted. it will be emitted again. All listeners are removed when `end`
* `prefinish` An end-ish event that follows the same logic as `end` and is is emitted.
emitted in the same conditions where `end` is emitted. Emitted after - `prefinish` An end-ish event that follows the same logic as
`'end'`. `end` and is emitted in the same conditions where `end` is
* `finish` An end-ish event that follows the same logic as `end` and is emitted. Emitted after `'end'`.
emitted in the same conditions where `end` is emitted. Emitted after - `finish` An end-ish event that follows the same logic as `end`
`'prefinish'`. and is emitted in the same conditions where `end` is emitted.
* `close` An indication that an underlying resource has been released. Emitted after `'prefinish'`.
Minipass does not emit this event, but will defer it until after `end` - `close` An indication that an underlying resource has been
has been emitted, since it throws off some stream libraries otherwise. released. Minipass does not emit this event, but will defer it
* `drain` Emitted when the internal buffer empties, and it is again until after `end` has been emitted, since it throws off some
suitable to `write()` into the stream. stream libraries otherwise.
* `readable` Emitted when data is buffered and ready to be read by a - `drain` Emitted when the internal buffer empties, and it is
consumer. again suitable to `write()` into the stream.
* `resume` Emitted when stream changes state from buffering to flowing - `readable` Emitted when data is buffered and ready to be read
mode. (Ie, when `resume` is called, `pipe` is called, or a `data` event by a consumer.
listener is added.) - `resume` Emitted when stream changes state from buffering to
flowing mode. (Ie, when `resume` is called, `pipe` is called,
or a `data` event listener is added.)
### Static Methods ### Static Methods
* `Minipass.isStream(stream)` Returns `true` if the argument is a stream, - `Minipass.isStream(stream)` Returns `true` if the argument is a
and false otherwise. To be considered a stream, the object must be stream, and false otherwise. To be considered a stream, the
either an instance of Minipass, or an EventEmitter that has either a object must be either an instance of Minipass, or an
`pipe()` method, or both `write()` and `end()` methods. (Pretty much any EventEmitter that has either a `pipe()` method, or both
stream in node-land will return `true` for this.) `write()` and `end()` methods. (Pretty much any stream in
node-land will return `true` for this.)
## EXAMPLES ## EXAMPLES
Here are some examples of things you can do with Minipass streams. Here are some examples of things you can do with Minipass
streams.
### simple "are you done yet" promise ### simple "are you done yet" promise
```js ```js
mp.promise().then(() => { mp.promise().then(
() => {
// stream is finished // stream is finished
}, er => { },
er => {
// stream emitted an error // stream emitted an error
}) }
)
``` ```
### collecting ### collecting
@ -531,9 +570,9 @@ mp.collect().then(all => {
### collecting into a single blob ### collecting into a single blob
This is a bit slower because it concatenates the data into one chunk for This is a bit slower because it concatenates the data into one
you, but if you're going to do it yourself anyway, it's convenient this chunk for you, but if you're going to do it yourself anyway, it's
way: convenient this way:
```js ```js
mp.concat().then(onebigchunk => { mp.concat().then(onebigchunk => {
@ -544,17 +583,18 @@ mp.concat().then(onebigchunk => {
### iteration ### iteration
You can iterate over streams synchronously or asynchronously in platforms You can iterate over streams synchronously or asynchronously in
that support it. platforms that support it.
Synchronous iteration will end when the currently available data is Synchronous iteration will end when the currently available data
consumed, even if the `end` event has not been reached. In string and is consumed, even if the `end` event has not been reached. In
buffer mode, the data is concatenated, so unless multiple writes are string and buffer mode, the data is concatenated, so unless
occurring in the same tick as the `read()`, sync iteration loops will multiple writes are occurring in the same tick as the `read()`,
generally only have a single iteration. sync iteration loops will generally only have a single iteration.
To consume chunks in this way exactly as they have been written, with no To consume chunks in this way exactly as they have been written,
flattening, create the stream with the `{ objectMode: true }` option. with no flattening, create the stream with the `{ objectMode:
true }` option.
```js ```js
const mp = new Minipass({ objectMode: true }) const mp = new Minipass({ objectMode: true })
@ -587,8 +627,7 @@ const mp = new Minipass({ encoding: 'utf8' })
// some source of some data // some source of some data
let i = 5 let i = 5
const inter = setInterval(() => { const inter = setInterval(() => {
if (i-- > 0) if (i-- > 0) mp.write(Buffer.from('foo\n', 'utf8'))
mp.write(Buffer.from('foo\n', 'utf8'))
else { else {
mp.end() mp.end()
clearInterval(inter) clearInterval(inter)
@ -629,7 +668,8 @@ someSource.pipe(new Logger()).pipe(someDest)
```js ```js
// js classes are fun // js classes are fun
someSource someSource
.pipe(new (class extends Minipass { .pipe(
new (class extends Minipass {
emit(ev, ...data) { emit(ev, ...data) {
// let's also log events, because debugging some weird thing // let's also log events, because debugging some weird thing
console.log('EMIT', ev) console.log('EMIT', ev)
@ -643,7 +683,8 @@ someSource
console.log('END', chunk, encoding) console.log('END', chunk, encoding)
return super.end(chunk, encoding, callback) return super.end(chunk, encoding, callback)
} }
})) })()
)
.pipe(someDest) .pipe(someDest)
``` ```
@ -704,7 +745,7 @@ class NDJSONDecode extends Minipass {
typeof encoding === 'string' && typeof encoding === 'string' &&
encoding !== 'utf8') { encoding !== 'utf8') {
chunk = Buffer.from(chunk, encoding).toString() chunk = Buffer.from(chunk, encoding).toString()
} else if (Buffer.isBuffer(chunk)) } else if (Buffer.isBuffer(chunk)) {
chunk = chunk.toString() chunk = chunk.toString()
} }
if (typeof encoding === 'function') { if (typeof encoding === 'function') {

45
node_modules/minipass/index.d.ts generated vendored

@ -6,56 +6,62 @@
import { EventEmitter } from 'events' import { EventEmitter } from 'events'
import { Stream } from 'stream' import { Stream } from 'stream'
declare namespace Minipass { export namespace Minipass {
type Encoding = BufferEncoding | 'buffer' | null export type Encoding = BufferEncoding | 'buffer' | null
interface Writable extends EventEmitter { export interface Writable extends EventEmitter {
end(): any end(): any
write(chunk: any, ...args: any[]): any write(chunk: any, ...args: any[]): any
} }
interface Readable extends EventEmitter { export interface Readable extends EventEmitter {
pause(): any pause(): any
resume(): any resume(): any
pipe(): any pipe(): any
} }
type DualIterable<T> = Iterable<T> & AsyncIterable<T> export type DualIterable<T> = Iterable<T> & AsyncIterable<T>
type ContiguousData = Buffer | ArrayBufferLike | ArrayBufferView | string export type ContiguousData =
| Buffer
| ArrayBufferLike
| ArrayBufferView
| string
type BufferOrString = Buffer | string export type BufferOrString = Buffer | string
interface StringOptions { export interface SharedOptions {
async?: boolean
signal?: AbortSignal
}
export interface StringOptions extends SharedOptions {
encoding: BufferEncoding encoding: BufferEncoding
objectMode?: boolean objectMode?: boolean
async?: boolean
} }
interface BufferOptions { export interface BufferOptions extends SharedOptions {
encoding?: null | 'buffer' encoding?: null | 'buffer'
objectMode?: boolean objectMode?: boolean
async?: boolean
} }
interface ObjectModeOptions { export interface ObjectModeOptions extends SharedOptions {
objectMode: true objectMode: true
async?: boolean
} }
interface PipeOptions { export interface PipeOptions {
end?: boolean end?: boolean
proxyErrors?: boolean proxyErrors?: boolean
} }
type Options<T> = T extends string export type Options<T> = T extends string
? StringOptions ? StringOptions
: T extends Buffer : T extends Buffer
? BufferOptions ? BufferOptions
: ObjectModeOptions : ObjectModeOptions
} }
declare class Minipass< export class Minipass<
RType extends any = Buffer, RType extends any = Buffer,
WType extends any = RType extends Minipass.BufferOrString WType extends any = RType extends Minipass.BufferOrString
? Minipass.ContiguousData ? Minipass.ContiguousData
@ -70,6 +76,7 @@ declare class Minipass<
readonly flowing: boolean readonly flowing: boolean
readonly writable: boolean readonly writable: boolean
readonly readable: boolean readonly readable: boolean
readonly aborted: boolean
readonly paused: boolean readonly paused: boolean
readonly emittedEnd: boolean readonly emittedEnd: boolean
readonly destroyed: boolean readonly destroyed: boolean
@ -140,8 +147,6 @@ declare class Minipass<
listener: () => any listener: () => any
): this ): this
[Symbol.iterator](): Iterator<RType> [Symbol.iterator](): Generator<RType, void, void>
[Symbol.asyncIterator](): AsyncIterator<RType> [Symbol.asyncIterator](): AsyncGenerator<RType, void, void>
} }
export = Minipass

403
node_modules/minipass/index.js generated vendored

@ -1,11 +1,15 @@
'use strict' 'use strict'
const proc = typeof process === 'object' && process ? process : { const proc =
typeof process === 'object' && process
? process
: {
stdout: null, stdout: null,
stderr: null, stderr: null,
} }
const EE = require('events') const EE = require('events')
const Stream = require('stream') const Stream = require('stream')
const SD = require('string_decoder').StringDecoder const stringdecoder = require('string_decoder')
const SD = stringdecoder.StringDecoder
const EOF = Symbol('EOF') const EOF = Symbol('EOF')
const MAYBE_EMIT_END = Symbol('maybeEmitEnd') const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
@ -27,34 +31,38 @@ const BUFFERLENGTH = Symbol('bufferLength')
const BUFFERPUSH = Symbol('bufferPush') const BUFFERPUSH = Symbol('bufferPush')
const BUFFERSHIFT = Symbol('bufferShift') const BUFFERSHIFT = Symbol('bufferShift')
const OBJECTMODE = Symbol('objectMode') const OBJECTMODE = Symbol('objectMode')
// internal event when stream is destroyed
const DESTROYED = Symbol('destroyed') const DESTROYED = Symbol('destroyed')
// internal event when stream has an error
const ERROR = Symbol('error')
const EMITDATA = Symbol('emitData') const EMITDATA = Symbol('emitData')
const EMITEND = Symbol('emitEnd') const EMITEND = Symbol('emitEnd')
const EMITEND2 = Symbol('emitEnd2') const EMITEND2 = Symbol('emitEnd2')
const ASYNC = Symbol('async') const ASYNC = Symbol('async')
const ABORT = Symbol('abort')
const ABORTED = Symbol('aborted')
const SIGNAL = Symbol('signal')
const defer = fn => Promise.resolve().then(fn) const defer = fn => Promise.resolve().then(fn)
// TODO remove when Node v8 support drops // TODO remove when Node v8 support drops
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
const ASYNCITERATOR = doIter && Symbol.asyncIterator const ASYNCITERATOR =
|| Symbol('asyncIterator not implemented') (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
const ITERATOR = doIter && Symbol.iterator const ITERATOR =
|| Symbol('iterator not implemented') (doIter && Symbol.iterator) || Symbol('iterator not implemented')
// events that mean 'the stream is over' // events that mean 'the stream is over'
// these are treated specially, and re-emitted // these are treated specially, and re-emitted
// if they are listened for after emitting. // if they are listened for after emitting.
const isEndish = ev => const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
ev === 'end' ||
ev === 'finish' ||
ev === 'prefinish'
const isArrayBuffer = b => b instanceof ArrayBuffer || const isArrayBuffer = b =>
typeof b === 'object' && b instanceof ArrayBuffer ||
(typeof b === 'object' &&
b.constructor && b.constructor &&
b.constructor.name === 'ArrayBuffer' && b.constructor.name === 'ArrayBuffer' &&
b.byteLength >= 0 b.byteLength >= 0)
const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
@ -73,8 +81,7 @@ class Pipe {
proxyErrors() {} proxyErrors() {}
end() { end() {
this.unpipe() this.unpipe()
if (this.opts.end) if (this.opts.end) this.dest.end()
this.dest.end()
} }
} }
@ -90,7 +97,7 @@ class PipeProxyErrors extends Pipe {
} }
} }
module.exports = class Minipass extends Stream { class Minipass extends Stream {
constructor(options) { constructor(options) {
super() super()
this[FLOWING] = false this[FLOWING] = false
@ -98,14 +105,11 @@ module.exports = class Minipass extends Stream {
this[PAUSED] = false this[PAUSED] = false
this[PIPES] = [] this[PIPES] = []
this[BUFFER] = [] this[BUFFER] = []
this[OBJECTMODE] = options && options.objectMode || false this[OBJECTMODE] = (options && options.objectMode) || false
if (this[OBJECTMODE]) if (this[OBJECTMODE]) this[ENCODING] = null
this[ENCODING] = null else this[ENCODING] = (options && options.encoding) || null
else if (this[ENCODING] === 'buffer') this[ENCODING] = null
this[ENCODING] = options && options.encoding || null this[ASYNC] = (options && !!options.async) || false
if (this[ENCODING] === 'buffer')
this[ENCODING] = null
this[ASYNC] = options && !!options.async || false
this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
this[EOF] = false this[EOF] = false
this[EMITTED_END] = false this[EMITTED_END] = false
@ -122,17 +126,31 @@ module.exports = class Minipass extends Stream {
if (options && options.debugExposePipes === true) { if (options && options.debugExposePipes === true) {
Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
} }
this[SIGNAL] = options && options.signal
this[ABORTED] = false
if (this[SIGNAL]) {
this[SIGNAL].addEventListener('abort', () => this[ABORT]())
if (this[SIGNAL].aborted) {
this[ABORT]()
}
}
} }
get bufferLength () { return this[BUFFERLENGTH] } get bufferLength() {
return this[BUFFERLENGTH]
}
get encoding () { return this[ENCODING] } get encoding() {
return this[ENCODING]
}
set encoding(enc) { set encoding(enc) {
if (this[OBJECTMODE]) if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
throw new Error('cannot set encoding in objectMode')
if (this[ENCODING] && enc !== this[ENCODING] && if (
(this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) this[ENCODING] &&
enc !== this[ENCODING] &&
((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
)
throw new Error('cannot change encoding') throw new Error('cannot change encoding')
if (this[ENCODING] !== enc) { if (this[ENCODING] !== enc) {
@ -148,29 +166,50 @@ module.exports = class Minipass extends Stream {
this.encoding = enc this.encoding = enc
} }
get objectMode () { return this[OBJECTMODE] } get objectMode() {
set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om } return this[OBJECTMODE]
}
set objectMode(om) {
this[OBJECTMODE] = this[OBJECTMODE] || !!om
}
get ['async'] () { return this[ASYNC] } get ['async']() {
set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a } return this[ASYNC]
}
set ['async'](a) {
this[ASYNC] = this[ASYNC] || !!a
}
// drop everything and get out of the flow completely
[ABORT]() {
this[ABORTED] = true
this.emit('abort', this[SIGNAL].reason)
this.destroy(this[SIGNAL].reason)
}
get aborted() {
return this[ABORTED]
}
set aborted(_) {}
write(chunk, encoding, cb) { write(chunk, encoding, cb) {
if (this[EOF]) if (this[ABORTED]) return false
throw new Error('write after end') if (this[EOF]) throw new Error('write after end')
if (this[DESTROYED]) { if (this[DESTROYED]) {
this.emit('error', Object.assign( this.emit(
'error',
Object.assign(
new Error('Cannot call write after a stream was destroyed'), new Error('Cannot call write after a stream was destroyed'),
{ code: 'ERR_STREAM_DESTROYED' } { code: 'ERR_STREAM_DESTROYED' }
)) )
)
return true return true
} }
if (typeof encoding === 'function') if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
cb = encoding, encoding = 'utf8'
if (!encoding) if (!encoding) encoding = 'utf8'
encoding = 'utf8'
const fn = this[ASYNC] ? defer : f => f() const fn = this[ASYNC] ? defer : f => f()
@ -181,8 +220,7 @@ module.exports = class Minipass extends Stream {
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
if (isArrayBufferView(chunk)) if (isArrayBufferView(chunk))
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
else if (isArrayBuffer(chunk)) else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
chunk = Buffer.from(chunk)
else if (typeof chunk !== 'string') else if (typeof chunk !== 'string')
// use the setter so we throw if we have encoding set // use the setter so we throw if we have encoding set
this.objectMode = true this.objectMode = true
@ -192,19 +230,14 @@ module.exports = class Minipass extends Stream {
// this yields better performance, fewer checks later. // this yields better performance, fewer checks later.
if (this[OBJECTMODE]) { if (this[OBJECTMODE]) {
/* istanbul ignore if - maybe impossible? */ /* istanbul ignore if - maybe impossible? */
if (this.flowing && this[BUFFERLENGTH] !== 0) if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
this[FLUSH](true)
if (this.flowing) if (this.flowing) this.emit('data', chunk)
this.emit('data', chunk) else this[BUFFERPUSH](chunk)
else
this[BUFFERPUSH](chunk)
if (this[BUFFERLENGTH] !== 0) if (this[BUFFERLENGTH] !== 0) this.emit('readable')
this.emit('readable')
if (cb) if (cb) fn(cb)
fn(cb)
return this.flowing return this.flowing
} }
@ -212,18 +245,18 @@ module.exports = class Minipass extends Stream {
// at this point the chunk is a buffer or string // at this point the chunk is a buffer or string
// don't buffer it up or send it to the decoder // don't buffer it up or send it to the decoder
if (!chunk.length) { if (!chunk.length) {
if (this[BUFFERLENGTH] !== 0) if (this[BUFFERLENGTH] !== 0) this.emit('readable')
this.emit('readable') if (cb) fn(cb)
if (cb)
fn(cb)
return this.flowing return this.flowing
} }
// fast-path writing strings of same encoding to a stream with // fast-path writing strings of same encoding to a stream with
// an empty buffer, skipping the buffer/decoder dance // an empty buffer, skipping the buffer/decoder dance
if (typeof chunk === 'string' && if (
typeof chunk === 'string' &&
// unless it is a string already ready for us to use // unless it is a string already ready for us to use
!(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { !(encoding === this[ENCODING] && !this[DECODER].lastNeed)
) {
chunk = Buffer.from(chunk, encoding) chunk = Buffer.from(chunk, encoding)
} }
@ -231,40 +264,31 @@ module.exports = class Minipass extends Stream {
chunk = this[DECODER].write(chunk) chunk = this[DECODER].write(chunk)
// Note: flushing CAN potentially switch us into not-flowing mode // Note: flushing CAN potentially switch us into not-flowing mode
if (this.flowing && this[BUFFERLENGTH] !== 0) if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
this[FLUSH](true)
if (this.flowing) if (this.flowing) this.emit('data', chunk)
this.emit('data', chunk) else this[BUFFERPUSH](chunk)
else
this[BUFFERPUSH](chunk)
if (this[BUFFERLENGTH] !== 0) if (this[BUFFERLENGTH] !== 0) this.emit('readable')
this.emit('readable')
if (cb) if (cb) fn(cb)
fn(cb)
return this.flowing return this.flowing
} }
read(n) { read(n) {
if (this[DESTROYED]) if (this[DESTROYED]) return null
return null
if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
this[MAYBE_EMIT_END]() this[MAYBE_EMIT_END]()
return null return null
} }
if (this[OBJECTMODE]) if (this[OBJECTMODE]) n = null
n = null
if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
if (this.encoding) if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
this[BUFFER] = [this[BUFFER].join('')] else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
else
this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
} }
const ret = this[READ](n || null, this[BUFFER][0]) const ret = this[READ](n || null, this[BUFFER][0])
@ -273,8 +297,7 @@ module.exports = class Minipass extends Stream {
} }
[READ](n, chunk) { [READ](n, chunk) {
if (n === chunk.length || n === null) if (n === chunk.length || n === null) this[BUFFERSHIFT]()
this[BUFFERSHIFT]()
else { else {
this[BUFFER][0] = chunk.slice(n) this[BUFFER][0] = chunk.slice(n)
chunk = chunk.slice(0, n) chunk = chunk.slice(0, n)
@ -283,21 +306,16 @@ module.exports = class Minipass extends Stream {
this.emit('data', chunk) this.emit('data', chunk)
if (!this[BUFFER].length && !this[EOF]) if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
this.emit('drain')
return chunk return chunk
} }
end(chunk, encoding, cb) { end(chunk, encoding, cb) {
if (typeof chunk === 'function') if (typeof chunk === 'function') (cb = chunk), (chunk = null)
cb = chunk, chunk = null if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
if (typeof encoding === 'function') if (chunk) this.write(chunk, encoding)
cb = encoding, encoding = 'utf8' if (cb) this.once('end', cb)
if (chunk)
this.write(chunk, encoding)
if (cb)
this.once('end', cb)
this[EOF] = true this[EOF] = true
this.writable = false this.writable = false
@ -305,25 +323,20 @@ module.exports = class Minipass extends Stream {
// even if we're not reading. // even if we're not reading.
// we'll re-emit if a new 'end' listener is added anyway. // we'll re-emit if a new 'end' listener is added anyway.
// This makes MP more suitable to write-only use cases. // This makes MP more suitable to write-only use cases.
if (this.flowing || !this[PAUSED]) if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
this[MAYBE_EMIT_END]()
return this return this
} }
// don't let the internal resume be overwritten // don't let the internal resume be overwritten
[RESUME]() { [RESUME]() {
if (this[DESTROYED]) if (this[DESTROYED]) return
return
this[PAUSED] = false this[PAUSED] = false
this[FLOWING] = true this[FLOWING] = true
this.emit('resume') this.emit('resume')
if (this[BUFFER].length) if (this[BUFFER].length) this[FLUSH]()
this[FLUSH]() else if (this[EOF]) this[MAYBE_EMIT_END]()
else if (this[EOF]) else this.emit('drain')
this[MAYBE_EMIT_END]()
else
this.emit('drain')
} }
resume() { resume() {
@ -348,57 +361,48 @@ module.exports = class Minipass extends Stream {
} }
[BUFFERPUSH](chunk) { [BUFFERPUSH](chunk) {
if (this[OBJECTMODE]) if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
this[BUFFERLENGTH] += 1 else this[BUFFERLENGTH] += chunk.length
else
this[BUFFERLENGTH] += chunk.length
this[BUFFER].push(chunk) this[BUFFER].push(chunk)
} }
[BUFFERSHIFT]() { [BUFFERSHIFT]() {
if (this[BUFFER].length) { if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
if (this[OBJECTMODE]) else this[BUFFERLENGTH] -= this[BUFFER][0].length
this[BUFFERLENGTH] -= 1
else
this[BUFFERLENGTH] -= this[BUFFER][0].length
}
return this[BUFFER].shift() return this[BUFFER].shift()
} }
[FLUSH](noDrain) { [FLUSH](noDrain) {
do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
if (!noDrain && !this[BUFFER].length && !this[EOF]) if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
this.emit('drain')
} }
[FLUSHCHUNK](chunk) { [FLUSHCHUNK](chunk) {
return chunk ? (this.emit('data', chunk), this.flowing) : false this.emit('data', chunk)
return this.flowing
} }
pipe(dest, opts) { pipe(dest, opts) {
if (this[DESTROYED]) if (this[DESTROYED]) return
return
const ended = this[EMITTED_END] const ended = this[EMITTED_END]
opts = opts || {} opts = opts || {}
if (dest === proc.stdout || dest === proc.stderr) if (dest === proc.stdout || dest === proc.stderr) opts.end = false
opts.end = false else opts.end = opts.end !== false
else
opts.end = opts.end !== false
opts.proxyErrors = !!opts.proxyErrors opts.proxyErrors = !!opts.proxyErrors
// piping an ended stream ends immediately // piping an ended stream ends immediately
if (ended) { if (ended) {
if (opts.end) if (opts.end) dest.end()
dest.end()
} else { } else {
this[PIPES].push(!opts.proxyErrors ? new Pipe(this, dest, opts) this[PIPES].push(
: new PipeProxyErrors(this, dest, opts)) !opts.proxyErrors
if (this[ASYNC]) ? new Pipe(this, dest, opts)
defer(() => this[RESUME]()) : new PipeProxyErrors(this, dest, opts)
else )
this[RESUME]() if (this[ASYNC]) defer(() => this[RESUME]())
else this[RESUME]()
} }
return dest return dest
@ -418,18 +422,15 @@ module.exports = class Minipass extends Stream {
on(ev, fn) { on(ev, fn) {
const ret = super.on(ev, fn) const ret = super.on(ev, fn)
if (ev === 'data' && !this[PIPES].length && !this.flowing) if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
this[RESUME]()
else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
super.emit('readable') super.emit('readable')
else if (isEndish(ev) && this[EMITTED_END]) { else if (isEndish(ev) && this[EMITTED_END]) {
super.emit(ev) super.emit(ev)
this.removeAllListeners(ev) this.removeAllListeners(ev)
} else if (ev === 'error' && this[EMITTED_ERROR]) { } else if (ev === 'error' && this[EMITTED_ERROR]) {
if (this[ASYNC]) if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
defer(() => fn.call(this, this[EMITTED_ERROR])) else fn.call(this, this[EMITTED_ERROR])
else
fn.call(this, this[EMITTED_ERROR])
} }
return ret return ret
} }
@ -439,17 +440,18 @@ module.exports = class Minipass extends Stream {
} }
[MAYBE_EMIT_END]() { [MAYBE_EMIT_END]() {
if (!this[EMITTING_END] && if (
!this[EMITTING_END] &&
!this[EMITTED_END] && !this[EMITTED_END] &&
!this[DESTROYED] && !this[DESTROYED] &&
this[BUFFER].length === 0 && this[BUFFER].length === 0 &&
this[EOF]) { this[EOF]
) {
this[EMITTING_END] = true this[EMITTING_END] = true
this.emit('end') this.emit('end')
this.emit('prefinish') this.emit('prefinish')
this.emit('finish') this.emit('finish')
if (this[CLOSED]) if (this[CLOSED]) this.emit('close')
this.emit('close')
this[EMITTING_END] = false this[EMITTING_END] = false
} }
} }
@ -459,22 +461,27 @@ module.exports = class Minipass extends Stream {
if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
return return
else if (ev === 'data') { else if (ev === 'data') {
return !data ? false return !this[OBJECTMODE] && !data
: this[ASYNC] ? defer(() => this[EMITDATA](data)) ? false
: this[ASYNC]
? defer(() => this[EMITDATA](data))
: this[EMITDATA](data) : this[EMITDATA](data)
} else if (ev === 'end') { } else if (ev === 'end') {
return this[EMITEND]() return this[EMITEND]()
} else if (ev === 'close') { } else if (ev === 'close') {
this[CLOSED] = true this[CLOSED] = true
// don't emit close before 'end' and 'finish' // don't emit close before 'end' and 'finish'
if (!this[EMITTED_END] && !this[DESTROYED]) if (!this[EMITTED_END] && !this[DESTROYED]) return
return
const ret = super.emit('close') const ret = super.emit('close')
this.removeAllListeners('close') this.removeAllListeners('close')
return ret return ret
} else if (ev === 'error') { } else if (ev === 'error') {
this[EMITTED_ERROR] = data this[EMITTED_ERROR] = data
const ret = super.emit('error', data) super.emit(ERROR, data)
const ret =
!this[SIGNAL] || this.listeners('error').length
? super.emit('error', data)
: false
this[MAYBE_EMIT_END]() this[MAYBE_EMIT_END]()
return ret return ret
} else if (ev === 'resume') { } else if (ev === 'resume') {
@ -495,8 +502,7 @@ module.exports = class Minipass extends Stream {
[EMITDATA](data) { [EMITDATA](data) {
for (const p of this[PIPES]) { for (const p of this[PIPES]) {
if (p.dest.write(data) === false) if (p.dest.write(data) === false) this.pause()
this.pause()
} }
const ret = super.emit('data', data) const ret = super.emit('data', data)
this[MAYBE_EMIT_END]() this[MAYBE_EMIT_END]()
@ -504,15 +510,12 @@ module.exports = class Minipass extends Stream {
} }
[EMITEND]() { [EMITEND]() {
if (this[EMITTED_END]) if (this[EMITTED_END]) return
return
this[EMITTED_END] = true this[EMITTED_END] = true
this.readable = false this.readable = false
if (this[ASYNC]) if (this[ASYNC]) defer(() => this[EMITEND2]())
defer(() => this[EMITEND2]()) else this[EMITEND2]()
else
this[EMITEND2]()
} }
[EMITEND2]() { [EMITEND2]() {
@ -537,15 +540,13 @@ module.exports = class Minipass extends Stream {
// const all = await stream.collect() // const all = await stream.collect()
collect() { collect() {
const buf = [] const buf = []
if (!this[OBJECTMODE]) if (!this[OBJECTMODE]) buf.dataLength = 0
buf.dataLength = 0
// set the promise first, in case an error is raised // set the promise first, in case an error is raised
// by triggering the flow here. // by triggering the flow here.
const p = this.promise() const p = this.promise()
this.on('data', c => { this.on('data', c => {
buf.push(c) buf.push(c)
if (!this[OBJECTMODE]) if (!this[OBJECTMODE]) buf.dataLength += c.length
buf.dataLength += c.length
}) })
return p.then(() => buf) return p.then(() => buf)
} }
@ -557,7 +558,10 @@ module.exports = class Minipass extends Stream {
: this.collect().then(buf => : this.collect().then(buf =>
this[OBJECTMODE] this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode')) ? Promise.reject(new Error('cannot concat in objectMode'))
: this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength)) : this[ENCODING]
? buf.join('')
: Buffer.concat(buf, buf.dataLength)
)
} }
// stream.promise().then(() => done, er => emitted error) // stream.promise().then(() => done, er => emitted error)
@ -571,30 +575,40 @@ module.exports = class Minipass extends Stream {
// for await (let chunk of stream) // for await (let chunk of stream)
[ASYNCITERATOR]() { [ASYNCITERATOR]() {
let stopped = false
const stop = () => {
this.pause()
stopped = true
return Promise.resolve({ done: true })
}
const next = () => { const next = () => {
if (stopped) return stop()
const res = this.read() const res = this.read()
if (res !== null) if (res !== null) return Promise.resolve({ done: false, value: res })
return Promise.resolve({ done: false, value: res })
if (this[EOF]) if (this[EOF]) return stop()
return Promise.resolve({ done: true })
let resolve = null let resolve = null
let reject = null let reject = null
const onerr = er => { const onerr = er => {
this.removeListener('data', ondata) this.removeListener('data', ondata)
this.removeListener('end', onend) this.removeListener('end', onend)
this.removeListener(DESTROYED, ondestroy)
stop()
reject(er) reject(er)
} }
const ondata = value => { const ondata = value => {
this.removeListener('error', onerr) this.removeListener('error', onerr)
this.removeListener('end', onend) this.removeListener('end', onend)
this.removeListener(DESTROYED, ondestroy)
this.pause() this.pause()
resolve({ value: value, done: !!this[EOF] }) resolve({ value: value, done: !!this[EOF] })
} }
const onend = () => { const onend = () => {
this.removeListener('error', onerr) this.removeListener('error', onerr)
this.removeListener('data', ondata) this.removeListener('data', ondata)
this.removeListener(DESTROYED, ondestroy)
stop()
resolve({ done: true }) resolve({ done: true })
} }
const ondestroy = () => onerr(new Error('stream destroyed')) const ondestroy = () => onerr(new Error('stream destroyed'))
@ -608,25 +622,51 @@ module.exports = class Minipass extends Stream {
}) })
} }
return { next } return {
next,
throw: stop,
return: stop,
[ASYNCITERATOR]() {
return this
},
}
} }
// for (let chunk of stream) // for (let chunk of stream)
[ITERATOR]() { [ITERATOR]() {
let stopped = false
const stop = () => {
this.pause()
this.removeListener(ERROR, stop)
this.removeListener(DESTROYED, stop)
this.removeListener('end', stop)
stopped = true
return { done: true }
}
const next = () => { const next = () => {
if (stopped) return stop()
const value = this.read() const value = this.read()
const done = value === null return value === null ? stop() : { value }
return { value, done } }
this.once('end', stop)
this.once(ERROR, stop)
this.once(DESTROYED, stop)
return {
next,
throw: stop,
return: stop,
[ITERATOR]() {
return this
},
} }
return { next }
} }
destroy(er) { destroy(er) {
if (this[DESTROYED]) { if (this[DESTROYED]) {
if (er) if (er) this.emit('error', er)
this.emit('error', er) else this.emit(DESTROYED)
else
this.emit(DESTROYED)
return this return this
} }
@ -636,22 +676,27 @@ module.exports = class Minipass extends Stream {
this[BUFFER].length = 0 this[BUFFER].length = 0
this[BUFFERLENGTH] = 0 this[BUFFERLENGTH] = 0
if (typeof this.close === 'function' && !this[CLOSED]) if (typeof this.close === 'function' && !this[CLOSED]) this.close()
this.close()
if (er) if (er) this.emit('error', er)
this.emit('error', er) // if no error to emit, still reject pending promises
else // if no error to emit, still reject pending promises else this.emit(DESTROYED)
this.emit(DESTROYED)
return this return this
} }
static isStream(s) { static isStream(s) {
return !!s && (s instanceof Minipass || s instanceof Stream || return (
s instanceof EE && ( !!s &&
typeof s.pipe === 'function' || // readable (s instanceof Minipass ||
(typeof s.write === 'function' && typeof s.end === 'function') // writable s instanceof Stream ||
)) (s instanceof EE &&
// readable
(typeof s.pipe === 'function' ||
// writable
(typeof s.write === 'function' && typeof s.end === 'function'))))
)
} }
} }
exports.Minipass = Minipass

@ -1,15 +0,0 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

@ -1,204 +0,0 @@
# yallist
Yet Another Linked List
There are many doubly-linked list implementations like it, but this
one is mine.
For when an array would be too big, and a Map can't be iterated in
reverse order.
[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist)
## basic usage
```javascript
var yallist = require('yallist')
var myList = yallist.create([1, 2, 3])
myList.push('foo')
myList.unshift('bar')
// of course pop() and shift() are there, too
console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo']
myList.forEach(function (k) {
// walk the list head to tail
})
myList.forEachReverse(function (k, index, list) {
// walk the list tail to head
})
var myDoubledList = myList.map(function (k) {
return k + k
})
// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo']
// mapReverse is also a thing
var myDoubledListReverse = myList.mapReverse(function (k) {
return k + k
}) // ['foofoo', 6, 4, 2, 'barbar']
var reduced = myList.reduce(function (set, entry) {
set += entry
return set
}, 'start')
console.log(reduced) // 'startfoo123bar'
```
## api
The whole API is considered "public".
Functions with the same name as an Array method work more or less the
same way.
There's reverse versions of most things because that's the point.
### Yallist
Default export, the class that holds and manages a list.
Call it with either a forEach-able (like an array) or a set of
arguments, to initialize the list.
The Array-ish methods all act like you'd expect. No magic length,
though, so if you change that it won't automatically prune or add
empty spots.
### Yallist.create(..)
Alias for Yallist function. Some people like factories.
#### yallist.head
The first node in the list
#### yallist.tail
The last node in the list
#### yallist.length
The number of nodes in the list. (Change this at your peril. It is
not magic like Array length.)
#### yallist.toArray()
Convert the list to an array.
#### yallist.forEach(fn, [thisp])
Call a function on each item in the list.
#### yallist.forEachReverse(fn, [thisp])
Call a function on each item in the list, in reverse order.
#### yallist.get(n)
Get the data at position `n` in the list. If you use this a lot,
probably better off just using an Array.
#### yallist.getReverse(n)
Get the data at position `n`, counting from the tail.
#### yallist.map(fn, thisp)
Create a new Yallist with the result of calling the function on each
item.
#### yallist.mapReverse(fn, thisp)
Same as `map`, but in reverse.
#### yallist.pop()
Get the data from the list tail, and remove the tail from the list.
#### yallist.push(item, ...)
Insert one or more items to the tail of the list.
#### yallist.reduce(fn, initialValue)
Like Array.reduce.
#### yallist.reduceReverse
Like Array.reduce, but in reverse.
#### yallist.reverse
Reverse the list in place.
#### yallist.shift()
Get the data from the list head, and remove the head from the list.
#### yallist.slice([from], [to])
Just like Array.slice, but returns a new Yallist.
#### yallist.sliceReverse([from], [to])
Just like yallist.slice, but the result is returned in reverse.
#### yallist.toArray()
Create an array representation of the list.
#### yallist.toArrayReverse()
Create a reversed array representation of the list.
#### yallist.unshift(item, ...)
Insert one or more items to the head of the list.
#### yallist.unshiftNode(node)
Move a Node object to the front of the list. (That is, pull it out of
wherever it lives, and make it the new head.)
If the node belongs to a different list, then that list will remove it
first.
#### yallist.pushNode(node)
Move a Node object to the end of the list. (That is, pull it out of
wherever it lives, and make it the new tail.)
If the node belongs to a list already, then that list will remove it
first.
#### yallist.removeNode(node)
Remove a node from the list, preserving referential integrity of head
and tail and other nodes.
Will throw an error if you try to have a list remove a node that
doesn't belong to it.
### Yallist.Node
The class that holds the data and is actually the list.
Call with `var n = new Node(value, previousNode, nextNode)`
Note that if you do direct operations on Nodes themselves, it's very
easy to get into weird states where the list is broken. Be careful :)
#### node.next
The next node in the list.
#### node.prev
The previous node in the list.
#### node.value
The data the node contains.
#### node.list
The list to which this node belongs. (Null if it does not belong to
any list.)

@ -1,8 +0,0 @@
'use strict'
// Optionally attaches a Symbol.iterator generator to Yallist so lists work
// with for..of and spread syntax. Loaded best-effort from yallist.js.
module.exports = function (Yallist) {
  Yallist.prototype[Symbol.iterator] = function* () {
    // Walk head -> tail, yielding each node's value in order.
    for (let walker = this.head; walker; walker = walker.next) {
      yield walker.value
    }
  }
}

@ -1,29 +0,0 @@
{
"name": "yallist",
"version": "4.0.0",
"description": "Yet Another Linked List",
"main": "yallist.js",
"directories": {
"test": "test"
},
"files": [
"yallist.js",
"iterator.js"
],
"dependencies": {},
"devDependencies": {
"tap": "^12.1.0"
},
"scripts": {
"test": "tap test/*.js --100",
"preversion": "npm test",
"postversion": "npm publish",
"postpublish": "git push origin --all; git push origin --tags"
},
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/yallist.git"
},
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"license": "ISC"
}

@ -1,426 +0,0 @@
'use strict'
// The Yallist constructor is the module's sole export.
module.exports = Yallist

// Expose the Node class for callers that manipulate list nodes directly.
Yallist.Node = Node
// Factory alias: Yallist.create(...) behaves exactly like Yallist(...).
Yallist.create = Yallist
// Doubly-linked list constructor. Accepts either a forEach-able collection
// (array, another Yallist, ...) or a set of positional values. Works with
// or without `new`.
function Yallist (list) {
  // Support calling without `new`: transparently construct an instance.
  var self = this instanceof Yallist ? this : new Yallist()

  self.tail = null
  self.head = null
  self.length = 0

  if (list && typeof list.forEach === 'function') {
    // Seed from any forEach-able collection.
    list.forEach(function (item) {
      self.push(item)
    })
  } else if (arguments.length > 0) {
    // Otherwise treat each positional argument as a list element.
    for (var i = 0; i < arguments.length; i++) {
      self.push(arguments[i])
    }
  }

  return self
}
// Unlink `node` from this list, preserving the integrity of head, tail and
// neighbor pointers. Throws if the node belongs to a different list.
// Returns the node that followed the removed one (or null).
Yallist.prototype.removeNode = function (node) {
  if (node.list !== this) {
    throw new Error('removing node which does not belong to this list')
  }

  var after = node.next
  var before = node.prev

  // Stitch the neighbors together around the removed node.
  if (after) {
    after.prev = before
  }
  if (before) {
    before.next = after
  }

  // Fix up the endpoints if the node was head and/or tail.
  if (this.head === node) {
    this.head = after
  }
  if (this.tail === node) {
    this.tail = before
  }

  node.list.length--
  node.next = null
  node.prev = null
  node.list = null

  return after
}
// Move an existing Node object to the front of this list, detaching it from
// whatever list currently owns it. No-op if it is already the head.
Yallist.prototype.unshiftNode = function (node) {
  if (node === this.head) {
    return
  }

  if (node.list) {
    node.list.removeNode(node)
  }

  var previousHead = this.head
  node.list = this
  node.next = previousHead
  if (previousHead) {
    previousHead.prev = node
  }

  this.head = node
  // An empty list gains its first tail here.
  this.tail = this.tail || node
  this.length++
}
// Move an existing Node object to the end of this list, detaching it from
// whatever list currently owns it. No-op if it is already the tail.
Yallist.prototype.pushNode = function (node) {
  if (node === this.tail) {
    return
  }

  if (node.list) {
    node.list.removeNode(node)
  }

  var previousTail = this.tail
  node.list = this
  node.prev = previousTail
  if (previousTail) {
    previousTail.next = node
  }

  this.tail = node
  // An empty list gains its first head here.
  this.head = this.head || node
  this.length++
}
// Append each argument to the tail of the list.
// Returns the new length, mirroring Array.prototype.push.
Yallist.prototype.push = function () {
  var i = 0
  var count = arguments.length
  while (i < count) {
    push(this, arguments[i])
    i++
  }
  return this.length
}
// Prepend each argument to the head of the list (later arguments end up
// closer to the head). Returns the new length, like Array.prototype.unshift.
Yallist.prototype.unshift = function () {
  var i = 0
  var count = arguments.length
  while (i < count) {
    unshift(this, arguments[i])
    i++
  }
  return this.length
}
// Remove and return the value at the tail of the list.
// Returns undefined on an empty list.
Yallist.prototype.pop = function () {
  if (!this.tail) {
    return undefined
  }

  var value = this.tail.value
  var newTail = this.tail.prev
  this.tail = newTail
  if (newTail) {
    newTail.next = null
  } else {
    // Removed the only node; the list is now empty.
    this.head = null
  }
  this.length--
  return value
}
// Remove and return the value at the head of the list.
// Returns undefined on an empty list.
Yallist.prototype.shift = function () {
  if (!this.head) {
    return undefined
  }

  var value = this.head.value
  var newHead = this.head.next
  this.head = newHead
  if (newHead) {
    newHead.prev = null
  } else {
    // Removed the only node; the list is now empty.
    this.tail = null
  }
  this.length--
  return value
}
// Invoke fn(value, index, list) for each item, head to tail.
// `thisp` (defaulting to the list itself) is used as the callback's `this`.
Yallist.prototype.forEach = function (fn, thisp) {
  thisp = thisp || this
  var index = 0
  var walker = this.head
  while (walker !== null) {
    fn.call(thisp, walker.value, index, this)
    walker = walker.next
    index++
  }
}
// Invoke fn(value, index, list) for each item, tail to head.
// Indexes count down from length - 1, matching the forward positions.
Yallist.prototype.forEachReverse = function (fn, thisp) {
  thisp = thisp || this
  var index = this.length - 1
  var walker = this.tail
  while (walker !== null) {
    fn.call(thisp, walker.value, index, this)
    walker = walker.prev
    index--
  }
}
// Return the value at forward position `n`, or undefined when out of range.
// O(n) walk from the head; heavy random access favors an Array instead.
Yallist.prototype.get = function (n) {
  var walker = this.head
  var i = 0
  // Stop early if the chain ends before reaching index n.
  while (walker !== null && i < n) {
    walker = walker.next
    i++
  }
  // Only yield a value when index n was actually reached in-bounds.
  if (i === n && walker !== null) {
    return walker.value
  }
}
// Return the value at position `n` counting from the tail (0 = tail),
// or undefined when out of range. O(n) walk from the tail.
Yallist.prototype.getReverse = function (n) {
  var walker = this.tail
  var i = 0
  // Stop early if the chain ends before reaching index n.
  while (walker !== null && i < n) {
    walker = walker.prev
    i++
  }
  // Only yield a value when index n was actually reached in-bounds.
  if (i === n && walker !== null) {
    return walker.value
  }
}
// Build a new Yallist whose values are fn(value, list) for each item,
// walking head to tail. `thisp` defaults to the source list.
Yallist.prototype.map = function (fn, thisp) {
  thisp = thisp || this
  var mapped = new Yallist()
  var walker = this.head
  while (walker !== null) {
    mapped.push(fn.call(thisp, walker.value, this))
    walker = walker.next
  }
  return mapped
}
// Same contract as map(), but the new list is built walking tail to head.
Yallist.prototype.mapReverse = function (fn, thisp) {
  thisp = thisp || this
  var mapped = new Yallist()
  var walker = this.tail
  while (walker !== null) {
    mapped.push(fn.call(thisp, walker.value, this))
    walker = walker.prev
  }
  return mapped
}
// Fold the list head-to-tail with fn(acc, value, index), like Array.reduce.
// With no initial value, the head seeds the accumulator and iteration
// starts at the second node; throws TypeError on an empty, seedless reduce.
Yallist.prototype.reduce = function (fn, initial) {
  var acc
  var walker = this.head
  if (arguments.length > 1) {
    acc = initial
  } else if (this.head) {
    walker = this.head.next
    acc = this.head.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }

  // NOTE: the index restarts at 0 even when the head seeded the accumulator,
  // matching the original yallist behavior.
  var index = 0
  while (walker !== null) {
    acc = fn(acc, walker.value, index)
    walker = walker.next
    index++
  }
  return acc
}
// Fold the list tail-to-head with fn(acc, value, index).
// With no initial value, the tail seeds the accumulator and iteration
// starts at the second-to-last node; throws TypeError on empty + seedless.
Yallist.prototype.reduceReverse = function (fn, initial) {
  var acc
  var walker = this.tail
  if (arguments.length > 1) {
    acc = initial
  } else if (this.tail) {
    walker = this.tail.prev
    acc = this.tail.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }

  // Indexes count down from length - 1, matching forward positions.
  var index = this.length - 1
  while (walker !== null) {
    acc = fn(acc, walker.value, index)
    walker = walker.prev
    index--
  }
  return acc
}
// Snapshot the list values into a plain array, head first.
Yallist.prototype.toArray = function () {
  // Pre-size from this.length, as the original implementation does.
  var arr = new Array(this.length)
  var walker = this.head
  var i = 0
  while (walker !== null) {
    arr[i] = walker.value
    walker = walker.next
    i++
  }
  return arr
}
// Snapshot the list values into a plain array, tail first.
Yallist.prototype.toArrayReverse = function () {
  // Pre-size from this.length, as the original implementation does.
  var arr = new Array(this.length)
  var walker = this.tail
  var i = 0
  while (walker !== null) {
    arr[i] = walker.value
    walker = walker.prev
    i++
  }
  return arr
}
// Return a new Yallist with values from index `from` (inclusive) to `to`
// (exclusive), like Array.prototype.slice. Negative indexes count from
// the end; out-of-range bounds are clamped.
Yallist.prototype.slice = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }

  var ret = new Yallist()
  // Empty or inverted range: return the empty list.
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }

  // Skip ahead to the starting index...
  var i = 0
  var walker = this.head
  while (walker !== null && i < from) {
    walker = walker.next
    i++
  }
  // ...then copy values until the end index.
  while (walker !== null && i < to) {
    ret.push(walker.value)
    i++
    walker = walker.next
  }
  return ret
}
// Like slice(from, to), but the resulting Yallist holds the selected
// values in reverse order (built by walking tail to head).
Yallist.prototype.sliceReverse = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }

  var ret = new Yallist()
  // Empty or inverted range: return the empty list.
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }

  // Walk backward from the tail to just past index `to`...
  var i = this.length
  var walker = this.tail
  while (walker !== null && i > to) {
    walker = walker.prev
    i--
  }
  // ...then collect values down to (but not including) index `from`.
  while (walker !== null && i > from) {
    ret.push(walker.value)
    i--
    walker = walker.prev
  }
  return ret
}
// Remove `deleteCount` values starting at index `start`, then insert the
// remaining `nodes` (values) at that position. Returns an array of the
// removed values, mirroring Array.prototype.splice.
Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
  // Clamp a past-the-end start to the last index.
  if (start > this.length) {
    start = this.length - 1
  }
  // Negative start counts back from the end.
  if (start < 0) {
    start = this.length + start;
  }
  // Walk forward to the node at position `start`.
  for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
    walker = walker.next
  }
  // Collect and unlink the deleted values; removeNode returns the next node.
  var ret = []
  for (var i = 0; walker && i < deleteCount; i++) {
    ret.push(walker.value)
    walker = this.removeNode(walker)
  }
  // If deletion ran off the end of the list, anchor at the tail instead.
  if (walker === null) {
    walker = this.tail
  }
  // Step back so `insert` places the new values at the splice position
  // (insert links before the head anchor, after any other anchor).
  if (walker !== this.head && walker !== this.tail) {
    walker = walker.prev
  }
  // Insert each new value, advancing the anchor as we go.
  for (var i = 0; i < nodes.length; i++) {
    walker = insert(this, walker, nodes[i])
  }
  return ret;
}
// Reverse the list in place by swapping each node's prev/next pointers,
// then swapping head and tail. Returns the list for chaining.
Yallist.prototype.reverse = function () {
  var oldHead = this.head
  var oldTail = this.tail

  var walker = oldHead
  while (walker !== null) {
    var tmp = walker.prev
    walker.prev = walker.next
    walker.next = tmp
    // After the swap, `prev` points at what used to be the next node.
    walker = walker.prev
  }

  this.head = oldTail
  this.tail = oldHead
  return this
}
// Internal: insert `value` relative to anchor `node` — before it when the
// anchor is the head, otherwise after it. Fixes up head/tail as needed and
// returns the newly created node.
function insert (self, node, value) {
  var inserted
  if (node === self.head) {
    inserted = new Node(value, null, node, self)
  } else {
    inserted = new Node(value, node, node.next, self)
  }

  // A node with no successor is the new tail; no predecessor, the new head.
  if (inserted.next === null) {
    self.tail = inserted
  }
  if (inserted.prev === null) {
    self.head = inserted
  }

  self.length++
  return inserted
}
// Internal: link a fresh node holding `item` after the current tail.
function push (self, item) {
  var node = new Node(item, self.tail, null, self)
  self.tail = node
  // An empty list gains its first head here.
  self.head = self.head || node
  self.length += 1
}
// Internal: link a fresh node holding `item` in front of the current head.
function unshift (self, item) {
  var node = new Node(item, null, self.head, self)
  self.head = node
  // An empty list gains its first tail here.
  self.tail = self.tail || node
  self.length += 1
}
// List node: holds one value plus prev/next links and the owning list.
// Linking is bidirectional: passing prev/next also updates their pointers
// back to this node. Works with or without `new`.
function Node (value, prev, next, list) {
  if (!(this instanceof Node)) {
    return new Node(value, prev, next, list)
  }

  this.list = list
  this.value = value

  this.prev = prev || null
  if (prev) {
    prev.next = this
  }

  this.next = next || null
  if (next) {
    next.prev = this
  }
}
try {
  // add if support for Symbol.iterator is present
  // (best-effort: environments without Symbol.iterator simply skip this)
  require('./iterator.js')(Yallist)
} catch (er) {}

@ -1,26 +1,45 @@
{ {
"name": "minipass", "name": "minipass",
"version": "4.0.0", "version": "5.0.0",
"description": "minimal implementation of a PassThrough stream", "description": "minimal implementation of a PassThrough stream",
"main": "index.js", "main": "./index.js",
"types": "index.d.ts", "module": "./index.mjs",
"dependencies": { "types": "./index.d.ts",
"yallist": "^4.0.0" "exports": {
".": {
"import": {
"types": "./index.d.ts",
"default": "./index.mjs"
},
"require": {
"types": "./index.d.ts",
"default": "./index.js"
}
},
"./package.json": "./package.json"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "^17.0.41", "@types/node": "^17.0.41",
"end-of-stream": "^1.4.0", "end-of-stream": "^1.4.0",
"node-abort-controller": "^3.1.1",
"prettier": "^2.6.2", "prettier": "^2.6.2",
"tap": "^16.2.0", "tap": "^16.2.0",
"through2": "^2.0.3", "through2": "^2.0.3",
"ts-node": "^10.8.1", "ts-node": "^10.8.1",
"typedoc": "^0.23.24",
"typescript": "^4.7.3" "typescript": "^4.7.3"
}, },
"scripts": { "scripts": {
"pretest": "npm run prepare",
"presnap": "npm run prepare",
"prepare": "node ./scripts/transpile-to-esm.js",
"snap": "tap",
"test": "tap", "test": "tap",
"preversion": "npm test", "preversion": "npm test",
"postversion": "npm publish", "postversion": "npm publish",
"postpublish": "git push origin --follow-tags" "postpublish": "git push origin --follow-tags",
"typedoc": "typedoc ./index.d.ts",
"format": "prettier --write . --loglevel warn"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
@ -34,7 +53,8 @@
"license": "ISC", "license": "ISC",
"files": [ "files": [
"index.d.ts", "index.d.ts",
"index.js" "index.js",
"index.mjs"
], ],
"tap": { "tap": {
"check-coverage": true "check-coverage": true

@ -16,15 +16,15 @@ provided by Node.js when using C++. It provides a C++ object model
and exception handling semantics with low overhead. and exception handling semantics with low overhead.
There are three options for implementing addons: Node-API, nan, or direct There are three options for implementing addons: Node-API, nan, or direct
use of internal V8, libuv and Node.js libraries. Unless there is a need for use of internal V8, libuv, and Node.js libraries. Unless there is a need for
direct access to functionality which is not exposed by Node-API as outlined direct access to functionality that is not exposed by Node-API as outlined
in [C/C++ addons](https://nodejs.org/dist/latest/docs/api/addons.html) in [C/C++ addons](https://nodejs.org/dist/latest/docs/api/addons.html)
in Node.js core, use Node-API. Refer to in Node.js core, use Node-API. Refer to
[C/C++ addons with Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html) [C/C++ addons with Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html)
for more information on Node-API. for more information on Node-API.
Node-API is an ABI stable C interface provided by Node.js for building native Node-API is an ABI stable C interface provided by Node.js for building native
addons. It is independent from the underlying JavaScript runtime (e.g. V8 or ChakraCore) addons. It is independent of the underlying JavaScript runtime (e.g. V8 or ChakraCore)
and is maintained as part of Node.js itself. It is intended to insulate and is maintained as part of Node.js itself. It is intended to insulate
native addons from changes in the underlying JavaScript engine and allow native addons from changes in the underlying JavaScript engine and allow
modules compiled for one version to run on later versions of Node.js without modules compiled for one version to run on later versions of Node.js without
@ -46,7 +46,7 @@ provides an [ABI stability guide][] containing a detailed explanation of ABI
stability in general, and the Node-API ABI stability guarantee in particular. stability in general, and the Node-API ABI stability guarantee in particular.
As new APIs are added to Node-API, node-addon-api must be updated to provide As new APIs are added to Node-API, node-addon-api must be updated to provide
wrappers for those new APIs. For this reason node-addon-api provides wrappers for those new APIs. For this reason, node-addon-api provides
methods that allow callers to obtain the underlying Node-API handles so methods that allow callers to obtain the underlying Node-API handles so
direct calls to Node-API and the use of the objects/methods provided by direct calls to Node-API and the use of the objects/methods provided by
node-addon-api can be used together. For example, in order to be able node-addon-api can be used together. For example, in order to be able
@ -56,7 +56,7 @@ APIs exposed by node-addon-api are generally used to create and
manipulate JavaScript values. Concepts and operations generally map manipulate JavaScript values. Concepts and operations generally map
to ideas specified in the **ECMA262 Language Specification**. to ideas specified in the **ECMA262 Language Specification**.
The [Node-API Resource](https://nodejs.github.io/node-addon-examples/) offers an The [Node-API Resource](https://nodejs.github.io/node-addon-examples/) offers an
excellent orientation and tips for developers just getting started with Node-API excellent orientation and tips for developers just getting started with Node-API
and node-addon-api. and node-addon-api.
@ -70,7 +70,7 @@ and node-addon-api.
- **[Contributors](#contributors)** - **[Contributors](#contributors)**
- **[License](#license)** - **[License](#license)**
## **Current version: 5.0.0** ## **Current version: 5.1.0**
(See [CHANGELOG.md](CHANGELOG.md) for complete Changelog) (See [CHANGELOG.md](CHANGELOG.md) for complete Changelog)
@ -187,6 +187,28 @@ npm test --NAPI_VERSION=X
where X is the version of Node-API you want to target. where X is the version of Node-API you want to target.
To run a specific unit test, filter conditions are available
**Example:**
compile and run only tests on objectwrap.cc and objectwrap.js
```
npm run unit --filter=objectwrap
```
Multiple unit tests can be selected with wildcards
**Example:**
compile and run all test files ending with "reference" -> function_reference.cc, object_reference.cc, reference.cc
```
npm run unit --filter=*reference
```
Multiple filter conditions can be joined to broaden the test selection
**Example:**
compile and run all tests under folders threadsafe_function and typed_threadsafe_function and also the objectwrap.cc file
npm run unit --filter='*function objectwrap'
### **Debug** ### **Debug**
To run the **node-addon-api** tests with `--debug` option: To run the **node-addon-api** tests with `--debug` option:
@ -195,7 +217,7 @@ To run the **node-addon-api** tests with `--debug` option:
npm run-script dev npm run-script dev
``` ```
If you want faster build, you might use the following option: If you want a faster build, you might use the following option:
``` ```
npm run-script dev:incremental npm run-script dev:incremental
@ -223,7 +245,7 @@ See [benchmark/README.md](benchmark/README.md) for more details about running an
As node-addon-api's core mission is to expose the plain C Node-API as C++ As node-addon-api's core mission is to expose the plain C Node-API as C++
wrappers, tools that facilitate n-api/node-addon-api providing more wrappers, tools that facilitate n-api/node-addon-api providing more
convenient patterns on developing a Node.js add-ons with n-api/node-addon-api convenient patterns for developing a Node.js add-on with n-api/node-addon-api
can be published to NPM as standalone packages. It is also recommended to tag can be published to NPM as standalone packages. It is also recommended to tag
such packages with `node-addon-api` to provide more visibility to the community. such packages with `node-addon-api` to provide more visibility to the community.
@ -269,19 +291,21 @@ See [CONTRIBUTING.md](CONTRIBUTING.md) for more details on our philosophy around
| ------------------- | ----------------------------------------------------- | | ------------------- | ----------------------------------------------------- |
| Anna Henningsen | [addaleax](https://github.com/addaleax) | | Anna Henningsen | [addaleax](https://github.com/addaleax) |
| Chengzhong Wu | [legendecas](https://github.com/legendecas) | | Chengzhong Wu | [legendecas](https://github.com/legendecas) |
| Gabriel Schulhof | [gabrielschulhof](https://github.com/gabrielschulhof) | | Jack Xia | [JckXia](https://github.com/JckXia) |
| Jim Schlight | [jschlight](https://github.com/jschlight) | | Kevin Eady | [KevinEady](https://github.com/KevinEady) |
| Michael Dawson | [mhdawson](https://github.com/mhdawson) | | Michael Dawson | [mhdawson](https://github.com/mhdawson) |
| Kevin Eady | [KevinEady](https://github.com/KevinEady)
| Nicola Del Gobbo | [NickNaso](https://github.com/NickNaso) | | Nicola Del Gobbo | [NickNaso](https://github.com/NickNaso) |
| Vladimir Morozov | [vmoroz](https://github.com/vmoroz) |
### Emeritus ### Emeritus
| Name | GitHub Link | | Name | GitHub Link |
| ------------------- | ----------------------------------------------------- | | ------------------- | ----------------------------------------------------- |
| Arunesh Chandra | [aruneshchandra](https://github.com/aruneshchandra) | | Arunesh Chandra | [aruneshchandra](https://github.com/aruneshchandra) |
| Benjamin Byholm | [kkoopa](https://github.com/kkoopa) | | Benjamin Byholm | [kkoopa](https://github.com/kkoopa) |
| Jason Ginchereau | [jasongin](https://github.com/jasongin) | | Gabriel Schulhof | [gabrielschulhof](https://github.com/gabrielschulhof) |
| Hitesh Kanwathirtha | [digitalinfinity](https://github.com/digitalinfinity) | | Hitesh Kanwathirtha | [digitalinfinity](https://github.com/digitalinfinity) |
| Jason Ginchereau | [jasongin](https://github.com/jasongin) |
| Jim Schlight | [jschlight](https://github.com/jschlight) |
| Sampson Gao | [sampsongao](https://github.com/sampsongao) | | Sampson Gao | [sampsongao](https://github.com/sampsongao) |
| Taylor Woll | [boingoing](https://github.com/boingoing) | | Taylor Woll | [boingoing](https://github.com/boingoing) |

@ -1,11 +1,11 @@
const path = require('path'); const path = require('path');
const include_dir = path.relative('.', __dirname); const includeDir = path.relative('.', __dirname);
module.exports = { module.exports = {
include: `"${__dirname}"`, // deprecated, can be removed as part of 4.0.0 include: `"${__dirname}"`, // deprecated, can be removed as part of 4.0.0
include_dir, include_dir: includeDir,
gyp: path.join(include_dir, 'node_api.gyp:nothing'), gyp: path.join(includeDir, 'node_api.gyp:nothing'),
isNodeApiBuiltin: true, isNodeApiBuiltin: true,
needsFlag: false needsFlag: false
}; };

@ -6,8 +6,8 @@
//////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////
template <typename Getter> template <typename Getter>
inline PropertyDescriptor inline PropertyDescriptor PropertyDescriptor::Accessor(
PropertyDescriptor::Accessor(const char* utf8name, const char* utf8name,
Getter getter, Getter getter,
napi_property_attributes attributes, napi_property_attributes attributes,
void* /*data*/) { void* /*data*/) {
@ -15,20 +15,19 @@ PropertyDescriptor::Accessor(const char* utf8name,
// TODO: Delete when the function is destroyed // TODO: Delete when the function is destroyed
auto callbackData = new CbData({getter, nullptr}); auto callbackData = new CbData({getter, nullptr});
return PropertyDescriptor({ return PropertyDescriptor({utf8name,
utf8name,
nullptr, nullptr,
nullptr, nullptr,
CbData::Wrapper, CbData::Wrapper,
nullptr, nullptr,
nullptr, nullptr,
attributes, attributes,
callbackData callbackData});
});
} }
template <typename Getter> template <typename Getter>
inline PropertyDescriptor PropertyDescriptor::Accessor(const std::string& utf8name, inline PropertyDescriptor PropertyDescriptor::Accessor(
const std::string& utf8name,
Getter getter, Getter getter,
napi_property_attributes attributes, napi_property_attributes attributes,
void* data) { void* data) {
@ -36,7 +35,8 @@ inline PropertyDescriptor PropertyDescriptor::Accessor(const std::string& utf8na
} }
template <typename Getter> template <typename Getter>
inline PropertyDescriptor PropertyDescriptor::Accessor(napi_value name, inline PropertyDescriptor PropertyDescriptor::Accessor(
napi_value name,
Getter getter, Getter getter,
napi_property_attributes attributes, napi_property_attributes attributes,
void* /*data*/) { void* /*data*/) {
@ -44,29 +44,26 @@ inline PropertyDescriptor PropertyDescriptor::Accessor(napi_value name,
// TODO: Delete when the function is destroyed // TODO: Delete when the function is destroyed
auto callbackData = new CbData({getter, nullptr}); auto callbackData = new CbData({getter, nullptr});
return PropertyDescriptor({ return PropertyDescriptor({nullptr,
nullptr,
name, name,
nullptr, nullptr,
CbData::Wrapper, CbData::Wrapper,
nullptr, nullptr,
nullptr, nullptr,
attributes, attributes,
callbackData callbackData});
});
} }
template <typename Getter> template <typename Getter>
inline PropertyDescriptor PropertyDescriptor::Accessor(Name name, inline PropertyDescriptor PropertyDescriptor::Accessor(
Getter getter, Name name, Getter getter, napi_property_attributes attributes, void* data) {
napi_property_attributes attributes,
void* data) {
napi_value nameValue = name; napi_value nameValue = name;
return PropertyDescriptor::Accessor(nameValue, getter, attributes, data); return PropertyDescriptor::Accessor(nameValue, getter, attributes, data);
} }
template <typename Getter, typename Setter> template <typename Getter, typename Setter>
inline PropertyDescriptor PropertyDescriptor::Accessor(const char* utf8name, inline PropertyDescriptor PropertyDescriptor::Accessor(
const char* utf8name,
Getter getter, Getter getter,
Setter setter, Setter setter,
napi_property_attributes attributes, napi_property_attributes attributes,
@ -75,20 +72,19 @@ inline PropertyDescriptor PropertyDescriptor::Accessor(const char* utf8name,
// TODO: Delete when the function is destroyed // TODO: Delete when the function is destroyed
auto callbackData = new CbData({getter, setter, nullptr}); auto callbackData = new CbData({getter, setter, nullptr});
return PropertyDescriptor({ return PropertyDescriptor({utf8name,
utf8name,
nullptr, nullptr,
nullptr, nullptr,
CbData::GetterWrapper, CbData::GetterWrapper,
CbData::SetterWrapper, CbData::SetterWrapper,
nullptr, nullptr,
attributes, attributes,
callbackData callbackData});
});
} }
template <typename Getter, typename Setter> template <typename Getter, typename Setter>
inline PropertyDescriptor PropertyDescriptor::Accessor(const std::string& utf8name, inline PropertyDescriptor PropertyDescriptor::Accessor(
const std::string& utf8name,
Getter getter, Getter getter,
Setter setter, Setter setter,
napi_property_attributes attributes, napi_property_attributes attributes,
@ -97,7 +93,8 @@ inline PropertyDescriptor PropertyDescriptor::Accessor(const std::string& utf8na
} }
template <typename Getter, typename Setter> template <typename Getter, typename Setter>
inline PropertyDescriptor PropertyDescriptor::Accessor(napi_value name, inline PropertyDescriptor PropertyDescriptor::Accessor(
napi_value name,
Getter getter, Getter getter,
Setter setter, Setter setter,
napi_property_attributes attributes, napi_property_attributes attributes,
@ -106,30 +103,31 @@ inline PropertyDescriptor PropertyDescriptor::Accessor(napi_value name,
// TODO: Delete when the function is destroyed // TODO: Delete when the function is destroyed
auto callbackData = new CbData({getter, setter, nullptr}); auto callbackData = new CbData({getter, setter, nullptr});
return PropertyDescriptor({ return PropertyDescriptor({nullptr,
nullptr,
name, name,
nullptr, nullptr,
CbData::GetterWrapper, CbData::GetterWrapper,
CbData::SetterWrapper, CbData::SetterWrapper,
nullptr, nullptr,
attributes, attributes,
callbackData callbackData});
});
} }
template <typename Getter, typename Setter> template <typename Getter, typename Setter>
inline PropertyDescriptor PropertyDescriptor::Accessor(Name name, inline PropertyDescriptor PropertyDescriptor::Accessor(
Name name,
Getter getter, Getter getter,
Setter setter, Setter setter,
napi_property_attributes attributes, napi_property_attributes attributes,
void* data) { void* data) {
napi_value nameValue = name; napi_value nameValue = name;
return PropertyDescriptor::Accessor(nameValue, getter, setter, attributes, data); return PropertyDescriptor::Accessor(
nameValue, getter, setter, attributes, data);
} }
template <typename Callable> template <typename Callable>
inline PropertyDescriptor PropertyDescriptor::Function(const char* utf8name, inline PropertyDescriptor PropertyDescriptor::Function(
const char* utf8name,
Callable cb, Callable cb,
napi_property_attributes attributes, napi_property_attributes attributes,
void* /*data*/) { void* /*data*/) {
@ -138,20 +136,19 @@ inline PropertyDescriptor PropertyDescriptor::Function(const char* utf8name,
// TODO: Delete when the function is destroyed // TODO: Delete when the function is destroyed
auto callbackData = new CbData({cb, nullptr}); auto callbackData = new CbData({cb, nullptr});
return PropertyDescriptor({ return PropertyDescriptor({utf8name,
utf8name,
nullptr, nullptr,
CbData::Wrapper, CbData::Wrapper,
nullptr, nullptr,
nullptr, nullptr,
nullptr, nullptr,
attributes, attributes,
callbackData callbackData});
});
} }
template <typename Callable> template <typename Callable>
inline PropertyDescriptor PropertyDescriptor::Function(const std::string& utf8name, inline PropertyDescriptor PropertyDescriptor::Function(
const std::string& utf8name,
Callable cb, Callable cb,
napi_property_attributes attributes, napi_property_attributes attributes,
void* data) { void* data) {
@ -159,7 +156,8 @@ inline PropertyDescriptor PropertyDescriptor::Function(const std::string& utf8na
} }
template <typename Callable> template <typename Callable>
inline PropertyDescriptor PropertyDescriptor::Function(napi_value name, inline PropertyDescriptor PropertyDescriptor::Function(
napi_value name,
Callable cb, Callable cb,
napi_property_attributes attributes, napi_property_attributes attributes,
void* /*data*/) { void* /*data*/) {
@ -168,23 +166,19 @@ inline PropertyDescriptor PropertyDescriptor::Function(napi_value name,
// TODO: Delete when the function is destroyed // TODO: Delete when the function is destroyed
auto callbackData = new CbData({cb, nullptr}); auto callbackData = new CbData({cb, nullptr});
return PropertyDescriptor({ return PropertyDescriptor({nullptr,
nullptr,
name, name,
CbData::Wrapper, CbData::Wrapper,
nullptr, nullptr,
nullptr, nullptr,
nullptr, nullptr,
attributes, attributes,
callbackData callbackData});
});
} }
template <typename Callable> template <typename Callable>
inline PropertyDescriptor PropertyDescriptor::Function(Name name, inline PropertyDescriptor PropertyDescriptor::Function(
Callable cb, Name name, Callable cb, napi_property_attributes attributes, void* data) {
napi_property_attributes attributes,
void* data) {
napi_value nameValue = name; napi_value nameValue = name;
return PropertyDescriptor::Function(nameValue, cb, attributes, data); return PropertyDescriptor::Function(nameValue, cb, attributes, data);
} }

File diff suppressed because it is too large Load Diff

527
node_modules/node-addon-api/napi.h generated vendored

File diff suppressed because it is too large Load Diff

@ -15,6 +15,10 @@
"name": "Alexander Floh", "name": "Alexander Floh",
"url": "https://github.com/alexanderfloh" "url": "https://github.com/alexanderfloh"
}, },
{
"name": "Ammar Faizi",
"url": "https://github.com/ammarfaizi2"
},
{ {
"name": "András Timár, Dr", "name": "András Timár, Dr",
"url": "https://github.com/timarandras" "url": "https://github.com/timarandras"
@ -71,6 +75,10 @@
"name": "Daniel Bevenius", "name": "Daniel Bevenius",
"url": "https://github.com/danbev" "url": "https://github.com/danbev"
}, },
{
"name": "Dante Calderón",
"url": "https://github.com/dantehemerson"
},
{ {
"name": "Darshan Sen", "name": "Darshan Sen",
"url": "https://github.com/RaisinTen" "url": "https://github.com/RaisinTen"
@ -103,6 +111,10 @@
"name": "extremeheat", "name": "extremeheat",
"url": "https://github.com/extremeheat" "url": "https://github.com/extremeheat"
}, },
{
"name": "Feng Yu",
"url": "https://github.com/F3n67u"
},
{ {
"name": "Ferdinand Holzer", "name": "Ferdinand Holzer",
"url": "https://github.com/fholzer" "url": "https://github.com/fholzer"
@ -147,6 +159,10 @@
"name": "Jason Ginchereau", "name": "Jason Ginchereau",
"url": "https://github.com/jasongin" "url": "https://github.com/jasongin"
}, },
{
"name": "Jenny",
"url": "https://github.com/egg-bread"
},
{ {
"name": "Jeroen Janssen", "name": "Jeroen Janssen",
"url": "https://github.com/japj" "url": "https://github.com/japj"
@ -167,6 +183,10 @@
"name": "joshgarde", "name": "joshgarde",
"url": "https://github.com/joshgarde" "url": "https://github.com/joshgarde"
}, },
{
"name": "Julian Mesa",
"url": "https://github.com/julianmesa-gitkraken"
},
{ {
"name": "Kasumi Hanazuki", "name": "Kasumi Hanazuki",
"url": "https://github.com/hanazuki" "url": "https://github.com/hanazuki"
@ -199,6 +219,10 @@
"name": "Kyle Farnung", "name": "Kyle Farnung",
"url": "https://github.com/kfarnung" "url": "https://github.com/kfarnung"
}, },
{
"name": "Kyle Kovacs",
"url": "https://github.com/nullromo"
},
{ {
"name": "legendecas", "name": "legendecas",
"url": "https://github.com/legendecas" "url": "https://github.com/legendecas"
@ -267,6 +291,10 @@
"name": "pacop", "name": "pacop",
"url": "https://github.com/pacop" "url": "https://github.com/pacop"
}, },
{
"name": "Peter Šándor",
"url": "https://github.com/petersandor"
},
{ {
"name": "Philipp Renoth", "name": "Philipp Renoth",
"url": "https://github.com/DaAitch" "url": "https://github.com/DaAitch"
@ -275,6 +303,10 @@
"name": "rgerd", "name": "rgerd",
"url": "https://github.com/rgerd" "url": "https://github.com/rgerd"
}, },
{
"name": "Richard Lau",
"url": "https://github.com/richardlau"
},
{ {
"name": "Rolf Timmermans", "name": "Rolf Timmermans",
"url": "https://github.com/rolftimmermans" "url": "https://github.com/rolftimmermans"
@ -287,6 +319,10 @@
"name": "Ryuichi Okumura", "name": "Ryuichi Okumura",
"url": "https://github.com/okuryu" "url": "https://github.com/okuryu"
}, },
{
"name": "Saint Gabriel",
"url": "https://github.com/chineduG"
},
{ {
"name": "Sampson Gao", "name": "Sampson Gao",
"url": "https://github.com/sampsongao" "url": "https://github.com/sampsongao"
@ -351,6 +387,10 @@
{ {
"name": "Ziqiu Zhao", "name": "Ziqiu Zhao",
"url": "https://github.com/ZzqiZQute" "url": "https://github.com/ZzqiZQute"
},
{
"name": "Feng Yu",
"url": "https://github.com/F3n67u"
} }
], ],
"description": "Node.js API (Node-API)", "description": "Node.js API (Node-API)",
@ -401,6 +441,7 @@
"benchmark": "node benchmark", "benchmark": "node benchmark",
"pretest": "node-gyp rebuild -C test", "pretest": "node-gyp rebuild -C test",
"test": "node test", "test": "node test",
"test:debug": "node-gyp rebuild -C test --debug && NODE_API_BUILD_CONFIG=Debug node ./test/index.js",
"predev": "node-gyp rebuild -C test --debug", "predev": "node-gyp rebuild -C test --debug",
"dev": "node test", "dev": "node test",
"predev:incremental": "node-gyp configure build -C test --debug", "predev:incremental": "node-gyp configure build -C test --debug",
@ -410,6 +451,6 @@
"lint:fix": "node tools/clang-format --fix && node tools/eslint-format --fix" "lint:fix": "node tools/clang-format --fix && node tools/eslint-format --fix"
}, },
"pre-commit": "lint", "pre-commit": "lint",
"version": "5.0.0", "version": "5.1.0",
"support": true "support": true
} }

@ -4,16 +4,15 @@
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
const child_process = require('child_process');
// Read the output of the command, break it into lines, and use the reducer to // Read the output of the command, break it into lines, and use the reducer to
// decide whether the file is an N-API module or not. // decide whether the file is an N-API module or not.
function checkFile (file, command, argv, reducer) { function checkFile (file, command, argv, reducer) {
const child = child_process.spawn(command, argv, { const child = require('child_process').spawn(command, argv, {
stdio: ['inherit', 'pipe', 'inherit'] stdio: ['inherit', 'pipe', 'inherit']
}); });
let leftover = ''; let leftover = '';
let isNapi = undefined; let isNapi;
child.stdout.on('data', (chunk) => { child.stdout.on('data', (chunk) => {
if (isNapi === undefined) { if (isNapi === undefined) {
chunk = (leftover + chunk.toString()).split(/[\r\n]+/); chunk = (leftover + chunk.toString()).split(/[\r\n]+/);
@ -71,13 +70,13 @@ function checkFileWin32(file) {
function recurse (top) { function recurse (top) {
fs.readdir(top, (error, items) => { fs.readdir(top, (error, items) => {
if (error) { if (error) {
throw ("error reading directory " + top + ": " + error); throw new Error('error reading directory ' + top + ': ' + error);
} }
items.forEach((item) => { items.forEach((item) => {
item = path.join(top, item); item = path.join(top, item);
fs.stat(item, ((item) => (error, stats) => { fs.stat(item, ((item) => (error, stats) => {
if (error) { if (error) {
throw ("error about " + item + ": " + error); throw new Error('error about ' + item + ': ' + error);
} }
if (stats.isDirectory()) { if (stats.isDirectory()) {
recurse(item); recurse(item);
@ -86,9 +85,9 @@ function recurse(top) {
// artefacts of node-addon-api having identified a version of // artefacts of node-addon-api having identified a version of
// Node.js that ships with a correct implementation of N-API. // Node.js that ships with a correct implementation of N-API.
path.basename(item) !== 'nothing.node') { path.basename(item) !== 'nothing.node') {
process.platform === 'win32' ? process.platform === 'win32'
checkFileWin32(item) : ? checkFileWin32(item)
checkFileUNIX(item); : checkFileUNIX(item);
} }
})(item)); })(item));
}); });

@ -1,6 +1,6 @@
#! /usr/bin/env node #! /usr/bin/env node
'use strict' 'use strict';
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
@ -15,8 +15,9 @@ if (!dir) {
const NodeApiVersion = require('../package.json').version; const NodeApiVersion = require('../package.json').version;
const disable = args[1]; const disable = args[1];
if (disable != "--disable" && dir != "--disable") { let ConfigFileOperations;
var ConfigFileOperations = { if (disable !== '--disable' && dir !== '--disable') {
ConfigFileOperations = {
'package.json': [ 'package.json': [
[/([ ]*)"dependencies": {/g, '$1"dependencies": {\n$1 "node-addon-api": "' + NodeApiVersion + '",'], [/([ ]*)"dependencies": {/g, '$1"dependencies": {\n$1 "node-addon-api": "' + NodeApiVersion + '",'],
[/[ ]*"nan": *"[^"]+"(,|)[\n\r]/g, ''] [/[ ]*"nan": *"[^"]+"(,|)[\n\r]/g, '']
@ -25,26 +26,26 @@ if (disable != "--disable" && dir != "--disable") {
[/([ ]*)'include_dirs': \[/g, '$1\'include_dirs\': [\n$1 \'<!(node -p "require(\\\'node-addon-api\\\').include_dir")\','], [/([ ]*)'include_dirs': \[/g, '$1\'include_dirs\': [\n$1 \'<!(node -p "require(\\\'node-addon-api\\\').include_dir")\','],
[/([ ]*)"include_dirs": \[/g, '$1"include_dirs": [\n$1 "<!(node -p \\"require(\'node-addon-api\').include_dir\\")",'], [/([ ]*)"include_dirs": \[/g, '$1"include_dirs": [\n$1 "<!(node -p \\"require(\'node-addon-api\').include_dir\\")",'],
[/[ ]*("|')<!\(node -e ("|'|\\"|\\')require\(("|'|\\"|\\')nan("|'|\\"|\\')\)("|'|\\"|\\')\)("|')(,|)[\r\n]/g, ''], [/[ ]*("|')<!\(node -e ("|'|\\"|\\')require\(("|'|\\"|\\')nan("|'|\\"|\\')\)("|'|\\"|\\')\)("|')(,|)[\r\n]/g, ''],
[ /([ ]*)("|')target_name("|'): ("|')(.+?)("|'),/g, '$1$2target_name$2: $4$5$6,\n $2cflags!$2: [ $2-fno-exceptions$2 ],\n $2cflags_cc!$2: [ $2-fno-exceptions$2 ],\n $2xcode_settings$2: { $2GCC_ENABLE_CPP_EXCEPTIONS$2: $2YES$2,\n $2CLANG_CXX_LIBRARY$2: $2libc++$2,\n $2MACOSX_DEPLOYMENT_TARGET$2: $210.7$2,\n },\n $2msvs_settings$2: {\n $2VCCLCompilerTool$2: { $2ExceptionHandling$2: 1 },\n },' ], [/([ ]*)("|')target_name("|'): ("|')(.+?)("|'),/g, '$1$2target_name$2: $4$5$6,\n $2cflags!$2: [ $2-fno-exceptions$2 ],\n $2cflags_cc!$2: [ $2-fno-exceptions$2 ],\n $2xcode_settings$2: { $2GCC_ENABLE_CPP_EXCEPTIONS$2: $2YES$2,\n $2CLANG_CXX_LIBRARY$2: $2libc++$2,\n $2MACOSX_DEPLOYMENT_TARGET$2: $210.7$2,\n },\n $2msvs_settings$2: {\n $2VCCLCompilerTool$2: { $2ExceptionHandling$2: 1 },\n },']
] ]
}; };
} else { } else {
var ConfigFileOperations = { ConfigFileOperations = {
'package.json': [ 'package.json': [
[/([ ]*)"dependencies": {/g, '$1"dependencies": {\n$1 "node-addon-api": "' + NodeApiVersion + '",'], [/([ ]*)"dependencies": {/g, '$1"dependencies": {\n$1 "node-addon-api": "' + NodeApiVersion + '",'],
[/[ ]*"nan": *"[^"]+"(,|)[\n\r]/g, ''] [/[ ]*"nan": *"[^"]+"(,|)[\n\r]/g, '']
], ],
'binding.gyp': [ 'binding.gyp': [
[/([ ]*)'include_dirs': \[/g, '$1\'include_dirs\': [\n$1 \'<!(node -p "require(\\\'node-addon-api\\\').include_dir")\','], [/([ ]*)'include_dirs': \[/g, '$1\'include_dirs\': [\n$1 \'<!(node -p "require(\\\'node-addon-api\\\').include_dir")\','],
[ /([ ]*)"include_dirs": \[/g, '$1"include_dirs": [\n$1 "<!(node -p \'require(\\\"node-addon-api\\\").include_dir\')",' ], [/([ ]*)"include_dirs": \[/g, '$1"include_dirs": [\n$1 "<!(node -p \'require(\\"node-addon-api\\").include_dir\')",'],
[/[ ]*("|')<!\(node -e ("|'|\\"|\\')require\(("|'|\\"|\\')nan("|'|\\"|\\')\)("|'|\\"|\\')\)("|')(,|)[\r\n]/g, ''], [/[ ]*("|')<!\(node -e ("|'|\\"|\\')require\(("|'|\\"|\\')nan("|'|\\"|\\')\)("|'|\\"|\\')\)("|')(,|)[\r\n]/g, ''],
[ /([ ]*)("|')target_name("|'): ("|')(.+?)("|'),/g, '$1$2target_name$2: $4$5$6,\n $2cflags!$2: [ $2-fno-exceptions$2 ],\n $2cflags_cc!$2: [ $2-fno-exceptions$2 ],\n $2defines$2: [ $2NAPI_DISABLE_CPP_EXCEPTIONS$2 ],\n $2conditions$2: [\n [\'OS==\"win\"\', { $2defines$2: [ $2_HAS_EXCEPTIONS=1$2 ] }]\n ]' ], [/([ ]*)("|')target_name("|'): ("|')(.+?)("|'),/g, '$1$2target_name$2: $4$5$6,\n $2cflags!$2: [ $2-fno-exceptions$2 ],\n $2cflags_cc!$2: [ $2-fno-exceptions$2 ],\n $2defines$2: [ $2NAPI_DISABLE_CPP_EXCEPTIONS$2 ],\n $2conditions$2: [\n [\'OS=="win"\', { $2defines$2: [ $2_HAS_EXCEPTIONS=1$2 ] }]\n ]']
] ]
}; };
} }
var SourceFileOperations = [ const SourceFileOperations = [
[ /Nan::SetMethod\(target,[\s]*\"(.*)\"[\s]*,[\s]*([^)]+)\)/g, 'exports.Set(Napi::String::New(env, \"$1\"), Napi::Function::New(env, $2))' ], [/Nan::SetMethod\(target,[\s]*"(.*)"[\s]*,[\s]*([^)]+)\)/g, 'exports.Set(Napi::String::New(env, "$1"), Napi::Function::New(env, $2))'],
[/v8::Local<v8::FunctionTemplate>\s+(\w+)\s*=\s*Nan::New<FunctionTemplate>\([\w\d:]+\);(?:\w+->Reset\(\1\))?\s+\1->SetClassName\(Nan::String::New\("(\w+)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$2", {'], [/v8::Local<v8::FunctionTemplate>\s+(\w+)\s*=\s*Nan::New<FunctionTemplate>\([\w\d:]+\);(?:\w+->Reset\(\1\))?\s+\1->SetClassName\(Nan::String::New\("(\w+)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$2", {'],
[/Local<FunctionTemplate>\s+(\w+)\s*=\s*Nan::New<FunctionTemplate>\([\w\d:]+\);\s+(\w+)\.Reset\((\1)\);\s+\1->SetClassName\((Nan::String::New|Nan::New<(v8::)*String>)\("(.+?)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$6", {'], [/Local<FunctionTemplate>\s+(\w+)\s*=\s*Nan::New<FunctionTemplate>\([\w\d:]+\);\s+(\w+)\.Reset\((\1)\);\s+\1->SetClassName\((Nan::String::New|Nan::New<(v8::)*String>)\("(.+?)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$6", {'],
@ -62,7 +63,6 @@ var SourceFileOperations = [
[/v8::FunctionTemplate/g, 'Napi::FunctionReference'], [/v8::FunctionTemplate/g, 'Napi::FunctionReference'],
[/FunctionTemplate/g, 'Napi::FunctionReference'], [/FunctionTemplate/g, 'Napi::FunctionReference'],
[/([ ]*)Nan::SetPrototypeMethod\(\w+, "(\w+)", (\w+)\);/g, '$1InstanceMethod("$2", &$3),'], [/([ ]*)Nan::SetPrototypeMethod\(\w+, "(\w+)", (\w+)\);/g, '$1InstanceMethod("$2", &$3),'],
[/([ ]*)(?:\w+\.Reset\(\w+\);\s+)?\(target\)\.Set\("(\w+)",\s*Nan::GetFunction\((\w+)\)\);/gm, [/([ ]*)(?:\w+\.Reset\(\w+\);\s+)?\(target\)\.Set\("(\w+)",\s*Nan::GetFunction\((\w+)\)\);/gm,
'});\n\n' + '});\n\n' +
@ -70,7 +70,6 @@ var SourceFileOperations = [
'$1constructor.SuppressDestruct();\n' + '$1constructor.SuppressDestruct();\n' +
'$1target.Set("$2", $3);'], '$1target.Set("$2", $3);'],
// TODO: Other attribute combinations // TODO: Other attribute combinations
[/static_cast<PropertyAttribute>\(ReadOnly\s*\|\s*DontDelete\)/gm, [/static_cast<PropertyAttribute>\(ReadOnly\s*\|\s*DontDelete\)/gm,
'static_cast<napi_property_attributes>(napi_enumerable | napi_configurable)'], 'static_cast<napi_property_attributes>(napi_enumerable | napi_configurable)'],
@ -88,7 +87,7 @@ var SourceFileOperations = [
[/Nan::MakeCallback\(([^,]+),[\s\\]+([^,]+),/gm, '$2.MakeCallback($1,'], [/Nan::MakeCallback\(([^,]+),[\s\\]+([^,]+),/gm, '$2.MakeCallback($1,'],
[/class\s+(\w+)\s*:\s*public\s+Nan::ObjectWrap/g, 'class $1 : public Napi::ObjectWrap<$1>'], [/class\s+(\w+)\s*:\s*public\s+Nan::ObjectWrap/g, 'class $1 : public Napi::ObjectWrap<$1>'],
[ /(\w+)\(([^\)]*)\)\s*:\s*Nan::ObjectWrap\(\)\s*(,)?/gm, '$1($2) : Napi::ObjectWrap<$1>()$3' ], [/(\w+)\(([^)]*)\)\s*:\s*Nan::ObjectWrap\(\)\s*(,)?/gm, '$1($2) : Napi::ObjectWrap<$1>()$3'],
// HandleOKCallback to OnOK // HandleOKCallback to OnOK
[/HandleOKCallback/g, 'OnOK'], [/HandleOKCallback/g, 'OnOK'],
@ -101,7 +100,7 @@ var SourceFileOperations = [
// ex. Nan::New<Number>(info[0]) to Napi::Number::New(info[0]) // ex. Nan::New<Number>(info[0]) to Napi::Number::New(info[0])
[/Nan::New<(v8::)*Integer>\((.+?)\)/g, 'Napi::Number::New(env, $2)'], [/Nan::New<(v8::)*Integer>\((.+?)\)/g, 'Napi::Number::New(env, $2)'],
[ /Nan::New\(([0-9\.]+)\)/g, 'Napi::Number::New(env, $1)' ], [/Nan::New\(([0-9.]+)\)/g, 'Napi::Number::New(env, $1)'],
[/Nan::New<(v8::)*String>\("(.+?)"\)/g, 'Napi::String::New(env, "$2")'], [/Nan::New<(v8::)*String>\("(.+?)"\)/g, 'Napi::String::New(env, "$2")'],
[/Nan::New\("(.+?)"\)/g, 'Napi::String::New(env, "$1")'], [/Nan::New\("(.+?)"\)/g, 'Napi::String::New(env, "$1")'],
[/Nan::New<(v8::)*(.+?)>\(\)/g, 'Napi::$2::New(env)'], [/Nan::New<(v8::)*(.+?)>\(\)/g, 'Napi::$2::New(env)'],
@ -115,7 +114,6 @@ var SourceFileOperations = [
[/\.IsInt32\(\)/g, '.IsNumber()'], [/\.IsInt32\(\)/g, '.IsNumber()'],
[/->IsInt32\(\)/g, '.IsNumber()'], [/->IsInt32\(\)/g, '.IsNumber()'],
[/(.+?)->BooleanValue\(\)/g, '$1.As<Napi::Boolean>().Value()'], [/(.+?)->BooleanValue\(\)/g, '$1.As<Napi::Boolean>().Value()'],
[/(.+?)->Int32Value\(\)/g, '$1.As<Napi::Number>().Int32Value()'], [/(.+?)->Int32Value\(\)/g, '$1.As<Napi::Number>().Int32Value()'],
[/(.+?)->Uint32Value\(\)/g, '$1.As<Napi::Number>().Uint32Value()'], [/(.+?)->Uint32Value\(\)/g, '$1.As<Napi::Number>().Uint32Value()'],
@ -154,7 +152,6 @@ var SourceFileOperations = [
[/\.Set\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\s*,/gm, '.Set($1,'], [/\.Set\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\s*,/gm, '.Set($1,'],
[/\.Set\([\s|\\]*Nan::New\(([^)]+)\)\s*,/gm, '.Set($1,'], [/\.Set\([\s|\\]*Nan::New\(([^)]+)\)\s*,/gm, '.Set($1,'],
// ex. node::Buffer::HasInstance(info[0]) to info[0].IsBuffer() // ex. node::Buffer::HasInstance(info[0]) to info[0].IsBuffer()
[/node::Buffer::HasInstance\((.+?)\)/g, '$1.IsBuffer()'], [/node::Buffer::HasInstance\((.+?)\)/g, '$1.IsBuffer()'],
// ex. node::Buffer::Length(info[0]) to info[0].Length() // ex. node::Buffer::Length(info[0]) to info[0].Length()
@ -183,13 +180,10 @@ var SourceFileOperations = [
// [ /Nan::GetPropertyNames\(([^,]+)\)/, '$1->GetPropertyNames()' ], // [ /Nan::GetPropertyNames\(([^,]+)\)/, '$1->GetPropertyNames()' ],
[/Nan::Equals\(([^,]+),/g, '$1.StrictEquals('], [/Nan::Equals\(([^,]+),/g, '$1.StrictEquals('],
[/(.+)->Set\(/g, '$1.Set('],
[ /(.+)->Set\(/g, '$1.Set\(' ],
[/Nan::Callback/g, 'Napi::FunctionReference'], [/Nan::Callback/g, 'Napi::FunctionReference'],
[/Nan::Persistent<Object>/g, 'Napi::ObjectReference'], [/Nan::Persistent<Object>/g, 'Napi::ObjectReference'],
[/Nan::ADDON_REGISTER_FUNCTION_ARGS_TYPE target/g, 'Napi::Env& env, Napi::Object& target'], [/Nan::ADDON_REGISTER_FUNCTION_ARGS_TYPE target/g, 'Napi::Env& env, Napi::Object& target'],
@ -209,7 +203,6 @@ var SourceFileOperations = [
[/NAN_MODULE_INIT\(([\w\d:]+?)\);/g, 'Napi::Object $1(Napi::Env env, Napi::Object exports);'], [/NAN_MODULE_INIT\(([\w\d:]+?)\);/g, 'Napi::Object $1(Napi::Env env, Napi::Object exports);'],
[/NAN_MODULE_INIT\(([\w\d:]+?)\)/g, 'Napi::Object $1(Napi::Env env, Napi::Object exports)'], [/NAN_MODULE_INIT\(([\w\d:]+?)\)/g, 'Napi::Object $1(Napi::Env env, Napi::Object exports)'],
[/::(Init(?:ialize)?)\(target\)/g, '::$1(env, target, module)'], [/::(Init(?:ialize)?)\(target\)/g, '::$1(env, target, module)'],
[/constructor_template/g, 'constructor'], [/constructor_template/g, 'constructor'],
@ -226,16 +219,15 @@ var SourceFileOperations = [
[/info.GetReturnValue\(\).SetUndefined\(\)/g, 'return env.Undefined()'], [/info.GetReturnValue\(\).SetUndefined\(\)/g, 'return env.Undefined()'],
[/info\.GetReturnValue\(\)\.Set\(((\n|.)+?)\);/g, 'return $1;'], [/info\.GetReturnValue\(\)\.Set\(((\n|.)+?)\);/g, 'return $1;'],
// ex. Local<Value> to Napi::Value // ex. Local<Value> to Napi::Value
[/v8::Local<v8::(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>/g, 'Napi::$1'], [/v8::Local<v8::(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>/g, 'Napi::$1'],
[/Local<(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>/g, 'Napi::$1'], [/Local<(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>/g, 'Napi::$1'],
// Declare an env in helper functions that take a Napi::Value // Declare an env in helper functions that take a Napi::Value
[ /(\w+)\(Napi::Value (\w+)(,\s*[^\()]+)?\)\s*{\n*([ ]*)/gm, '$1(Napi::Value $2$3) {\n$4Napi::Env env = $2.Env();\n$4' ], [/(\w+)\(Napi::Value (\w+)(,\s*[^()]+)?\)\s*{\n*([ ]*)/gm, '$1(Napi::Value $2$3) {\n$4Napi::Env env = $2.Env();\n$4'],
// delete #include <node.h> and/or <v8.h> // delete #include <node.h> and/or <v8.h>
[ /#include +(<|")(?:node|nan).h("|>)/g, "#include $1napi.h$2\n#include $1uv.h$2" ], [/#include +(<|")(?:node|nan).h("|>)/g, '#include $1napi.h$2\n#include $1uv.h$2'],
// NODE_MODULE to NODE_API_MODULE // NODE_MODULE to NODE_API_MODULE
[/NODE_MODULE/g, 'NODE_API_MODULE'], [/NODE_MODULE/g, 'NODE_API_MODULE'],
[/Nan::/g, 'Napi::'], [/Nan::/g, 'Napi::'],
@ -253,17 +245,17 @@ var SourceFileOperations = [
// delete using v8::Local; // delete using v8::Local;
[/using v8::Local;\n/g, ''], [/using v8::Local;\n/g, ''],
// replace using v8::XXX; with using Napi::XXX // replace using v8::XXX; with using Napi::XXX
[ /using v8::([A-Za-z]+);/g, 'using Napi::$1;' ], [/using v8::([A-Za-z]+);/g, 'using Napi::$1;']
]; ];
var paths = listFiles(dir); const paths = listFiles(dir);
paths.forEach(function (dirEntry) { paths.forEach(function (dirEntry) {
var filename = dirEntry.split('\\').pop().split('/').pop(); const filename = dirEntry.split('\\').pop().split('/').pop();
// Check whether the file is a source file or a config file // Check whether the file is a source file or a config file
// then execute function accordingly // then execute function accordingly
var sourcePattern = /.+\.h|.+\.cc|.+\.cpp/; const sourcePattern = /.+\.h|.+\.cc|.+\.cpp/;
if (sourcePattern.test(filename)) { if (sourcePattern.test(filename)) {
convertFile(dirEntry, SourceFileOperations); convertFile(dirEntry, SourceFileOperations);
} else if (ConfigFileOperations[filename] != null) { } else if (ConfigFileOperations[filename] != null) {
@ -272,11 +264,11 @@ paths.forEach(function(dirEntry) {
}); });
function listFiles (dir, filelist) { function listFiles (dir, filelist) {
var files = fs.readdirSync(dir); const files = fs.readdirSync(dir);
filelist = filelist || []; filelist = filelist || [];
files.forEach(function (file) { files.forEach(function (file) {
if (file === 'node_modules') { if (file === 'node_modules') {
return return;
} }
if (fs.statSync(path.join(dir, file)).isDirectory()) { if (fs.statSync(path.join(dir, file)).isDirectory()) {
@ -290,14 +282,14 @@ function listFiles(dir, filelist) {
function convert (content, operations) { function convert (content, operations) {
for (let i = 0; i < operations.length; i++) { for (let i = 0; i < operations.length; i++) {
let operation = operations[i]; const operation = operations[i];
content = content.replace(operation[0], operation[1]); content = content.replace(operation[0], operation[1]);
} }
return content; return content;
} }
function convertFile (fileName, operations) { function convertFile (fileName, operations) {
fs.readFile(fileName, "utf-8", function (err, file) { fs.readFile(fileName, 'utf-8', function (err, file) {
if (err) throw err; if (err) throw err;
file = convert(file, operations); file = convert(file, operations);

@ -4,6 +4,8 @@ const spawn = require('child_process').spawnSync;
const filesToCheck = '*.js'; const filesToCheck = '*.js';
const FORMAT_START = process.env.FORMAT_START || 'main'; const FORMAT_START = process.env.FORMAT_START || 'main';
const IS_WIN = process.platform === 'win32';
const ESLINT_PATH = IS_WIN ? 'node_modules\\.bin\\eslint.cmd' : 'node_modules/.bin/eslint';
function main (args) { function main (args) {
let fix = false; let fix = false;
@ -44,10 +46,16 @@ function main (args) {
if (fix) { if (fix) {
options.push('--fix'); options.push('--fix');
} }
const result = spawn('node_modules/.bin/eslint', [...options], {
const result = spawn(ESLINT_PATH, [...options], {
encoding: 'utf-8' encoding: 'utf-8'
}); });
if (result.error && result.error.errno === 'ENOENT') {
console.error('Eslint not found! Eslint is supposed to be found at ', ESLINT_PATH);
return 2;
}
if (result.status === 1) { if (result.status === 1) {
console.error('Eslint error:', result.stdout); console.error('Eslint error:', result.stdout);
const fixCmd = 'npm run lint:fix'; const fixCmd = 'npm run lint:fix';

10
node_modules/tar/README.md generated vendored

@ -115,6 +115,8 @@ Handlers receive 3 arguments:
encountered an error which prevented it from being unpacked. This occurs encountered an error which prevented it from being unpacked. This occurs
when: when:
- an unrecoverable fs error happens during unpacking, - an unrecoverable fs error happens during unpacking,
- an entry is trying to extract into an excessively deep
location (by default, limited to 1024 subfolders),
- an entry has `..` in the path and `preservePaths` is not set, or - an entry has `..` in the path and `preservePaths` is not set, or
- an entry is extracting through a symbolic link, when `preservePaths` is - an entry is extracting through a symbolic link, when `preservePaths` is
not set. not set.
@ -427,6 +429,10 @@ The following options are supported:
`process.umask()` to determine the default umask value, since tar will `process.umask()` to determine the default umask value, since tar will
extract with whatever mode is provided, and let the process `umask` apply extract with whatever mode is provided, and let the process `umask` apply
normally. normally.
- `maxDepth` The maximum depth of subfolders to extract into. This
defaults to 1024. Anything deeper than the limit will raise a
warning and skip the entry. Set to `Infinity` to remove the
limitation.
The following options are mostly internal, but can be modified in some The following options are mostly internal, but can be modified in some
advanced use cases, such as re-using caches between runs. advanced use cases, such as re-using caches between runs.
@ -749,6 +755,10 @@ Most unpack errors will cause a `warn` event to be emitted. If the
`process.umask()` to determine the default umask value, since tar will `process.umask()` to determine the default umask value, since tar will
extract with whatever mode is provided, and let the process `umask` apply extract with whatever mode is provided, and let the process `umask` apply
normally. normally.
- `maxDepth` The maximum depth of subfolders to extract into. This
defaults to 1024. Anything deeper than the limit will raise a
warning and skip the entry. Set to `Infinity` to remove the
limitation.
### class tar.Unpack.Sync ### class tar.Unpack.Sync

@ -6,7 +6,7 @@ const normalizeCache = Object.create(null)
const { hasOwnProperty } = Object.prototype const { hasOwnProperty } = Object.prototype
module.exports = s => { module.exports = s => {
if (!hasOwnProperty.call(normalizeCache, s)) { if (!hasOwnProperty.call(normalizeCache, s)) {
normalizeCache[s] = s.normalize('NFKD') normalizeCache[s] = s.normalize('NFD')
} }
return normalizeCache[s] return normalizeCache[s]
} }

16
node_modules/tar/lib/pack.js generated vendored

@ -22,7 +22,7 @@ class PackJob {
} }
} }
const MiniPass = require('minipass') const { Minipass } = require('minipass')
const zlib = require('minizlib') const zlib = require('minizlib')
const ReadEntry = require('./read-entry.js') const ReadEntry = require('./read-entry.js')
const WriteEntry = require('./write-entry.js') const WriteEntry = require('./write-entry.js')
@ -56,7 +56,7 @@ const path = require('path')
const warner = require('./warn-mixin.js') const warner = require('./warn-mixin.js')
const normPath = require('./normalize-windows-path.js') const normPath = require('./normalize-windows-path.js')
const Pack = warner(class Pack extends MiniPass { const Pack = warner(class Pack extends Minipass {
constructor (opt) { constructor (opt) {
super(opt) super(opt)
opt = opt || Object.create(null) opt = opt || Object.create(null)
@ -79,6 +79,11 @@ const Pack = warner(class Pack extends MiniPass {
this.portable = !!opt.portable this.portable = !!opt.portable
this.zip = null this.zip = null
if (opt.gzip || opt.brotli) {
if (opt.gzip && opt.brotli) {
throw new TypeError('gzip and brotli are mutually exclusive')
}
if (opt.gzip) { if (opt.gzip) {
if (typeof opt.gzip !== 'object') { if (typeof opt.gzip !== 'object') {
opt.gzip = {} opt.gzip = {}
@ -87,6 +92,13 @@ const Pack = warner(class Pack extends MiniPass {
opt.gzip.portable = true opt.gzip.portable = true
} }
this.zip = new zlib.Gzip(opt.gzip) this.zip = new zlib.Gzip(opt.gzip)
}
if (opt.brotli) {
if (typeof opt.brotli !== 'object') {
opt.brotli = {}
}
this.zip = new zlib.BrotliCompress(opt.brotli)
}
this.zip.on('data', chunk => super.write(chunk)) this.zip.on('data', chunk => super.write(chunk))
this.zip.on('end', _ => super.end()) this.zip.on('end', _ => super.end())
this.zip.on('drain', _ => this[ONDRAIN]()) this.zip.on('drain', _ => this[ONDRAIN]())

49
node_modules/tar/lib/parse.js generated vendored

@ -97,6 +97,16 @@ module.exports = warner(class Parser extends EE {
this.strict = !!opt.strict this.strict = !!opt.strict
this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
this.filter = typeof opt.filter === 'function' ? opt.filter : noop this.filter = typeof opt.filter === 'function' ? opt.filter : noop
// Unlike gzip, brotli doesn't have any magic bytes to identify it
// Users need to explicitly tell us they're extracting a brotli file
// Or we infer from the file extension
const isTBR = (opt.file && (
opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr')))
// if it's a tbr file it MIGHT be brotli, but we don't know until
// we look at it and verify it's not a valid tar file.
this.brotli = !opt.gzip && opt.brotli !== undefined ? opt.brotli
: isTBR ? undefined
: false
// have to set this so that streams are ok piping into it // have to set this so that streams are ok piping into it
this.writable = true this.writable = true
@ -347,7 +357,9 @@ module.exports = warner(class Parser extends EE {
} }
// first write, might be gzipped // first write, might be gzipped
if (this[UNZIP] === null && chunk) { const needSniff = this[UNZIP] === null ||
this.brotli === undefined && this[UNZIP] === false
if (needSniff && chunk) {
if (this[BUFFER]) { if (this[BUFFER]) {
chunk = Buffer.concat([this[BUFFER], chunk]) chunk = Buffer.concat([this[BUFFER], chunk])
this[BUFFER] = null this[BUFFER] = null
@ -356,15 +368,45 @@ module.exports = warner(class Parser extends EE {
this[BUFFER] = chunk this[BUFFER] = chunk
return true return true
} }
// look for gzip header
for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) { for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
if (chunk[i] !== gzipHeader[i]) { if (chunk[i] !== gzipHeader[i]) {
this[UNZIP] = false this[UNZIP] = false
} }
} }
if (this[UNZIP] === null) {
const maybeBrotli = this.brotli === undefined
if (this[UNZIP] === false && maybeBrotli) {
// read the first header to see if it's a valid tar file. If so,
// we can safely assume that it's not actually brotli, despite the
// .tbr or .tar.br file extension.
// if we ended before getting a full chunk, yes, def brotli
if (chunk.length < 512) {
if (this[ENDED]) {
this.brotli = true
} else {
this[BUFFER] = chunk
return true
}
} else {
// if it's tar, it's pretty reliably not brotli, chances of
// that happening are astronomical.
try {
new Header(chunk.slice(0, 512))
this.brotli = false
} catch (_) {
this.brotli = true
}
}
}
if (this[UNZIP] === null || (this[UNZIP] === false && this.brotli)) {
const ended = this[ENDED] const ended = this[ENDED]
this[ENDED] = false this[ENDED] = false
this[UNZIP] = new zlib.Unzip() this[UNZIP] = this[UNZIP] === null
? new zlib.Unzip()
: new zlib.BrotliDecompress()
this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk)) this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
this[UNZIP].on('error', er => this.abort(er)) this[UNZIP].on('error', er => this.abort(er))
this[UNZIP].on('end', _ => { this[UNZIP].on('end', _ => {
@ -502,6 +544,7 @@ module.exports = warner(class Parser extends EE {
this[UNZIP].end(chunk) this[UNZIP].end(chunk)
} else { } else {
this[ENDED] = true this[ENDED] = true
if (this.brotli === undefined) chunk = chunk || Buffer.alloc(0)
this.write(chunk) this.write(chunk)
} }
} }

@ -123,7 +123,7 @@ module.exports = () => {
// effectively removing all parallelization on windows. // effectively removing all parallelization on windows.
paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => { paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => {
// don't need normPath, because we skip this entirely for windows // don't need normPath, because we skip this entirely for windows
return normalize(stripSlashes(join(p))).toLowerCase() return stripSlashes(join(normalize(p))).toLowerCase()
}) })
const dirs = new Set( const dirs = new Set(

@ -1,9 +1,9 @@
'use strict' 'use strict'
const MiniPass = require('minipass') const { Minipass } = require('minipass')
const normPath = require('./normalize-windows-path.js') const normPath = require('./normalize-windows-path.js')
const SLURP = Symbol('slurp') const SLURP = Symbol('slurp')
module.exports = class ReadEntry extends MiniPass { module.exports = class ReadEntry extends Minipass {
constructor (header, ex, gex) { constructor (header, ex, gex) {
super() super()
// read entries always start life paused. this is to avoid the // read entries always start life paused. this is to avoid the

2
node_modules/tar/lib/replace.js generated vendored

@ -23,7 +23,7 @@ module.exports = (opt_, files, cb) => {
throw new TypeError('file is required') throw new TypeError('file is required')
} }
if (opt.gzip) { if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
throw new TypeError('cannot append to compressed archives') throw new TypeError('cannot append to compressed archives')
} }

29
node_modules/tar/lib/unpack.js generated vendored

@ -48,6 +48,7 @@ const crypto = require('crypto')
const getFlag = require('./get-write-flag.js') const getFlag = require('./get-write-flag.js')
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32' const isWindows = platform === 'win32'
const DEFAULT_MAX_DEPTH = 1024
// Unlinks on Windows are not atomic. // Unlinks on Windows are not atomic.
// //
@ -105,7 +106,7 @@ const uint32 = (a, b, c) =>
// Note that on windows, we always drop the entire cache whenever a // Note that on windows, we always drop the entire cache whenever a
// symbolic link is encountered, because 8.3 filenames are impossible // symbolic link is encountered, because 8.3 filenames are impossible
// to reason about, and collisions are hazards rather than just failures. // to reason about, and collisions are hazards rather than just failures.
const cacheKeyNormalize = path => normalize(stripSlash(normPath(path))) const cacheKeyNormalize = path => stripSlash(normPath(normalize(path)))
.toLowerCase() .toLowerCase()
const pruneCache = (cache, abs) => { const pruneCache = (cache, abs) => {
@ -181,6 +182,12 @@ class Unpack extends Parser {
this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ? this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
process.getgid() : null process.getgid() : null
// prevent excessively deep nesting of subfolders
// set to `Infinity` to remove this restriction
this.maxDepth = typeof opt.maxDepth === 'number'
? opt.maxDepth
: DEFAULT_MAX_DEPTH
// mostly just for testing, but useful in some cases. // mostly just for testing, but useful in some cases.
// Forcibly trigger a chown on every entry, no matter what // Forcibly trigger a chown on every entry, no matter what
this.forceChown = opt.forceChown === true this.forceChown = opt.forceChown === true
@ -238,13 +245,13 @@ class Unpack extends Parser {
} }
[CHECKPATH] (entry) { [CHECKPATH] (entry) {
const p = normPath(entry.path)
const parts = p.split('/')
if (this.strip) { if (this.strip) {
const parts = normPath(entry.path).split('/')
if (parts.length < this.strip) { if (parts.length < this.strip) {
return false return false
} }
entry.path = parts.slice(this.strip).join('/')
if (entry.type === 'Link') { if (entry.type === 'Link') {
const linkparts = normPath(entry.linkpath).split('/') const linkparts = normPath(entry.linkpath).split('/')
if (linkparts.length >= this.strip) { if (linkparts.length >= this.strip) {
@ -253,11 +260,21 @@ class Unpack extends Parser {
return false return false
} }
} }
parts.splice(0, this.strip)
entry.path = parts.join('/')
}
if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
entry,
path: p,
depth: parts.length,
maxDepth: this.maxDepth,
})
return false
} }
if (!this.preservePaths) { if (!this.preservePaths) {
const p = normPath(entry.path)
const parts = p.split('/')
if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) { if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) {
this.warn('TAR_ENTRY_ERROR', `path contains '..'`, { this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
entry, entry,

2
node_modules/tar/lib/update.js generated vendored

@ -13,7 +13,7 @@ module.exports = (opt_, files, cb) => {
throw new TypeError('file is required') throw new TypeError('file is required')
} }
if (opt.gzip) { if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
throw new TypeError('cannot append to compressed archives') throw new TypeError('cannot append to compressed archives')
} }

@ -1,5 +1,5 @@
'use strict' 'use strict'
const MiniPass = require('minipass') const { Minipass } = require('minipass')
const Pax = require('./pax.js') const Pax = require('./pax.js')
const Header = require('./header.js') const Header = require('./header.js')
const fs = require('fs') const fs = require('fs')
@ -41,7 +41,7 @@ const stripAbsolutePath = require('./strip-absolute-path.js')
const modeFix = require('./mode-fix.js') const modeFix = require('./mode-fix.js')
const WriteEntry = warner(class WriteEntry extends MiniPass { const WriteEntry = warner(class WriteEntry extends Minipass {
constructor (p, opt) { constructor (p, opt) {
opt = opt || {} opt = opt || {}
super(opt) super(opt)
@ -417,7 +417,7 @@ class WriteEntrySync extends WriteEntry {
} }
} }
const WriteEntryTar = warner(class WriteEntryTar extends MiniPass { const WriteEntryTar = warner(class WriteEntryTar extends Minipass {
constructor (readEntry, opt) { constructor (readEntry, opt) {
opt = opt || {} opt = opt || {}
super(opt) super(opt)

17
node_modules/tar/package.json generated vendored

@ -2,32 +2,27 @@
"author": "GitHub Inc.", "author": "GitHub Inc.",
"name": "tar", "name": "tar",
"description": "tar for node", "description": "tar for node",
"version": "6.1.13", "version": "6.2.1",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://github.com/npm/node-tar.git" "url": "https://github.com/isaacs/node-tar.git"
}, },
"scripts": { "scripts": {
"genparse": "node scripts/generate-parse-fixtures.js", "genparse": "node scripts/generate-parse-fixtures.js",
"template-oss-apply": "template-oss-apply --force",
"lint": "eslint \"**/*.js\"",
"postlint": "template-oss-check",
"lintfix": "npm run lint -- --fix",
"snap": "tap", "snap": "tap",
"test": "tap", "test": "tap"
"posttest": "npm run lint"
}, },
"dependencies": { "dependencies": {
"chownr": "^2.0.0", "chownr": "^2.0.0",
"fs-minipass": "^2.0.0", "fs-minipass": "^2.0.0",
"minipass": "^4.0.0", "minipass": "^5.0.0",
"minizlib": "^2.1.1", "minizlib": "^2.1.1",
"mkdirp": "^1.0.3", "mkdirp": "^1.0.3",
"yallist": "^4.0.0" "yallist": "^4.0.0"
}, },
"devDependencies": { "devDependencies": {
"@npmcli/eslint-config": "^4.0.0", "@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.10.0", "@npmcli/template-oss": "4.11.0",
"chmodr": "^1.2.0", "chmodr": "^1.2.0",
"end-of-stream": "^1.4.3", "end-of-stream": "^1.4.3",
"events-to-array": "^2.0.3", "events-to-array": "^2.0.3",
@ -55,7 +50,7 @@
}, },
"templateOSS": { "templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.10.0", "version": "4.11.0",
"content": "scripts/template-oss", "content": "scripts/template-oss",
"engines": ">=10", "engines": ">=10",
"distPaths": [ "distPaths": [

206
package-lock.json generated

@ -17,7 +17,7 @@
"@google-cloud/storage": "^6.10.1", "@google-cloud/storage": "^6.10.1",
"adminjs": "^6.7.2", "adminjs": "^6.7.2",
"axios": "^1.7.2", "axios": "^1.7.2",
"bcrypt": "^5.0.0", "bcrypt": "^5.1.1",
"body-parser": "^1.19.0", "body-parser": "^1.19.0",
"boom": "^7.3.0", "boom": "^7.3.0",
"chalk": "^4.1.0", "chalk": "^4.1.0",
@ -2697,9 +2697,10 @@
} }
}, },
"node_modules/@mapbox/node-pre-gyp": { "node_modules/@mapbox/node-pre-gyp": {
"version": "1.0.10", "version": "1.0.11",
"resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.10.tgz", "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz",
"integrity": "sha512-4ySo4CjzStuprMwk35H5pPbkymjv1SF3jGLj6rAHp/xT/RF7TL7bd9CTm1xDY49K2qF7jmR/g7k+SkLETP6opA==", "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==",
"license": "BSD-3-Clause",
"dependencies": { "dependencies": {
"detect-libc": "^2.0.0", "detect-libc": "^2.0.0",
"https-proxy-agent": "^5.0.0", "https-proxy-agent": "^5.0.0",
@ -2715,21 +2716,11 @@
"node-pre-gyp": "bin/node-pre-gyp" "node-pre-gyp": "bin/node-pre-gyp"
} }
}, },
"node_modules/@mapbox/node-pre-gyp/node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/@mapbox/node-pre-gyp/node_modules/make-dir": { "node_modules/@mapbox/node-pre-gyp/node_modules/make-dir": {
"version": "3.1.0", "version": "3.1.0",
"resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz",
"integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==",
"license": "MIT",
"dependencies": { "dependencies": {
"semver": "^6.0.0" "semver": "^6.0.0"
}, },
@ -2741,20 +2732,19 @@
} }
}, },
"node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver": { "node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver": {
"version": "6.3.0", "version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"license": "ISC",
"bin": { "bin": {
"semver": "bin/semver.js" "semver": "bin/semver.js"
} }
}, },
"node_modules/@mapbox/node-pre-gyp/node_modules/semver": { "node_modules/@mapbox/node-pre-gyp/node_modules/semver": {
"version": "7.3.8", "version": "7.6.3",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==",
"dependencies": { "license": "ISC",
"lru-cache": "^6.0.0"
},
"bin": { "bin": {
"semver": "bin/semver.js" "semver": "bin/semver.js"
}, },
@ -2762,11 +2752,6 @@
"node": ">=10" "node": ">=10"
} }
}, },
"node_modules/@mapbox/node-pre-gyp/node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/@popperjs/core": { "node_modules/@popperjs/core": {
"version": "2.11.6", "version": "2.11.6",
"resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.6.tgz", "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.6.tgz",
@ -4004,7 +3989,8 @@
"node_modules/aproba": { "node_modules/aproba": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz",
"integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==",
"license": "ISC"
}, },
"node_modules/archiver": { "node_modules/archiver": {
"version": "5.3.1", "version": "5.3.1",
@ -4084,6 +4070,8 @@
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz",
"integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==",
"deprecated": "This package is no longer supported.",
"license": "ISC",
"dependencies": { "dependencies": {
"delegates": "^1.0.0", "delegates": "^1.0.0",
"readable-stream": "^3.6.0" "readable-stream": "^3.6.0"
@ -4318,12 +4306,13 @@
] ]
}, },
"node_modules/bcrypt": { "node_modules/bcrypt": {
"version": "5.1.0", "version": "5.1.1",
"resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.0.tgz", "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz",
"integrity": "sha512-RHBS7HI5N5tEnGTmtR/pppX0mmDSBpQ4aCBsj7CEQfYXDcO74A8sIBYcJMuCsis2E81zDxeENYhv66oZwLiA+Q==", "integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==",
"hasInstallScript": true, "hasInstallScript": true,
"license": "MIT",
"dependencies": { "dependencies": {
"@mapbox/node-pre-gyp": "^1.0.10", "@mapbox/node-pre-gyp": "^1.0.11",
"node-addon-api": "^5.0.0" "node-addon-api": "^5.0.0"
}, },
"engines": { "engines": {
@ -4723,6 +4712,7 @@
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz",
"integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==",
"license": "ISC",
"engines": { "engines": {
"node": ">=10" "node": ">=10"
} }
@ -4861,6 +4851,7 @@
"version": "1.1.3", "version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
"integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==",
"license": "ISC",
"bin": { "bin": {
"color-support": "bin.js" "color-support": "bin.js"
} }
@ -4936,7 +4927,8 @@
"node_modules/console-control-strings": { "node_modules/console-control-strings": {
"version": "1.1.0", "version": "1.1.0",
"resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
"integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==" "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==",
"license": "ISC"
}, },
"node_modules/content-disposition": { "node_modules/content-disposition": {
"version": "0.5.4", "version": "0.5.4",
@ -5270,7 +5262,8 @@
"node_modules/delegates": { "node_modules/delegates": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
"integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==" "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==",
"license": "MIT"
}, },
"node_modules/denque": { "node_modules/denque": {
"version": "1.5.1", "version": "1.5.1",
@ -5303,9 +5296,10 @@
} }
}, },
"node_modules/detect-libc": { "node_modules/detect-libc": {
"version": "2.0.1", "version": "2.0.3",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz",
"integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==", "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==",
"license": "Apache-2.0",
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
@ -6798,6 +6792,7 @@
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz",
"integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==",
"license": "ISC",
"dependencies": { "dependencies": {
"minipass": "^3.0.0" "minipass": "^3.0.0"
}, },
@ -6809,6 +6804,7 @@
"version": "3.3.6", "version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"license": "ISC",
"dependencies": { "dependencies": {
"yallist": "^4.0.0" "yallist": "^4.0.0"
}, },
@ -6819,7 +6815,8 @@
"node_modules/fs-minipass/node_modules/yallist": { "node_modules/fs-minipass/node_modules/yallist": {
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"license": "ISC"
}, },
"node_modules/fs.realpath": { "node_modules/fs.realpath": {
"version": "1.0.0", "version": "1.0.0",
@ -6923,6 +6920,8 @@
"version": "3.0.2", "version": "3.0.2",
"resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz",
"integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==",
"deprecated": "This package is no longer supported.",
"license": "ISC",
"dependencies": { "dependencies": {
"aproba": "^1.0.3 || ^2.0.0", "aproba": "^1.0.3 || ^2.0.0",
"color-support": "^1.1.2", "color-support": "^1.1.2",
@ -7519,7 +7518,8 @@
"node_modules/has-unicode": { "node_modules/has-unicode": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
"integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==" "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==",
"license": "ISC"
}, },
"node_modules/hashlru": { "node_modules/hashlru": {
"version": "2.3.0", "version": "2.3.0",
@ -8694,25 +8694,19 @@
} }
}, },
"node_modules/minipass": { "node_modules/minipass": {
"version": "4.0.0", "version": "5.0.0",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-4.0.0.tgz", "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
"integrity": "sha512-g2Uuh2jEKoht+zvO6vJqXmYpflPqzRBT+Th2h01DKh5z7wbY/AZ2gCQ78cP70YoHPyFdY30YBV5WxgLOEwOykw==", "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==",
"dependencies": { "license": "ISC",
"yallist": "^4.0.0"
},
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
}, },
"node_modules/minipass/node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/minizlib": { "node_modules/minizlib": {
"version": "2.1.2", "version": "2.1.2",
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz",
"integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==",
"license": "MIT",
"dependencies": { "dependencies": {
"minipass": "^3.0.0", "minipass": "^3.0.0",
"yallist": "^4.0.0" "yallist": "^4.0.0"
@ -8725,6 +8719,7 @@
"version": "3.3.6", "version": "3.3.6",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
"integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
"license": "ISC",
"dependencies": { "dependencies": {
"yallist": "^4.0.0" "yallist": "^4.0.0"
}, },
@ -8735,7 +8730,8 @@
"node_modules/minizlib/node_modules/yallist": { "node_modules/minizlib/node_modules/yallist": {
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"license": "ISC"
}, },
"node_modules/mkdirp": { "node_modules/mkdirp": {
"version": "1.0.4", "version": "1.0.4",
@ -9137,9 +9133,10 @@
} }
}, },
"node_modules/node-addon-api": { "node_modules/node-addon-api": {
"version": "5.0.0", "version": "5.1.0",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.0.0.tgz", "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
"integrity": "sha512-CvkDw2OEnme7ybCykJpVcKH+uAOLV2qLqiyla128dN9TkEWfrYmxG6C2boDe5KcNQqZF3orkqzGgOMvZ/JNekA==" "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==",
"license": "MIT"
}, },
"node_modules/node-cron": { "node_modules/node-cron": {
"version": "3.0.2", "version": "3.0.2",
@ -9272,6 +9269,7 @@
"version": "5.0.0", "version": "5.0.0",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz",
"integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==",
"license": "ISC",
"dependencies": { "dependencies": {
"abbrev": "1" "abbrev": "1"
}, },
@ -9294,6 +9292,8 @@
"version": "5.0.1", "version": "5.0.1",
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz",
"integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==",
"deprecated": "This package is no longer supported.",
"license": "ISC",
"dependencies": { "dependencies": {
"are-we-there-yet": "^2.0.0", "are-we-there-yet": "^2.0.0",
"console-control-strings": "^1.1.0", "console-control-strings": "^1.1.0",
@ -10989,7 +10989,8 @@
"node_modules/set-blocking": { "node_modules/set-blocking": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
"integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==",
"license": "ISC"
}, },
"node_modules/set-cookie-parser": { "node_modules/set-cookie-parser": {
"version": "2.5.1", "version": "2.5.1",
@ -11471,13 +11472,14 @@
} }
}, },
"node_modules/tar": { "node_modules/tar": {
"version": "6.1.13", "version": "6.2.1",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz",
"integrity": "sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==", "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==",
"license": "ISC",
"dependencies": { "dependencies": {
"chownr": "^2.0.0", "chownr": "^2.0.0",
"fs-minipass": "^2.0.0", "fs-minipass": "^2.0.0",
"minipass": "^4.0.0", "minipass": "^5.0.0",
"minizlib": "^2.1.1", "minizlib": "^2.1.1",
"mkdirp": "^1.0.3", "mkdirp": "^1.0.3",
"yallist": "^4.0.0" "yallist": "^4.0.0"
@ -11514,7 +11516,8 @@
"node_modules/tar/node_modules/yallist": { "node_modules/tar/node_modules/yallist": {
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"license": "ISC"
}, },
"node_modules/teeny-request": { "node_modules/teeny-request": {
"version": "8.0.3", "version": "8.0.3",
@ -12449,6 +12452,7 @@
"version": "1.1.5", "version": "1.1.5",
"resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz",
"integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==",
"license": "ISC",
"dependencies": { "dependencies": {
"string-width": "^1.0.2 || 2 || 3 || 4" "string-width": "^1.0.2 || 2 || 3 || 4"
} }
@ -14636,9 +14640,9 @@
"integrity": "sha512-Xs/4RZltsAL7pkvaNStUQt7netTkyxrS0K+RILcVr3TRMS/ToOg4I6uNfhB9SlGsnWBym4U+EaXq0f0cEMNkHA==" "integrity": "sha512-Xs/4RZltsAL7pkvaNStUQt7netTkyxrS0K+RILcVr3TRMS/ToOg4I6uNfhB9SlGsnWBym4U+EaXq0f0cEMNkHA=="
}, },
"@mapbox/node-pre-gyp": { "@mapbox/node-pre-gyp": {
"version": "1.0.10", "version": "1.0.11",
"resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.10.tgz", "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz",
"integrity": "sha512-4ySo4CjzStuprMwk35H5pPbkymjv1SF3jGLj6rAHp/xT/RF7TL7bd9CTm1xDY49K2qF7jmR/g7k+SkLETP6opA==", "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==",
"requires": { "requires": {
"detect-libc": "^2.0.0", "detect-libc": "^2.0.0",
"https-proxy-agent": "^5.0.0", "https-proxy-agent": "^5.0.0",
@ -14651,14 +14655,6 @@
"tar": "^6.1.11" "tar": "^6.1.11"
}, },
"dependencies": { "dependencies": {
"lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"requires": {
"yallist": "^4.0.0"
}
},
"make-dir": { "make-dir": {
"version": "3.1.0", "version": "3.1.0",
"resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz",
@ -14668,24 +14664,16 @@
}, },
"dependencies": { "dependencies": {
"semver": { "semver": {
"version": "6.3.0", "version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="
} }
} }
}, },
"semver": { "semver": {
"version": "7.3.8", "version": "7.6.3",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A=="
"requires": {
"lru-cache": "^6.0.0"
}
},
"yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
} }
} }
}, },
@ -15873,11 +15861,11 @@
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="
}, },
"bcrypt": { "bcrypt": {
"version": "5.1.0", "version": "5.1.1",
"resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.0.tgz", "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz",
"integrity": "sha512-RHBS7HI5N5tEnGTmtR/pppX0mmDSBpQ4aCBsj7CEQfYXDcO74A8sIBYcJMuCsis2E81zDxeENYhv66oZwLiA+Q==", "integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==",
"requires": { "requires": {
"@mapbox/node-pre-gyp": "^1.0.10", "@mapbox/node-pre-gyp": "^1.0.11",
"node-addon-api": "^5.0.0" "node-addon-api": "^5.0.0"
} }
}, },
@ -16582,9 +16570,9 @@
"integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==" "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg=="
}, },
"detect-libc": { "detect-libc": {
"version": "2.0.1", "version": "2.0.3",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz",
"integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==" "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw=="
}, },
"dezalgo": { "dezalgo": {
"version": "1.0.4", "version": "1.0.4",
@ -19349,19 +19337,9 @@
"integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==" "integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g=="
}, },
"minipass": { "minipass": {
"version": "4.0.0", "version": "5.0.0",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-4.0.0.tgz", "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
"integrity": "sha512-g2Uuh2jEKoht+zvO6vJqXmYpflPqzRBT+Th2h01DKh5z7wbY/AZ2gCQ78cP70YoHPyFdY30YBV5WxgLOEwOykw==", "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ=="
"requires": {
"yallist": "^4.0.0"
},
"dependencies": {
"yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
}
}
}, },
"minizlib": { "minizlib": {
"version": "2.1.2", "version": "2.1.2",
@ -19686,9 +19664,9 @@
"integrity": "sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg==" "integrity": "sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg=="
}, },
"node-addon-api": { "node-addon-api": {
"version": "5.0.0", "version": "5.1.0",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.0.0.tgz", "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
"integrity": "sha512-CvkDw2OEnme7ybCykJpVcKH+uAOLV2qLqiyla128dN9TkEWfrYmxG6C2boDe5KcNQqZF3orkqzGgOMvZ/JNekA==" "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA=="
}, },
"node-cron": { "node-cron": {
"version": "3.0.2", "version": "3.0.2",
@ -21454,13 +21432,13 @@
"integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="
}, },
"tar": { "tar": {
"version": "6.1.13", "version": "6.2.1",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz",
"integrity": "sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==", "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==",
"requires": { "requires": {
"chownr": "^2.0.0", "chownr": "^2.0.0",
"fs-minipass": "^2.0.0", "fs-minipass": "^2.0.0",
"minipass": "^4.0.0", "minipass": "^5.0.0",
"minizlib": "^2.1.1", "minizlib": "^2.1.1",
"mkdirp": "^1.0.3", "mkdirp": "^1.0.3",
"yallist": "^4.0.0" "yallist": "^4.0.0"

@ -18,7 +18,7 @@
"@google-cloud/storage": "^6.10.1", "@google-cloud/storage": "^6.10.1",
"adminjs": "^6.7.2", "adminjs": "^6.7.2",
"axios": "^1.7.2", "axios": "^1.7.2",
"bcrypt": "^5.0.0", "bcrypt": "^5.1.1",
"body-parser": "^1.19.0", "body-parser": "^1.19.0",
"boom": "^7.3.0", "boom": "^7.3.0",
"chalk": "^4.1.0", "chalk": "^4.1.0",

Loading…
Cancel
Save