This commit is contained in:
Ash
2023-01-24 17:15:57 +04:00
parent 21898e64fb
commit 26ebf6ec64
1117 changed files with 239106 additions and 0 deletions

12
node_modules/.bin/prebuild-install generated vendored Normal file

@@ -0,0 +1,12 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../prebuild-install/bin.js" "$@"
else
exec node "$basedir/../prebuild-install/bin.js" "$@"
fi

17
node_modules/.bin/prebuild-install.cmd generated vendored Normal file

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\prebuild-install\bin.js" %*

28
node_modules/.bin/prebuild-install.ps1 generated vendored Normal file

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../prebuild-install/bin.js" $args
} else {
& "$basedir/node$exe" "$basedir/../prebuild-install/bin.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../prebuild-install/bin.js" $args
} else {
& "node$exe" "$basedir/../prebuild-install/bin.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

12
node_modules/.bin/rc generated vendored Normal file

@@ -0,0 +1,12 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../rc/cli.js" "$@"
else
exec node "$basedir/../rc/cli.js" "$@"
fi

17
node_modules/.bin/rc.cmd generated vendored Normal file

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\rc\cli.js" %*

28
node_modules/.bin/rc.ps1 generated vendored Normal file

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../rc/cli.js" $args
} else {
& "$basedir/node$exe" "$basedir/../rc/cli.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../rc/cli.js" $args
} else {
& "node$exe" "$basedir/../rc/cli.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

12
node_modules/.bin/semver generated vendored Normal file

@@ -0,0 +1,12 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@"
else
exec node "$basedir/../semver/bin/semver.js" "$@"
fi

17
node_modules/.bin/semver.cmd generated vendored Normal file

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver.js" %*

28
node_modules/.bin/semver.ps1 generated vendored Normal file

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args
} else {
& "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../semver/bin/semver.js" $args
} else {
& "node$exe" "$basedir/../semver/bin/semver.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

473
node_modules/.package-lock.json generated vendored

@@ -30,6 +30,48 @@
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
},
"node_modules/base64-js": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/bl": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
"integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==",
"dependencies": {
"buffer": "^5.5.0",
"inherits": "^2.0.4",
"readable-stream": "^3.4.0"
}
},
"node_modules/bl/node_modules/readable-stream": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
"integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
"dependencies": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/body-parser": {
"version": "1.20.1",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz",
@@ -75,6 +117,29 @@
"concat-map": "0.0.1"
}
},
"node_modules/buffer": {
"version": "5.7.1",
"resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
"integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"dependencies": {
"base64-js": "^1.3.1",
"ieee754": "^1.1.13"
}
},
"node_modules/buffer-from": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
@@ -111,6 +176,48 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/chownr": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
"integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="
},
"node_modules/color": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz",
"integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==",
"dependencies": {
"color-convert": "^2.0.1",
"color-string": "^1.9.0"
},
"engines": {
"node": ">=12.5.0"
}
},
"node_modules/color-convert": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"dependencies": {
"color-name": "~1.1.4"
},
"engines": {
"node": ">=7.0.0"
}
},
"node_modules/color-name": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"node_modules/color-string": {
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz",
"integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
"dependencies": {
"color-name": "^1.0.0",
"simple-swizzle": "^0.2.2"
}
},
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@@ -167,6 +274,14 @@
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz",
"integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="
},
"node_modules/crc-32": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/crc-32/-/crc-32-0.3.0.tgz",
"integrity": "sha512-kucVIjOmMc1f0tv53BJ/5WIX+MGLcKuoBhnGqQrgKJNqLByb/sVMWfW/Aw6hw0jgcqjJ2pi9E5y32zOIpaUlsA==",
"engines": {
"node": ">=0.8"
}
},
"node_modules/debug": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
@@ -183,6 +298,28 @@
}
}
},
"node_modules/decompress-response": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz",
"integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==",
"dependencies": {
"mimic-response": "^3.1.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/deep-extend": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz",
"integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==",
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/depd": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
@@ -200,6 +337,14 @@
"npm": "1.2.8000 || >= 1.4.16"
}
},
"node_modules/detect-libc": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz",
"integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==",
"engines": {
"node": ">=8"
}
},
"node_modules/ee-first": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
@@ -213,6 +358,14 @@
"node": ">= 0.8"
}
},
"node_modules/end-of-stream": {
"version": "1.4.4",
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
"integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==",
"dependencies": {
"once": "^1.4.0"
}
},
"node_modules/escape-html": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
@@ -226,6 +379,14 @@
"node": ">= 0.6"
}
},
"node_modules/expand-template": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz",
"integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==",
"engines": {
"node": ">=6"
}
},
"node_modules/express": {
"version": "4.18.2",
"resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz",
@@ -345,6 +506,11 @@
"node": ">= 0.6"
}
},
"node_modules/fs-constants": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
"integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="
},
"node_modules/fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
@@ -368,6 +534,11 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/github-from-package": {
"version": "0.0.0",
"resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz",
"integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw=="
},
"node_modules/glob": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
@@ -435,6 +606,25 @@
"node": ">=0.10.0"
}
},
"node_modules/ieee754": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
"integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
@@ -449,6 +639,11 @@
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"node_modules/ini": {
"version": "1.3.8",
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="
},
"node_modules/ipaddr.js": {
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
@@ -457,11 +652,27 @@
"node": ">= 0.10"
}
},
"node_modules/is-arrayish": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz",
"integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="
},
"node_modules/isarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="
},
"node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/media-typer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
@@ -513,6 +724,17 @@
"node": ">= 0.6"
}
},
"node_modules/mimic-response": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz",
"integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==",
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
@@ -543,6 +765,11 @@
"mkdirp": "bin/cmd.js"
}
},
"node_modules/mkdirp-classic": {
"version": "0.5.3",
"resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz",
"integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="
},
"node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
@@ -565,6 +792,11 @@
"node": ">= 6.0.0"
}
},
"node_modules/napi-build-utils": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.2.tgz",
"integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg=="
},
"node_modules/negotiator": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz",
@@ -573,6 +805,22 @@
"node": ">= 0.6"
}
},
"node_modules/node-abi": {
"version": "3.31.0",
"resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.31.0.tgz",
"integrity": "sha512-eSKV6s+APenqVh8ubJyiu/YhZgxQpGP66ntzUb3lY1xB9ukSRaGnx0AIxI+IM+1+IVYC1oWobgG5L3Lt9ARykQ==",
"dependencies": {
"semver": "^7.3.5"
},
"engines": {
"node": ">=10"
}
},
"node_modules/node-addon-api": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
"integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA=="
},
"node_modules/node-rest-client": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/node-rest-client/-/node-rest-client-3.1.1.tgz",
@@ -642,6 +890,53 @@
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
"integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ=="
},
"node_modules/png-chunk-text": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/png-chunk-text/-/png-chunk-text-1.0.0.tgz",
"integrity": "sha512-DEROKU3SkkLGWNMzru3xPVgxyd48UGuMSZvioErCure6yhOc/pRH2ZV+SEn7nmaf7WNf3NdIpH+UTrRdKyq9Lw=="
},
"node_modules/png-chunks-encode": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/png-chunks-encode/-/png-chunks-encode-1.0.0.tgz",
"integrity": "sha512-J1jcHgbQRsIIgx5wxW9UmCymV3wwn4qCCJl6KYgEU/yHCh/L2Mwq/nMOkRPtmV79TLxRZj5w3tH69pvygFkDqA==",
"dependencies": {
"crc-32": "^0.3.0",
"sliced": "^1.0.1"
}
},
"node_modules/png-chunks-extract": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/png-chunks-extract/-/png-chunks-extract-1.0.0.tgz",
"integrity": "sha512-ZiVwF5EJ0DNZyzAqld8BP1qyJBaGOFaq9zl579qfbkcmOwWLLO4I9L8i2O4j3HkI6/35i0nKG2n+dZplxiT89Q==",
"dependencies": {
"crc-32": "^0.3.0"
}
},
"node_modules/prebuild-install": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.1.tgz",
"integrity": "sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==",
"dependencies": {
"detect-libc": "^2.0.0",
"expand-template": "^2.0.3",
"github-from-package": "0.0.0",
"minimist": "^1.2.3",
"mkdirp-classic": "^0.5.3",
"napi-build-utils": "^1.0.1",
"node-abi": "^3.3.0",
"pump": "^3.0.0",
"rc": "^1.2.7",
"simple-get": "^4.0.0",
"tar-fs": "^2.0.0",
"tunnel-agent": "^0.6.0"
},
"bin": {
"prebuild-install": "bin.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/process-nextick-args": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
@@ -659,6 +954,15 @@
"node": ">= 0.10"
}
},
"node_modules/pump": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
"integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==",
"dependencies": {
"end-of-stream": "^1.1.0",
"once": "^1.3.1"
}
},
"node_modules/qs": {
"version": "6.11.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz",
@@ -695,6 +999,20 @@
"node": ">= 0.8"
}
},
"node_modules/rc": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
"integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==",
"dependencies": {
"deep-extend": "^0.6.0",
"ini": "~1.3.0",
"minimist": "^1.2.0",
"strip-json-comments": "~2.0.1"
},
"bin": {
"rc": "cli.js"
}
},
"node_modules/readable-stream": {
"version": "2.3.7",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
@@ -757,6 +1075,20 @@
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"node_modules/semver": {
"version": "7.3.8",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/send": {
"version": "0.18.0",
"resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz",
@@ -817,6 +1149,28 @@
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
"integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="
},
"node_modules/sharp": {
"version": "0.31.3",
"resolved": "https://registry.npmjs.org/sharp/-/sharp-0.31.3.tgz",
"integrity": "sha512-XcR4+FCLBFKw1bdB+GEhnUNXNXvnt0tDo4WsBsraKymuo/IAuPuCBVAL2wIkUw2r/dwFW5Q5+g66Kwl2dgDFVg==",
"hasInstallScript": true,
"dependencies": {
"color": "^4.2.3",
"detect-libc": "^2.0.1",
"node-addon-api": "^5.0.0",
"prebuild-install": "^7.1.1",
"semver": "^7.3.8",
"simple-get": "^4.0.1",
"tar-fs": "^2.1.1",
"tunnel-agent": "^0.6.0"
},
"engines": {
"node": ">=14.15.0"
},
"funding": {
"url": "https://opencollective.com/libvips"
}
},
"node_modules/side-channel": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz",
@@ -830,6 +1184,62 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/simple-concat": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz",
"integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/simple-get": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz",
"integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"dependencies": {
"decompress-response": "^6.0.0",
"once": "^1.3.1",
"simple-concat": "^1.0.0"
}
},
"node_modules/simple-swizzle": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz",
"integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==",
"dependencies": {
"is-arrayish": "^0.3.1"
}
},
"node_modules/sliced": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz",
"integrity": "sha512-VZBmZP8WU3sMOZm1bdgTadsQbcscK0UM8oKxKVBs4XAhUo2Xxzm/OFMGBkPusxw9xL3Uy8LrzEqGqJhclsr0yA=="
},
"node_modules/statuses": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
@@ -859,6 +1269,53 @@
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
},
"node_modules/strip-json-comments": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
"integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/tar-fs": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz",
"integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==",
"dependencies": {
"chownr": "^1.1.1",
"mkdirp-classic": "^0.5.2",
"pump": "^3.0.0",
"tar-stream": "^2.1.4"
}
},
"node_modules/tar-stream": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz",
"integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==",
"dependencies": {
"bl": "^4.0.3",
"end-of-stream": "^1.4.1",
"fs-constants": "^1.0.0",
"inherits": "^2.0.3",
"readable-stream": "^3.1.1"
},
"engines": {
"node": ">=6"
}
},
"node_modules/tar-stream/node_modules/readable-stream": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
"integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
"dependencies": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/toidentifier": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
@@ -867,6 +1324,17 @@
"node": ">=0.6"
}
},
"node_modules/tunnel-agent": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
"integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==",
"dependencies": {
"safe-buffer": "^5.0.1"
},
"engines": {
"node": "*"
}
},
"node_modules/type-is": {
"version": "1.6.18",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
@@ -945,6 +1413,11 @@
"engines": {
"node": ">=0.4"
}
},
"node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
}
}
}

21
node_modules/base64-js/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Jameson Little
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

34
node_modules/base64-js/README.md generated vendored Normal file

@@ -0,0 +1,34 @@
base64-js
=========
`base64-js` does basic base64 encoding/decoding in pure JS.
[![build status](https://secure.travis-ci.org/beatgammit/base64-js.png)](http://travis-ci.org/beatgammit/base64-js)
Many browsers already have base64 encoding/decoding functionality, but it is for text data, not all-purpose binary data.
Sometimes encoding/decoding binary data in the browser is useful, and that is what this module does.
## install
With [npm](https://npmjs.org) do:
`npm install base64-js` and `var base64js = require('base64-js')`
For use in web browsers do:
`<script src="base64js.min.js"></script>`
[Get supported base64-js with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-base64-js?utm_source=npm-base64-js&utm_medium=referral&utm_campaign=readme)
## methods
`base64js` has three exposed functions, `byteLength`, `toByteArray` and `fromByteArray`, which each take a single argument; a short usage sketch follows the list below.
* `byteLength` - Takes a base64 string and returns the length of the decoded byte array
* `toByteArray` - Takes a base64 string and returns a byte array
* `fromByteArray` - Takes a byte array and returns a base64 string
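A minimal usage sketch (the sample base64 string is illustrative):
```js
var base64js = require('base64-js')

var bytes = base64js.toByteArray('aGVsbG8=') // Uint8Array [104, 101, 108, 108, 111], i.e. 'hello'
console.log(base64js.byteLength('aGVsbG8=')) // 5 - decoded length, computed without decoding
console.log(base64js.fromByteArray(bytes))   // 'aGVsbG8='
```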
## license
MIT

1
node_modules/base64-js/base64js.min.js generated vendored Normal file

@@ -0,0 +1 @@
(function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"==typeof window?"undefined"==typeof global?"undefined"==typeof self?this:self:global:window,b.base64js=a()}})(function(){return function(){function b(d,e,g){function a(j,i){if(!e[j]){if(!d[j]){var f="function"==typeof require&&require;if(!i&&f)return f(j,!0);if(h)return h(j,!0);var c=new Error("Cannot find module '"+j+"'");throw c.code="MODULE_NOT_FOUND",c}var k=e[j]={exports:{}};d[j][0].call(k.exports,function(b){var c=d[j][1][b];return a(c||b)},k,k.exports,b,d,e,g)}return e[j].exports}for(var h="function"==typeof require&&require,c=0;c<g.length;c++)a(g[c]);return a}return b}()({"/":[function(a,b,c){'use strict';function d(a){var b=a.length;if(0<b%4)throw new Error("Invalid string. Length must be a multiple of 4");var c=a.indexOf("=");-1===c&&(c=b);var d=c===b?0:4-c%4;return[c,d]}function e(a,b,c){return 3*(b+c)/4-c}function f(a){var b,c,f=d(a),g=f[0],h=f[1],j=new m(e(a,g,h)),k=0,n=0<h?g-4:g;for(c=0;c<n;c+=4)b=l[a.charCodeAt(c)]<<18|l[a.charCodeAt(c+1)]<<12|l[a.charCodeAt(c+2)]<<6|l[a.charCodeAt(c+3)],j[k++]=255&b>>16,j[k++]=255&b>>8,j[k++]=255&b;return 2===h&&(b=l[a.charCodeAt(c)]<<2|l[a.charCodeAt(c+1)]>>4,j[k++]=255&b),1===h&&(b=l[a.charCodeAt(c)]<<10|l[a.charCodeAt(c+1)]<<4|l[a.charCodeAt(c+2)]>>2,j[k++]=255&b>>8,j[k++]=255&b),j}function g(a){return k[63&a>>18]+k[63&a>>12]+k[63&a>>6]+k[63&a]}function h(a,b,c){for(var d,e=[],f=b;f<c;f+=3)d=(16711680&a[f]<<16)+(65280&a[f+1]<<8)+(255&a[f+2]),e.push(g(d));return e.join("")}function j(a){for(var b,c=a.length,d=c%3,e=[],f=16383,g=0,j=c-d;g<j;g+=f)e.push(h(a,g,g+f>j?j:g+f));return 1===d?(b=a[c-1],e.push(k[b>>2]+k[63&b<<4]+"==")):2===d&&(b=(a[c-2]<<8)+a[c-1],e.push(k[b>>10]+k[63&b>>4]+k[63&b<<2]+"=")),e.join("")}c.byteLength=function(a){var b=d(a),c=b[0],e=b[1];return 3*(c+e)/4-e},c.toByteArray=f,c.fromByteArray=j;for(var k=[],l=[],m="undefined"==typeof Uint8Array?Array:Uint8Array,n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",o=0,p=n.length;o<p;++o)k[o]=n[o],l[n.charCodeAt(o)]=o;l[45]=62,l[95]=63},{}]},{},[])("/")});

3
node_modules/base64-js/index.d.ts generated vendored Normal file

@@ -0,0 +1,3 @@
export function byteLength(b64: string): number;
export function toByteArray(b64: string): Uint8Array;
export function fromByteArray(uint8: Uint8Array): string;

150
node_modules/base64-js/index.js generated vendored Normal file

@@ -0,0 +1,150 @@
'use strict'
exports.byteLength = byteLength
exports.toByteArray = toByteArray
exports.fromByteArray = fromByteArray
var lookup = []
var revLookup = []
var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array
var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
for (var i = 0, len = code.length; i < len; ++i) {
lookup[i] = code[i]
revLookup[code.charCodeAt(i)] = i
}
// Support decoding URL-safe base64 strings, as Node.js does.
// See: https://en.wikipedia.org/wiki/Base64#URL_applications
revLookup['-'.charCodeAt(0)] = 62
revLookup['_'.charCodeAt(0)] = 63
function getLens (b64) {
var len = b64.length
if (len % 4 > 0) {
throw new Error('Invalid string. Length must be a multiple of 4')
}
// Trim off extra bytes after placeholder bytes are found
// See: https://github.com/beatgammit/base64-js/issues/42
var validLen = b64.indexOf('=')
if (validLen === -1) validLen = len
var placeHoldersLen = validLen === len
? 0
: 4 - (validLen % 4)
return [validLen, placeHoldersLen]
}
// base64 is 4/3 + up to two characters of the original data
function byteLength (b64) {
var lens = getLens(b64)
var validLen = lens[0]
var placeHoldersLen = lens[1]
return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
}
function _byteLength (b64, validLen, placeHoldersLen) {
return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen
}
function toByteArray (b64) {
var tmp
var lens = getLens(b64)
var validLen = lens[0]
var placeHoldersLen = lens[1]
var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen))
var curByte = 0
// if there are placeholders, only get up to the last complete 4 chars
var len = placeHoldersLen > 0
? validLen - 4
: validLen
var i
for (i = 0; i < len; i += 4) {
tmp =
(revLookup[b64.charCodeAt(i)] << 18) |
(revLookup[b64.charCodeAt(i + 1)] << 12) |
(revLookup[b64.charCodeAt(i + 2)] << 6) |
revLookup[b64.charCodeAt(i + 3)]
arr[curByte++] = (tmp >> 16) & 0xFF
arr[curByte++] = (tmp >> 8) & 0xFF
arr[curByte++] = tmp & 0xFF
}
if (placeHoldersLen === 2) {
tmp =
(revLookup[b64.charCodeAt(i)] << 2) |
(revLookup[b64.charCodeAt(i + 1)] >> 4)
arr[curByte++] = tmp & 0xFF
}
if (placeHoldersLen === 1) {
tmp =
(revLookup[b64.charCodeAt(i)] << 10) |
(revLookup[b64.charCodeAt(i + 1)] << 4) |
(revLookup[b64.charCodeAt(i + 2)] >> 2)
arr[curByte++] = (tmp >> 8) & 0xFF
arr[curByte++] = tmp & 0xFF
}
return arr
}
function tripletToBase64 (num) {
return lookup[num >> 18 & 0x3F] +
lookup[num >> 12 & 0x3F] +
lookup[num >> 6 & 0x3F] +
lookup[num & 0x3F]
}
function encodeChunk (uint8, start, end) {
var tmp
var output = []
for (var i = start; i < end; i += 3) {
tmp =
((uint8[i] << 16) & 0xFF0000) +
((uint8[i + 1] << 8) & 0xFF00) +
(uint8[i + 2] & 0xFF)
output.push(tripletToBase64(tmp))
}
return output.join('')
}
function fromByteArray (uint8) {
var tmp
var len = uint8.length
var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes
var parts = []
var maxChunkLength = 16383 // must be multiple of 3
// go through the array every three bytes, we'll deal with trailing stuff later
for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))
}
// pad the end with zeros, but make sure to not forget the extra bytes
if (extraBytes === 1) {
tmp = uint8[len - 1]
parts.push(
lookup[tmp >> 2] +
lookup[(tmp << 4) & 0x3F] +
'=='
)
} else if (extraBytes === 2) {
tmp = (uint8[len - 2] << 8) + uint8[len - 1]
parts.push(
lookup[tmp >> 10] +
lookup[(tmp >> 4) & 0x3F] +
lookup[(tmp << 2) & 0x3F] +
'='
)
}
return parts.join('')
}

47
node_modules/base64-js/package.json generated vendored Normal file

@@ -0,0 +1,47 @@
{
"name": "base64-js",
"description": "Base64 encoding/decoding in pure JS",
"version": "1.5.1",
"author": "T. Jameson Little <t.jameson.little@gmail.com>",
"typings": "index.d.ts",
"bugs": {
"url": "https://github.com/beatgammit/base64-js/issues"
},
"devDependencies": {
"babel-minify": "^0.5.1",
"benchmark": "^2.1.4",
"browserify": "^16.3.0",
"standard": "*",
"tape": "4.x"
},
"homepage": "https://github.com/beatgammit/base64-js",
"keywords": [
"base64"
],
"license": "MIT",
"main": "index.js",
"repository": {
"type": "git",
"url": "git://github.com/beatgammit/base64-js.git"
},
"scripts": {
"build": "browserify -s base64js -r ./ | minify > base64js.min.js",
"lint": "standard",
"test": "npm run lint && npm run unit",
"unit": "tape test/*.js"
},
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
}

17
node_modules/bl/.travis.yml generated vendored Normal file

@@ -0,0 +1,17 @@
sudo: false
arch:
- amd64
- ppc64le
language: node_js
node_js:
- '6'
- '8'
- '10'
- '12'
- '14'
- '15'
- lts/*
notifications:
email:
- rod@vagg.org
- matteo.collina@gmail.com

396
node_modules/bl/BufferList.js generated vendored Normal file

@@ -0,0 +1,396 @@
'use strict'
const { Buffer } = require('buffer')
const symbol = Symbol.for('BufferList')
function BufferList (buf) {
if (!(this instanceof BufferList)) {
return new BufferList(buf)
}
BufferList._init.call(this, buf)
}
BufferList._init = function _init (buf) {
Object.defineProperty(this, symbol, { value: true })
this._bufs = []
this.length = 0
if (buf) {
this.append(buf)
}
}
BufferList.prototype._new = function _new (buf) {
return new BufferList(buf)
}
BufferList.prototype._offset = function _offset (offset) {
if (offset === 0) {
return [0, 0]
}
let tot = 0
for (let i = 0; i < this._bufs.length; i++) {
const _t = tot + this._bufs[i].length
if (offset < _t || i === this._bufs.length - 1) {
return [i, offset - tot]
}
tot = _t
}
}
BufferList.prototype._reverseOffset = function (blOffset) {
const bufferId = blOffset[0]
let offset = blOffset[1]
for (let i = 0; i < bufferId; i++) {
offset += this._bufs[i].length
}
return offset
}
BufferList.prototype.get = function get (index) {
if (index > this.length || index < 0) {
return undefined
}
const offset = this._offset(index)
return this._bufs[offset[0]][offset[1]]
}
BufferList.prototype.slice = function slice (start, end) {
if (typeof start === 'number' && start < 0) {
start += this.length
}
if (typeof end === 'number' && end < 0) {
end += this.length
}
return this.copy(null, 0, start, end)
}
BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
if (typeof srcStart !== 'number' || srcStart < 0) {
srcStart = 0
}
if (typeof srcEnd !== 'number' || srcEnd > this.length) {
srcEnd = this.length
}
if (srcStart >= this.length) {
return dst || Buffer.alloc(0)
}
if (srcEnd <= 0) {
return dst || Buffer.alloc(0)
}
const copy = !!dst
const off = this._offset(srcStart)
const len = srcEnd - srcStart
let bytes = len
let bufoff = (copy && dstStart) || 0
let start = off[1]
// copy/slice everything
if (srcStart === 0 && srcEnd === this.length) {
if (!copy) {
// slice, but full concat if multiple buffers
return this._bufs.length === 1
? this._bufs[0]
: Buffer.concat(this._bufs, this.length)
}
// copy, need to copy individual buffers
for (let i = 0; i < this._bufs.length; i++) {
this._bufs[i].copy(dst, bufoff)
bufoff += this._bufs[i].length
}
return dst
}
// easy, cheap case where it's a subset of one of the buffers
if (bytes <= this._bufs[off[0]].length - start) {
return copy
? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
: this._bufs[off[0]].slice(start, start + bytes)
}
if (!copy) {
// a slice, we need something to copy in to
dst = Buffer.allocUnsafe(len)
}
for (let i = off[0]; i < this._bufs.length; i++) {
const l = this._bufs[i].length - start
if (bytes > l) {
this._bufs[i].copy(dst, bufoff, start)
bufoff += l
} else {
this._bufs[i].copy(dst, bufoff, start, start + bytes)
bufoff += l
break
}
bytes -= l
if (start) {
start = 0
}
}
// safeguard so that we don't return uninitialized memory
if (dst.length > bufoff) return dst.slice(0, bufoff)
return dst
}
BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
start = start || 0
end = typeof end !== 'number' ? this.length : end
if (start < 0) {
start += this.length
}
if (end < 0) {
end += this.length
}
if (start === end) {
return this._new()
}
const startOffset = this._offset(start)
const endOffset = this._offset(end)
const buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
if (endOffset[1] === 0) {
buffers.pop()
} else {
buffers[buffers.length - 1] = buffers[buffers.length - 1].slice(0, endOffset[1])
}
if (startOffset[1] !== 0) {
buffers[0] = buffers[0].slice(startOffset[1])
}
return this._new(buffers)
}
BufferList.prototype.toString = function toString (encoding, start, end) {
return this.slice(start, end).toString(encoding)
}
BufferList.prototype.consume = function consume (bytes) {
// first, normalize the argument, in accordance with how Buffer does it
bytes = Math.trunc(bytes)
// do nothing if not a positive number
if (Number.isNaN(bytes) || bytes <= 0) return this
while (this._bufs.length) {
if (bytes >= this._bufs[0].length) {
bytes -= this._bufs[0].length
this.length -= this._bufs[0].length
this._bufs.shift()
} else {
this._bufs[0] = this._bufs[0].slice(bytes)
this.length -= bytes
break
}
}
return this
}
BufferList.prototype.duplicate = function duplicate () {
const copy = this._new()
for (let i = 0; i < this._bufs.length; i++) {
copy.append(this._bufs[i])
}
return copy
}
BufferList.prototype.append = function append (buf) {
if (buf == null) {
return this
}
if (buf.buffer) {
// append a view of the underlying ArrayBuffer
this._appendBuffer(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength))
} else if (Array.isArray(buf)) {
for (let i = 0; i < buf.length; i++) {
this.append(buf[i])
}
} else if (this._isBufferList(buf)) {
// unwrap argument into individual BufferLists
for (let i = 0; i < buf._bufs.length; i++) {
this.append(buf._bufs[i])
}
} else {
// coerce number arguments to strings, since Buffer(number) does
// uninitialized memory allocation
if (typeof buf === 'number') {
buf = buf.toString()
}
this._appendBuffer(Buffer.from(buf))
}
return this
}
BufferList.prototype._appendBuffer = function appendBuffer (buf) {
this._bufs.push(buf)
this.length += buf.length
}
BufferList.prototype.indexOf = function (search, offset, encoding) {
if (encoding === undefined && typeof offset === 'string') {
encoding = offset
offset = undefined
}
if (typeof search === 'function' || Array.isArray(search)) {
throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.')
} else if (typeof search === 'number') {
search = Buffer.from([search])
} else if (typeof search === 'string') {
search = Buffer.from(search, encoding)
} else if (this._isBufferList(search)) {
search = search.slice()
} else if (Array.isArray(search.buffer)) {
search = Buffer.from(search.buffer, search.byteOffset, search.byteLength)
} else if (!Buffer.isBuffer(search)) {
search = Buffer.from(search)
}
offset = Number(offset || 0)
if (isNaN(offset)) {
offset = 0
}
if (offset < 0) {
offset = this.length + offset
}
if (offset < 0) {
offset = 0
}
if (search.length === 0) {
return offset > this.length ? this.length : offset
}
const blOffset = this._offset(offset)
let blIndex = blOffset[0] // index of which internal buffer we're working on
let buffOffset = blOffset[1] // offset of the internal buffer we're working on
// scan over each buffer
for (; blIndex < this._bufs.length; blIndex++) {
const buff = this._bufs[blIndex]
while (buffOffset < buff.length) {
const availableWindow = buff.length - buffOffset
if (availableWindow >= search.length) {
const nativeSearchResult = buff.indexOf(search, buffOffset)
if (nativeSearchResult !== -1) {
return this._reverseOffset([blIndex, nativeSearchResult])
}
buffOffset = buff.length - search.length + 1 // end of native search window
} else {
const revOffset = this._reverseOffset([blIndex, buffOffset])
if (this._match(revOffset, search)) {
return revOffset
}
buffOffset++
}
}
buffOffset = 0
}
return -1
}
BufferList.prototype._match = function (offset, search) {
if (this.length - offset < search.length) {
return false
}
for (let searchOffset = 0; searchOffset < search.length; searchOffset++) {
if (this.get(offset + searchOffset) !== search[searchOffset]) {
return false
}
}
return true
}
;(function () {
const methods = {
readDoubleBE: 8,
readDoubleLE: 8,
readFloatBE: 4,
readFloatLE: 4,
readInt32BE: 4,
readInt32LE: 4,
readUInt32BE: 4,
readUInt32LE: 4,
readInt16BE: 2,
readInt16LE: 2,
readUInt16BE: 2,
readUInt16LE: 2,
readInt8: 1,
readUInt8: 1,
readIntBE: null,
readIntLE: null,
readUIntBE: null,
readUIntLE: null
}
for (const m in methods) {
(function (m) {
if (methods[m] === null) {
BufferList.prototype[m] = function (offset, byteLength) {
return this.slice(offset, offset + byteLength)[m](0, byteLength)
}
} else {
BufferList.prototype[m] = function (offset = 0) {
return this.slice(offset, offset + methods[m])[m](0)
}
}
}(m))
}
}())
// Used internally by the class and also as an indicator of this object being
// a `BufferList`. It's not possible to use `instanceof BufferList` in a browser
// environment because there could be multiple different copies of the
// BufferList class and some `BufferList`s might not be instances of this copy of the BufferList class.
BufferList.prototype._isBufferList = function _isBufferList (b) {
return b instanceof BufferList || BufferList.isBufferList(b)
}
BufferList.isBufferList = function isBufferList (b) {
return b != null && b[symbol]
}
module.exports = BufferList

13
node_modules/bl/LICENSE.md generated vendored Normal file

@@ -0,0 +1,13 @@
The MIT License (MIT)
=====================
Copyright (c) 2013-2019 bl contributors
----------------------------------
*bl contributors listed at <https://github.com/rvagg/bl#contributors>*
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

247
node_modules/bl/README.md generated vendored Normal file

@@ -0,0 +1,247 @@
# bl *(BufferList)*
[![Build Status](https://api.travis-ci.com/rvagg/bl.svg?branch=master)](https://travis-ci.com/rvagg/bl/)
**A Node.js Buffer list collector, reader and streamer thingy.**
[![NPM](https://nodei.co/npm/bl.svg)](https://nodei.co/npm/bl/)
**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them!
The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently.
```js
const { BufferList } = require('bl')
const bl = new BufferList()
bl.append(Buffer.from('abcd'))
bl.append(Buffer.from('efg'))
bl.append('hi') // bl will also accept & convert Strings
bl.append(Buffer.from('j'))
bl.append(Buffer.from([ 0x3, 0x4 ]))
console.log(bl.length) // 12
console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij'
console.log(bl.slice(3, 10).toString('ascii')) // 'defghij'
console.log(bl.slice(3, 6).toString('ascii')) // 'def'
console.log(bl.slice(3, 8).toString('ascii')) // 'defgh'
console.log(bl.slice(5, 10).toString('ascii')) // 'fghij'
console.log(bl.indexOf('def')) // 3
console.log(bl.indexOf('asdf')) // -1
// or just use toString!
console.log(bl.toString()) // 'abcdefghij\u0003\u0004'
console.log(bl.toString('ascii', 3, 8)) // 'defgh'
console.log(bl.toString('ascii', 5, 10)) // 'fghij'
// other standard Buffer readables
console.log(bl.readUInt16BE(10)) // 0x0304
console.log(bl.readUInt16LE(10)) // 0x0403
```
Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**:
```js
const { BufferListStream } = require('bl')
const fs = require('fs')
fs.createReadStream('README.md')
.pipe(BufferListStream((err, data) => { // note 'new' isn't strictly required
// `data` is a complete Buffer object containing the full data
console.log(data.toString())
}))
```
Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream.
Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!):
```js
const hyperquest = require('hyperquest')
const { BufferListStream } = require('bl')
const url = 'https://raw.github.com/rvagg/bl/master/README.md'
hyperquest(url).pipe(BufferListStream((err, data) => {
console.log(data.toString())
}))
```
Or, use it as a readable stream to recompose a list of Buffers to an output source:
```js
const { BufferListStream } = require('bl')
const fs = require('fs')
var bl = new BufferListStream()
bl.append(Buffer.from('abcd'))
bl.append(Buffer.from('efg'))
bl.append(Buffer.from('hi'))
bl.append(Buffer.from('j'))
bl.pipe(fs.createWriteStream('gibberish.txt'))
```
## API
* <a href="#ctor"><code><b>new BufferList([ buf ])</b></code></a>
* <a href="#isBufferList"><code><b>BufferList.isBufferList(obj)</b></code></a>
* <a href="#length"><code>bl.<b>length</b></code></a>
* <a href="#append"><code>bl.<b>append(buffer)</b></code></a>
* <a href="#get"><code>bl.<b>get(index)</b></code></a>
* <a href="#indexOf"><code>bl.<b>indexOf(value[, byteOffset][, encoding])</b></code></a>
* <a href="#slice"><code>bl.<b>slice([ start[, end ] ])</b></code></a>
* <a href="#shallowSlice"><code>bl.<b>shallowSlice([ start[, end ] ])</b></code></a>
* <a href="#copy"><code>bl.<b>copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])</b></code></a>
* <a href="#duplicate"><code>bl.<b>duplicate()</b></code></a>
* <a href="#consume"><code>bl.<b>consume(bytes)</b></code></a>
* <a href="#toString"><code>bl.<b>toString([encoding, [ start, [ end ]]])</b></code></a>
* <a href="#readXX"><code>bl.<b>readDoubleBE()</b></code>, <code>bl.<b>readDoubleLE()</b></code>, <code>bl.<b>readFloatBE()</b></code>, <code>bl.<b>readFloatLE()</b></code>, <code>bl.<b>readInt32BE()</b></code>, <code>bl.<b>readInt32LE()</b></code>, <code>bl.<b>readUInt32BE()</b></code>, <code>bl.<b>readUInt32LE()</b></code>, <code>bl.<b>readInt16BE()</b></code>, <code>bl.<b>readInt16LE()</b></code>, <code>bl.<b>readUInt16BE()</b></code>, <code>bl.<b>readUInt16LE()</b></code>, <code>bl.<b>readInt8()</b></code>, <code>bl.<b>readUInt8()</b></code></a>
* <a href="#ctorStream"><code><b>new BufferListStream([ callback ])</b></code></a>
--------------------------------------------------------
<a name="ctor"></a>
### new BufferList([ Buffer | Buffer array | BufferList | BufferList array | String ])
No arguments are _required_ for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects.
`new` is not strictly required; if you don't instantiate a new object, it will be done automatically for you, so you can create a new instance simply with:
```js
const { BufferList } = require('bl')
const bl = BufferList()
// equivalent to:
const { BufferList } = require('bl')
const bl = new BufferList()
```
--------------------------------------------------------
<a name="isBufferList"></a>
### BufferList.isBufferList(obj)
Determines if the passed object is a `BufferList`. It will return `true` if the passed object is an instance of `BufferList` **or** `BufferListStream` and `false` otherwise.
N.B. this won't return `true` for `BufferList` or `BufferListStream` instances created by versions of this library before this static method was added.
--------------------------------------------------------
<a name="length"></a>
### bl.length
Get the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. Should accurately represent the total number of bytes that can be read from the list.
--------------------------------------------------------
<a name="append"></a>
### bl.append(Buffer | Buffer array | BufferList | BufferList array | String)
`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained.
--------------------------------------------------------
<a name="get"></a>
### bl.get(index)
`get()` will return the byte at the specified index.
--------------------------------------------------------
<a name="indexOf"></a>
### bl.indexOf(value[, byteOffset][, encoding])
The `indexOf()` method returns the first index at which a given element can be found in the BufferList, or -1 if it is not present.
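A short sketch of `indexOf()` (the buffers, values and offsets here are illustrative):
```js
const { BufferList } = require('bl')
const bl = new BufferList([Buffer.from('abc'), Buffer.from('def')])

console.log(bl.indexOf('cd'))              // 2 - the match spans two internal buffers
console.log(bl.indexOf('d', 4))            // -1 - the search starts at byteOffset 4
console.log(bl.indexOf(Buffer.from('ef'))) // 4 - Buffer and Uint8Array values also work
```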
--------------------------------------------------------
<a name="slice"></a>
### bl.slice([ start, [ end ] ])
`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer.
--------------------------------------------------------
<a name="shallowSlice"></a>
### bl.shallowSlice([ start, [ end ] ])
`shallowSlice()` returns a new `BufferList` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
No copies will be performed. All buffers in the result share memory with the original list.
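A small sketch of the shared-memory behaviour (the buffer and mutation shown are illustrative):
```js
const { BufferList } = require('bl')
const buf = Buffer.from('abcdef')
const view = new BufferList(buf).shallowSlice(0, 3) // shares memory with `buf`

buf[0] = 0x7a                // mutate the original buffer (0x7a is 'z')
console.log(view.toString()) // 'zbc' - the change is visible through the shallow slice
```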
--------------------------------------------------------
<a name="copy"></a>
### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])
`copy()` copies the content of the list into the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `srcStart` and `srcEnd` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively.
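A brief sketch of `copy()` into a pre-allocated destination (the buffers and offsets are illustrative):
```js
const { BufferList } = require('bl')
const bl = new BufferList([Buffer.from('abcd'), Buffer.from('ef')])

const dest = Buffer.alloc(8, 0x2e) // '........'
bl.copy(dest, 2, 1, 5)             // bytes 1..5 of the list ('bcde') into dest at offset 2
console.log(dest.toString())       // '..bcde..'
```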
--------------------------------------------------------
<a name="duplicate"></a>
### bl.duplicate()
`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remain the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list. Example:
```js
var bl = new BufferListStream()
bl.append('hello')
bl.append(' world')
bl.append('\n')
bl.duplicate().pipe(process.stdout, { end: false })
console.log(bl.toString())
```
--------------------------------------------------------
<a name="consume"></a>
### bl.consume(bytes)
`consume()` will shift bytes *off the start of the list*. The number of bytes consumed don't need to line up with the sizes of the internal Buffers&mdash;initial offsets will be calculated accordingly in order to give you a consistent view of the data.
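A short sketch of `consume()` crossing an internal buffer boundary (the buffers are illustrative):
```js
const { BufferList } = require('bl')
const bl = new BufferList([Buffer.from('abcd'), Buffer.from('efgh')])

bl.consume(6)              // discards 'abcdef', spanning both internal buffers
console.log(bl.toString()) // 'gh'
console.log(bl.length)     // 2
```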
--------------------------------------------------------
<a name="toString"></a>
### bl.toString([encoding, [ start, [ end ]]])
`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information.
--------------------------------------------------------
<a name="readXX"></a>
### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8()
All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently.
See the <b><code>[Buffer](http://nodejs.org/docs/latest/api/buffer.html)</code></b> documentation for how these work.
--------------------------------------------------------
<a name="ctorStream"></a>
### new BufferListStream([ callback | Buffer | Buffer array | BufferList | BufferList array | String ])
**BufferListStream** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **BufferListStream** instance.
The constructor takes an optional callback; if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream.
Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects.
`new` is not strictly required; if you don't instantiate a new object, it will be done automatically for you, so you can create a new instance simply with:
```js
const { BufferListStream } = require('bl')
const bl = BufferListStream()
// equivalent to:
const { BufferListStream } = require('bl')
const bl = new BufferListStream()
```
N.B. For backwards compatibility reasons, `BufferListStream` is the **default** export when you `require('bl')`:
```js
const { BufferListStream } = require('bl')
// equivalent to:
const BufferListStream = require('bl')
```
--------------------------------------------------------
## Contributors
**bl** is brought to you by the following hackers:
* [Rod Vagg](https://github.com/rvagg)
* [Matteo Collina](https://github.com/mcollina)
* [Jarett Cruger](https://github.com/jcrugzz)
<a name="license"></a>
## License &amp; copyright
Copyright (c) 2013-2019 bl contributors (listed above).
bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details.
84
node_modules/bl/bl.js generated vendored Normal file
@@ -0,0 +1,84 @@
'use strict'
const DuplexStream = require('readable-stream').Duplex
const inherits = require('inherits')
const BufferList = require('./BufferList')
function BufferListStream (callback) {
if (!(this instanceof BufferListStream)) {
return new BufferListStream(callback)
}
if (typeof callback === 'function') {
this._callback = callback
const piper = function piper (err) {
if (this._callback) {
this._callback(err)
this._callback = null
}
}.bind(this)
this.on('pipe', function onPipe (src) {
src.on('error', piper)
})
this.on('unpipe', function onUnpipe (src) {
src.removeListener('error', piper)
})
callback = null
}
BufferList._init.call(this, callback)
DuplexStream.call(this)
}
inherits(BufferListStream, DuplexStream)
Object.assign(BufferListStream.prototype, BufferList.prototype)
BufferListStream.prototype._new = function _new (callback) {
return new BufferListStream(callback)
}
BufferListStream.prototype._write = function _write (buf, encoding, callback) {
this._appendBuffer(buf)
if (typeof callback === 'function') {
callback()
}
}
BufferListStream.prototype._read = function _read (size) {
if (!this.length) {
return this.push(null)
}
size = Math.min(size, this.length)
this.push(this.slice(0, size))
this.consume(size)
}
BufferListStream.prototype.end = function end (chunk) {
DuplexStream.prototype.end.call(this, chunk)
if (this._callback) {
this._callback(null, this.slice())
this._callback = null
}
}
BufferListStream.prototype._destroy = function _destroy (err, cb) {
this._bufs.length = 0
this.length = 0
cb(err)
}
BufferListStream.prototype._isBufferList = function _isBufferList (b) {
return b instanceof BufferListStream || b instanceof BufferList || BufferListStream.isBufferList(b)
}
BufferListStream.isBufferList = BufferList.isBufferList
module.exports = BufferListStream
module.exports.BufferListStream = BufferListStream
module.exports.BufferList = BufferList
@@ -0,0 +1,38 @@
# Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
* (a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
* (b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
* (c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
* (d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.
## Moderation Policy
The [Node.js Moderation Policy] applies to this WG.
## Code of Conduct
The [Node.js Code of Conduct][] applies to this WG.
[Node.js Code of Conduct]:
https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md
[Node.js Moderation Policy]:
https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md
@@ -0,0 +1,136 @@
### Streams Working Group
The Node.js Streams project is jointly governed by a Working Group (WG)
that is responsible for high-level guidance of the project.
The WG has final authority over this project including:
* Technical direction
* Project governance and process (including this policy)
* Contribution policy
* GitHub repository hosting
* Conduct guidelines
* Maintaining the list of additional Collaborators
For the current list of WG members, see the project
[README.md](./README.md#current-project-team-members).
### Collaborators
The readable-stream GitHub repository is
maintained by the WG and additional Collaborators who are added by the
WG on an ongoing basis.
Individuals making significant and valuable contributions are made
Collaborators and given commit-access to the project. These
individuals are identified by the WG and their addition as
Collaborators is discussed during the WG meeting.
_Note:_ If you make a significant contribution and are not considered
for commit-access, log an issue or contact a WG member directly and it
will be brought up in the next WG meeting.
Modifications of the contents of the readable-stream repository are
made on
a collaborative basis. Anybody with a GitHub account may propose a
modification via pull request and it will be considered by the project
Collaborators. All pull requests must be reviewed and accepted by a
Collaborator with sufficient expertise who is able to take full
responsibility for the change. In the case of pull requests proposed
by an existing Collaborator, an additional Collaborator is required
for sign-off. Consensus should be sought if additional Collaborators
participate and there is disagreement around a particular
modification. See _Consensus Seeking Process_ below for further detail
on the consensus model used for governance.
Collaborators may opt to elevate significant or controversial
modifications, or modifications that have not found consensus to the
WG for discussion by assigning the ***WG-agenda*** tag to a pull
request or issue. The WG should serve as the final arbiter where
required.
For the current list of Collaborators, see the project
[README.md](./README.md#members).
### WG Membership
WG seats are not time-limited. There is no fixed size of the WG.
However, the expected target is between 6 and 12, to ensure adequate
coverage of important areas of expertise, balanced with the ability to
make decisions efficiently.
There is no specific set of requirements or qualifications for WG
membership beyond these rules.
The WG may add additional members to the WG by unanimous consensus.
A WG member may be removed from the WG by voluntary resignation, or by
unanimous consensus of all other WG members.
Changes to WG membership should be posted in the agenda, and may be
suggested as any other agenda item (see "WG Meetings" below).
If an addition or removal is proposed during a meeting, and the full
WG is not in attendance to participate, then the addition or removal
is added to the agenda for the subsequent meeting. This is to ensure
that all members are given the opportunity to participate in all
membership decisions. If a WG member is unable to attend a meeting
where a planned membership decision is being made, then their consent
is assumed.
No more than 1/3 of the WG members may be affiliated with the same
employer. If removal or resignation of a WG member, or a change of
employment by a WG member, creates a situation where more than 1/3 of
the WG membership shares an employer, then the situation must be
immediately remedied by the resignation or removal of one or more WG
members affiliated with the over-represented employer(s).
### WG Meetings
The WG meets occasionally on a Google Hangout On Air. A designated moderator
approved by the WG runs the meeting. Each meeting should be
published to YouTube.
Items are added to the WG agenda that are considered contentious or
are modifications of governance, contribution policy, WG membership,
or release process.
The intention of the agenda is not to approve or review all patches;
that should happen continuously on GitHub and be handled by the larger
group of Collaborators.
Any community member or contributor can ask that something be added to
the next meeting's agenda by logging a GitHub Issue. Any Collaborator,
WG member or the moderator can add the item to the agenda by adding
the ***WG-agenda*** tag to the issue.
Prior to each WG meeting the moderator will share the Agenda with
members of the WG. WG members can add any items they like to the
agenda at the beginning of each meeting. The moderator and the WG
cannot veto or remove items.
The WG may invite persons or representatives from certain projects to
participate in a non-voting capacity.
The moderator is responsible for summarizing the discussion of each
agenda item and sends it as a pull request after the meeting.
### Consensus Seeking Process
The WG follows a
[Consensus
Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making)
decision-making model.
When an agenda item has appeared to reach a consensus the moderator
will ask "Does anyone object?" as a final call for dissent from the
consensus.
If an agenda item cannot reach a consensus a WG member can call for
either a closing vote or a vote to table the issue to the next
meeting. The call for a vote must be seconded by a majority of the WG
or else the discussion will continue. Simple majority wins.
Note that changes to WG membership require a majority consensus. See
"WG Membership" above.

47
node_modules/bl/node_modules/readable-stream/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,47 @@
Node.js is licensed for use as follows:
"""
Copyright Node.js contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
This license applies to parts of Node.js originating from the
https://github.com/joyent/node repository:
"""
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""

106
node_modules/bl/node_modules/readable-stream/README.md generated vendored Normal file
View File

@@ -0,0 +1,106 @@
# readable-stream
***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream)
[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/)
[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/)
[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream)
```bash
npm install --save readable-stream
```
This package is a mirror of the streams implementations in Node.js.
Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.19.0/docs/api/stream.html).
If you want to guarantee a stable streams base, regardless of what version of
Node you, or the users of your libraries, are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core; for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
As of version 2.0.0 **readable-stream** uses semantic versioning.
## Version 3.x.x
v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and the latest Safari. The breaking changes introduced by v3 are the combined breaking changes of [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows:
1. Error codes: https://github.com/nodejs/node/pull/13310,
https://github.com/nodejs/node/pull/13291,
https://github.com/nodejs/node/pull/16589,
https://github.com/nodejs/node/pull/15042,
https://github.com/nodejs/node/pull/15665,
https://github.com/nodejs/readable-stream/pull/344
2. 'readable' has precedence over flowing
https://github.com/nodejs/node/pull/18994
3. make virtual methods errors consistent
https://github.com/nodejs/node/pull/18813
4. updated streams error handling
https://github.com/nodejs/node/pull/18438
5. writable.end should return this.
https://github.com/nodejs/node/pull/18780
6. readable continues to read when push('')
https://github.com/nodejs/node/pull/18211
7. add custom inspect to BufferList
https://github.com/nodejs/node/pull/17907
8. always defer 'readable' with nextTick
https://github.com/nodejs/node/pull/17979
## Version 2.x.x
v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11.
### Big Thanks
Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce]
# Usage
You can swap your `require('stream')` with `require('readable-stream')`
without any changes, if you are just using one of the main classes and
functions.
```js
const {
Readable,
Writable,
Transform,
Duplex,
pipeline,
finished
} = require('readable-stream')
```
Note that `require('stream')` will return `Stream`, while
`require('readable-stream')` will return `Readable`. We discourage using
the default export directly; instead, use one of the named properties as
shown in the example above.
# Streams Working Group
`readable-stream` is maintained by the Streams Working Group, which
oversees the development and maintenance of the Streams API within
Node.js. The responsibilities of the Streams Working Group include:
* Addressing stream issues on the Node.js issue tracker.
* Authoring and editing stream documentation within the Node.js project.
* Reviewing changes to stream subclasses within the Node.js project.
* Redirecting changes to streams from the Node.js project to this
project.
* Assisting in the implementation of stream providers within Node.js.
* Recommending versions of `readable-stream` to be included in Node.js.
* Messaging about the future of streams to give the community advance
notice of changes.
<a name="members"></a>
## Team Members
* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) &lt;calvin.metcalf@gmail.com&gt;
- Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) &lt;mathiasbuus@gmail.com&gt;
* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) &lt;matteo.collina@gmail.com&gt;
- Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) &lt;shestak.irina@gmail.com&gt;
* **Yoshua Wuyts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) &lt;yoshuawuyts@gmail.com&gt;
[sauce]: https://saucelabs.com
@@ -0,0 +1,127 @@
'use strict';
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
var codes = {};
function createErrorType(code, message, Base) {
if (!Base) {
Base = Error;
}
function getMessage(arg1, arg2, arg3) {
if (typeof message === 'string') {
return message;
} else {
return message(arg1, arg2, arg3);
}
}
var NodeError =
/*#__PURE__*/
function (_Base) {
_inheritsLoose(NodeError, _Base);
function NodeError(arg1, arg2, arg3) {
return _Base.call(this, getMessage(arg1, arg2, arg3)) || this;
}
return NodeError;
}(Base);
NodeError.prototype.name = Base.name;
NodeError.prototype.code = code;
codes[code] = NodeError;
} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
function oneOf(expected, thing) {
if (Array.isArray(expected)) {
var len = expected.length;
expected = expected.map(function (i) {
return String(i);
});
if (len > 2) {
return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1];
} else if (len === 2) {
return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]);
} else {
return "of ".concat(thing, " ").concat(expected[0]);
}
} else {
return "of ".concat(thing, " ").concat(String(expected));
}
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
function startsWith(str, search, pos) {
return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
function endsWith(str, search, this_len) {
if (this_len === undefined || this_len > str.length) {
this_len = str.length;
}
return str.substring(this_len - search.length, this_len) === search;
} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
function includes(str, search, start) {
if (typeof start !== 'number') {
start = 0;
}
if (start + search.length > str.length) {
return false;
} else {
return str.indexOf(search, start) !== -1;
}
}
createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
return 'The value "' + value + '" is invalid for option "' + name + '"';
}, TypeError);
createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
// determiner: 'must be' or 'must not be'
var determiner;
if (typeof expected === 'string' && startsWith(expected, 'not ')) {
determiner = 'must not be';
expected = expected.replace(/^not /, '');
} else {
determiner = 'must be';
}
var msg;
if (endsWith(name, ' argument')) {
// For cases like 'first argument'
msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
} else {
var type = includes(name, '.') ? 'property' : 'argument';
msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
}
msg += ". Received type ".concat(typeof actual);
return msg;
}, TypeError);
createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
return 'The ' + name + ' method is not implemented';
});
createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
createErrorType('ERR_STREAM_DESTROYED', function (name) {
return 'Cannot call ' + name + ' after a stream was destroyed';
});
createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
return 'Unknown encoding: ' + arg;
}, TypeError);
createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
module.exports.codes = codes;
116
node_modules/bl/node_modules/readable-stream/errors.js generated vendored Normal file
@@ -0,0 +1,116 @@
'use strict';
const codes = {};
function createErrorType(code, message, Base) {
if (!Base) {
Base = Error
}
function getMessage (arg1, arg2, arg3) {
if (typeof message === 'string') {
return message
} else {
return message(arg1, arg2, arg3)
}
}
class NodeError extends Base {
constructor (arg1, arg2, arg3) {
super(getMessage(arg1, arg2, arg3));
}
}
NodeError.prototype.name = Base.name;
NodeError.prototype.code = code;
codes[code] = NodeError;
}
// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
function oneOf(expected, thing) {
if (Array.isArray(expected)) {
const len = expected.length;
expected = expected.map((i) => String(i));
if (len > 2) {
return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +
expected[len - 1];
} else if (len === 2) {
return `one of ${thing} ${expected[0]} or ${expected[1]}`;
} else {
return `of ${thing} ${expected[0]}`;
}
} else {
return `of ${thing} ${String(expected)}`;
}
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
function startsWith(str, search, pos) {
return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
function endsWith(str, search, this_len) {
if (this_len === undefined || this_len > str.length) {
this_len = str.length;
}
return str.substring(this_len - search.length, this_len) === search;
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
function includes(str, search, start) {
if (typeof start !== 'number') {
start = 0;
}
if (start + search.length > str.length) {
return false;
} else {
return str.indexOf(search, start) !== -1;
}
}
createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
return 'The value "' + value + '" is invalid for option "' + name + '"'
}, TypeError);
createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
// determiner: 'must be' or 'must not be'
let determiner;
if (typeof expected === 'string' && startsWith(expected, 'not ')) {
determiner = 'must not be';
expected = expected.replace(/^not /, '');
} else {
determiner = 'must be';
}
let msg;
if (endsWith(name, ' argument')) {
// For cases like 'first argument'
msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;
} else {
const type = includes(name, '.') ? 'property' : 'argument';
msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`;
}
msg += `. Received type ${typeof actual}`;
return msg;
}, TypeError);
createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
return 'The ' + name + ' method is not implemented'
});
createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
createErrorType('ERR_STREAM_DESTROYED', function (name) {
return 'Cannot call ' + name + ' after a stream was destroyed';
});
createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
return 'Unknown encoding: ' + arg
}, TypeError);
createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
module.exports.codes = codes;
@@ -0,0 +1,17 @@
'use strict'
var experimentalWarnings = new Set();
function emitExperimentalWarning(feature) {
if (experimentalWarnings.has(feature)) return;
var msg = feature + ' is an experimental feature. This feature could ' +
'change at any time';
experimentalWarnings.add(feature);
process.emitWarning(msg, 'ExperimentalWarning');
}
function noop() {}
module.exports.emitExperimentalWarning = process.emitWarning
? emitExperimentalWarning
: noop;
@@ -0,0 +1,139 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
'use strict';
/*<replacement>*/
var objectKeys = Object.keys || function (obj) {
var keys = [];
for (var key in obj) {
keys.push(key);
}
return keys;
};
/*</replacement>*/
module.exports = Duplex;
var Readable = require('./_stream_readable');
var Writable = require('./_stream_writable');
require('inherits')(Duplex, Readable);
{
// Allow the keys array to be GC'ed.
var keys = objectKeys(Writable.prototype);
for (var v = 0; v < keys.length; v++) {
var method = keys[v];
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
}
}
function Duplex(options) {
if (!(this instanceof Duplex)) return new Duplex(options);
Readable.call(this, options);
Writable.call(this, options);
this.allowHalfOpen = true;
if (options) {
if (options.readable === false) this.readable = false;
if (options.writable === false) this.writable = false;
if (options.allowHalfOpen === false) {
this.allowHalfOpen = false;
this.once('end', onend);
}
}
}
Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function get() {
return this._writableState.highWaterMark;
}
});
Object.defineProperty(Duplex.prototype, 'writableBuffer', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function get() {
return this._writableState && this._writableState.getBuffer();
}
});
Object.defineProperty(Duplex.prototype, 'writableLength', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function get() {
return this._writableState.length;
}
}); // the no-half-open enforcer
function onend() {
// If the writable side ended, then we're ok.
if (this._writableState.ended) return; // no more data can be written.
// But allow more writes to happen in this tick.
process.nextTick(onEndNT, this);
}
function onEndNT(self) {
self.end();
}
Object.defineProperty(Duplex.prototype, 'destroyed', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function get() {
if (this._readableState === undefined || this._writableState === undefined) {
return false;
}
return this._readableState.destroyed && this._writableState.destroyed;
},
set: function set(value) {
// we ignore the value if the stream
// has not been initialized yet
if (this._readableState === undefined || this._writableState === undefined) {
return;
} // backward compatibility, the user is explicitly
// managing destroyed
this._readableState.destroyed = value;
this._writableState.destroyed = value;
}
});
@@ -0,0 +1,39 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
'use strict';
module.exports = PassThrough;
var Transform = require('./_stream_transform');
require('inherits')(PassThrough, Transform);
function PassThrough(options) {
if (!(this instanceof PassThrough)) return new PassThrough(options);
Transform.call(this, options);
}
PassThrough.prototype._transform = function (chunk, encoding, cb) {
cb(null, chunk);
};
File diff suppressed because it is too large
View File

@@ -0,0 +1,201 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
'use strict';
module.exports = Transform;
var _require$codes = require('../errors').codes,
ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,
ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;
var Duplex = require('./_stream_duplex');
require('inherits')(Transform, Duplex);
function afterTransform(er, data) {
var ts = this._transformState;
ts.transforming = false;
var cb = ts.writecb;
if (cb === null) {
return this.emit('error', new ERR_MULTIPLE_CALLBACK());
}
ts.writechunk = null;
ts.writecb = null;
if (data != null) // single equals check for both `null` and `undefined`
this.push(data);
cb(er);
var rs = this._readableState;
rs.reading = false;
if (rs.needReadable || rs.length < rs.highWaterMark) {
this._read(rs.highWaterMark);
}
}
function Transform(options) {
if (!(this instanceof Transform)) return new Transform(options);
Duplex.call(this, options);
this._transformState = {
afterTransform: afterTransform.bind(this),
needTransform: false,
transforming: false,
writecb: null,
writechunk: null,
writeencoding: null
}; // start out asking for a readable event once data is transformed.
this._readableState.needReadable = true; // we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
if (options) {
if (typeof options.transform === 'function') this._transform = options.transform;
if (typeof options.flush === 'function') this._flush = options.flush;
} // When the writable side finishes, then flush out anything remaining.
this.on('prefinish', prefinish);
}
function prefinish() {
var _this = this;
if (typeof this._flush === 'function' && !this._readableState.destroyed) {
this._flush(function (er, data) {
done(_this, er, data);
});
} else {
done(this, null, null);
}
}
Transform.prototype.push = function (chunk, encoding) {
this._transformState.needTransform = false;
return Duplex.prototype.push.call(this, chunk, encoding);
}; // This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function (chunk, encoding, cb) {
cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));
};
Transform.prototype._write = function (chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
}
}; // Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function (n) {
var ts = this._transformState;
if (ts.writechunk !== null && !ts.transforming) {
ts.transforming = true;
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
} else {
// mark that we need a transform, so that any data that comes in
// will get processed, now that we've asked for it.
ts.needTransform = true;
}
};
Transform.prototype._destroy = function (err, cb) {
Duplex.prototype._destroy.call(this, err, function (err2) {
cb(err2);
});
};
function done(stream, er, data) {
if (er) return stream.emit('error', er);
if (data != null) // single equals check for both `null` and `undefined`
stream.push(data); // TODO(BridgeAR): Write a test for these two error cases
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();
if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
return stream.push(null);
}
@@ -0,0 +1,697 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// A bit simpler than readable streams.
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
// the drain event emission and buffering.
'use strict';
module.exports = Writable;
/* <replacement> */
function WriteReq(chunk, encoding, cb) {
this.chunk = chunk;
this.encoding = encoding;
this.callback = cb;
this.next = null;
} // It seems a linked list but it is not
// there will be only 2 of these for each stream
function CorkedRequest(state) {
var _this = this;
this.next = null;
this.entry = null;
this.finish = function () {
onCorkedFinish(_this, state);
};
}
/* </replacement> */
/*<replacement>*/
var Duplex;
/*</replacement>*/
Writable.WritableState = WritableState;
/*<replacement>*/
var internalUtil = {
deprecate: require('util-deprecate')
};
/*</replacement>*/
/*<replacement>*/
var Stream = require('./internal/streams/stream');
/*</replacement>*/
var Buffer = require('buffer').Buffer;
var OurUint8Array = global.Uint8Array || function () {};
function _uint8ArrayToBuffer(chunk) {
return Buffer.from(chunk);
}
function _isUint8Array(obj) {
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
}
var destroyImpl = require('./internal/streams/destroy');
var _require = require('./internal/streams/state'),
getHighWaterMark = _require.getHighWaterMark;
var _require$codes = require('../errors').codes,
ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;
var errorOrDestroy = destroyImpl.errorOrDestroy;
require('inherits')(Writable, Stream);
function nop() {}
function WritableState(options, stream, isDuplex) {
Duplex = Duplex || require('./_stream_duplex');
options = options || {}; // Duplex streams are both readable and writable, but share
// the same options object.
// However, some cases require setting options to different
// values for the readable and the writable sides of the duplex stream,
// e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream
// contains buffers or objects.
this.objectMode = !!options.objectMode;
if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false
// Note: 0 is a valid value, means that we always return false if
// the entire buffer is not flushed immediately on write()
this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called
this.finalCalled = false; // drain event flag.
this.needDrain = false; // at the start of calling end()
this.ending = false; // when end() has been called, and returned
this.ended = false; // when 'finish' is emitted
this.finished = false; // has it been destroyed
this.destroyed = false; // should we decode strings into buffers before passing to _write?
// this is here so that some node-core streams can optimize string
// handling at a lower level.
var noDecode = options.decodeStrings === false;
this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement
// of how much we're waiting to get pushed to some underlying
// socket or file.
this.length = 0; // a flag to see when we're in the middle of a write.
this.writing = false; // when true all writes will be buffered until .uncork() call
this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately,
// or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true; // a flag to know if we're processing previously buffered items, which
// may call the _write() callback in the same tick, so that we don't
// end up in an overlapped onwrite situation.
this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb)
this.onwrite = function (er) {
onwrite(stream, er);
}; // the callback that the user supplies to write(chunk,encoding,cb)
this.writecb = null; // the amount that is being written when _write is called.
this.writelen = 0;
this.bufferedRequest = null;
this.lastBufferedRequest = null; // number of pending user-supplied write callbacks
// this must be 0 before 'finish' can be emitted
this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs
// This is relevant for synchronous Transform streams
this.prefinished = false; // True if the error was already emitted and should not be thrown again
this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true.
this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end')
this.autoDestroy = !!options.autoDestroy; // count buffered requests
this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always
// one allocated and free to use, and we maintain at most two
this.corkedRequestsFree = new CorkedRequest(this);
}
WritableState.prototype.getBuffer = function getBuffer() {
var current = this.bufferedRequest;
var out = [];
while (current) {
out.push(current);
current = current.next;
}
return out;
};
(function () {
try {
Object.defineProperty(WritableState.prototype, 'buffer', {
get: internalUtil.deprecate(function writableStateBufferGetter() {
return this.getBuffer();
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
});
} catch (_) {}
})(); // Test _writableState for inheritance to account for Duplex streams,
// whose prototype chain only points to Readable.
var realHasInstance;
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
realHasInstance = Function.prototype[Symbol.hasInstance];
Object.defineProperty(Writable, Symbol.hasInstance, {
value: function value(object) {
if (realHasInstance.call(this, object)) return true;
if (this !== Writable) return false;
return object && object._writableState instanceof WritableState;
}
});
} else {
realHasInstance = function realHasInstance(object) {
return object instanceof this;
};
}
function Writable(options) {
Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too.
// `realHasInstance` is necessary because using plain `instanceof`
// would return false, as no `_writableState` property is attached.
// Trying to use the custom `instanceof` for Writable here will also break the
// Node.js LazyTransform implementation, which has a non-trivial getter for
// `_writableState` that would lead to infinite recursion.
// Checking for a Stream.Duplex instance is faster here instead of inside
// the WritableState constructor, at least with V8 6.5
var isDuplex = this instanceof Duplex;
if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);
this._writableState = new WritableState(options, this, isDuplex); // legacy.
this.writable = true;
if (options) {
if (typeof options.write === 'function') this._write = options.write;
if (typeof options.writev === 'function') this._writev = options.writev;
if (typeof options.destroy === 'function') this._destroy = options.destroy;
if (typeof options.final === 'function') this._final = options.final;
}
Stream.call(this);
} // Otherwise people can pipe Writable streams, which is just wrong.
Writable.prototype.pipe = function () {
errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());
};
function writeAfterEnd(stream, cb) {
var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb
errorOrDestroy(stream, er);
process.nextTick(cb, er);
} // Checks that a user-supplied chunk is valid, especially for the particular
// mode the stream is in. Currently this means that `null` is never accepted
// and undefined/non-string values are only allowed in object mode.
function validChunk(stream, state, chunk, cb) {
var er;
if (chunk === null) {
er = new ERR_STREAM_NULL_VALUES();
} else if (typeof chunk !== 'string' && !state.objectMode) {
er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
}
if (er) {
errorOrDestroy(stream, er);
process.nextTick(cb, er);
return false;
}
return true;
}
Writable.prototype.write = function (chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
var isBuf = !state.objectMode && _isUint8Array(chunk);
if (isBuf && !Buffer.isBuffer(chunk)) {
chunk = _uint8ArrayToBuffer(chunk);
}
if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
if (typeof cb !== 'function') cb = nop;
if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
state.pendingcb++;
ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
}
return ret;
};
Writable.prototype.cork = function () {
this._writableState.corked++;
};
Writable.prototype.uncork = function () {
var state = this._writableState;
if (state.corked) {
state.corked--;
if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
}
};
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
// node::ParseEncoding() requires lower case.
if (typeof encoding === 'string') encoding = encoding.toLowerCase();
if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);
this._writableState.defaultEncoding = encoding;
return this;
};
Object.defineProperty(Writable.prototype, 'writableBuffer', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function get() {
return this._writableState && this._writableState.getBuffer();
}
});
function decodeChunk(state, chunk, encoding) {
if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
chunk = Buffer.from(chunk, encoding);
}
return chunk;
}
Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function get() {
return this._writableState.highWaterMark;
}
}); // if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
if (!isBuf) {
var newChunk = decodeChunk(state, chunk, encoding);
if (chunk !== newChunk) {
isBuf = true;
encoding = 'buffer';
chunk = newChunk;
}
}
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false.
if (!ret) state.needDrain = true;
if (state.writing || state.corked) {
var last = state.lastBufferedRequest;
state.lastBufferedRequest = {
chunk: chunk,
encoding: encoding,
isBuf: isBuf,
callback: cb,
next: null
};
if (last) {
last.next = state.lastBufferedRequest;
} else {
state.bufferedRequest = state.lastBufferedRequest;
}
state.bufferedRequestCount += 1;
} else {
doWrite(stream, state, false, len, chunk, encoding, cb);
}
return ret;
}
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
state.writelen = len;
state.writecb = cb;
state.writing = true;
state.sync = true;
if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
state.sync = false;
}
function onwriteError(stream, state, sync, er, cb) {
--state.pendingcb;
if (sync) {
// defer the callback if we are being called synchronously
// to avoid piling up things on the stack
process.nextTick(cb, er); // this can emit finish, and it will always happen
// after error
process.nextTick(finishMaybe, stream, state);
stream._writableState.errorEmitted = true;
errorOrDestroy(stream, er);
} else {
// the caller expect this to happen before if
// it is async
cb(er);
stream._writableState.errorEmitted = true;
errorOrDestroy(stream, er); // this can emit finish, but finish must
// always follow error
finishMaybe(stream, state);
}
}
function onwriteStateUpdate(state) {
state.writing = false;
state.writecb = null;
state.length -= state.writelen;
state.writelen = 0;
}
function onwrite(stream, er) {
var state = stream._writableState;
var sync = state.sync;
var cb = state.writecb;
if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();
onwriteStateUpdate(state);
if (er) onwriteError(stream, state, sync, er, cb);else {
// Check if we're actually ready to finish, but don't emit yet
var finished = needFinish(state) || stream.destroyed;
if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
clearBuffer(stream, state);
}
if (sync) {
process.nextTick(afterWrite, stream, state, finished, cb);
} else {
afterWrite(stream, state, finished, cb);
}
}
}
function afterWrite(stream, state, finished, cb) {
if (!finished) onwriteDrain(stream, state);
state.pendingcb--;
cb();
finishMaybe(stream, state);
} // Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.
function onwriteDrain(stream, state) {
if (state.length === 0 && state.needDrain) {
state.needDrain = false;
stream.emit('drain');
}
} // if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
state.bufferProcessing = true;
var entry = state.bufferedRequest;
if (stream._writev && entry && entry.next) {
// Fast case, write everything using _writev()
var l = state.bufferedRequestCount;
var buffer = new Array(l);
var holder = state.corkedRequestsFree;
holder.entry = entry;
var count = 0;
var allBuffers = true;
while (entry) {
buffer[count] = entry;
if (!entry.isBuf) allBuffers = false;
entry = entry.next;
count += 1;
}
buffer.allBuffers = allBuffers;
doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time
// as the hot path ends with doWrite
state.pendingcb++;
state.lastBufferedRequest = null;
if (holder.next) {
state.corkedRequestsFree = holder.next;
holder.next = null;
} else {
state.corkedRequestsFree = new CorkedRequest(state);
}
state.bufferedRequestCount = 0;
} else {
// Slow case, write chunks one-by-one
while (entry) {
var chunk = entry.chunk;
var encoding = entry.encoding;
var cb = entry.callback;
var len = state.objectMode ? 1 : chunk.length;
doWrite(stream, state, false, len, chunk, encoding, cb);
entry = entry.next;
state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then
// it means that we need to wait until it does.
// also, that means that the chunk and cb are currently
// being processed, so move the buffer counter past them.
if (state.writing) {
break;
}
}
if (entry === null) state.lastBufferedRequest = null;
}
state.bufferedRequest = entry;
state.bufferProcessing = false;
}
Writable.prototype._write = function (chunk, encoding, cb) {
cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));
};
Writable.prototype._writev = null;
Writable.prototype.end = function (chunk, encoding, cb) {
var state = this._writableState;
if (typeof chunk === 'function') {
cb = chunk;
chunk = null;
encoding = null;
} else if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks
if (state.corked) {
state.corked = 1;
this.uncork();
} // ignore unnecessary end() calls.
if (!state.ending) endWritable(this, state, cb);
return this;
};
Object.defineProperty(Writable.prototype, 'writableLength', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function get() {
return this._writableState.length;
}
});
function needFinish(state) {
return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
}
function callFinal(stream, state) {
stream._final(function (err) {
state.pendingcb--;
if (err) {
errorOrDestroy(stream, err);
}
state.prefinished = true;
stream.emit('prefinish');
finishMaybe(stream, state);
});
}
function prefinish(stream, state) {
if (!state.prefinished && !state.finalCalled) {
if (typeof stream._final === 'function' && !state.destroyed) {
state.pendingcb++;
state.finalCalled = true;
process.nextTick(callFinal, stream, state);
} else {
state.prefinished = true;
stream.emit('prefinish');
}
}
}
function finishMaybe(stream, state) {
var need = needFinish(state);
if (need) {
prefinish(stream, state);
if (state.pendingcb === 0) {
state.finished = true;
stream.emit('finish');
if (state.autoDestroy) {
// In case of duplex streams we need a way to detect
// if the readable side is ready for autoDestroy as well
var rState = stream._readableState;
if (!rState || rState.autoDestroy && rState.endEmitted) {
stream.destroy();
}
}
}
}
return need;
}
function endWritable(stream, state, cb) {
state.ending = true;
finishMaybe(stream, state);
if (cb) {
if (state.finished) process.nextTick(cb);else stream.once('finish', cb);
}
state.ended = true;
stream.writable = false;
}
function onCorkedFinish(corkReq, state, err) {
var entry = corkReq.entry;
corkReq.entry = null;
while (entry) {
var cb = entry.callback;
state.pendingcb--;
cb(err);
entry = entry.next;
} // reuse the free corkReq.
state.corkedRequestsFree.next = corkReq;
}
Object.defineProperty(Writable.prototype, 'destroyed', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function get() {
if (this._writableState === undefined) {
return false;
}
return this._writableState.destroyed;
},
set: function set(value) {
// we ignore the value if the stream
// has not been initialized yet
if (!this._writableState) {
return;
} // backward compatibility, the user is explicitly
// managing destroyed
this._writableState.destroyed = value;
}
});
Writable.prototype.destroy = destroyImpl.destroy;
Writable.prototype._undestroy = destroyImpl.undestroy;
Writable.prototype._destroy = function (err, cb) {
cb(err);
};
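
The drain bookkeeping above (onwriteDrain and the needDrain flag) is what gives Writable its backpressure contract: write() returning false means "wait for 'drain'". A minimal usage sketch, assuming the package is required as readable-stream; the 4-byte highWaterMark and the writeMany helper are illustrative only:

const { Writable } = require('readable-stream')

const slow = new Writable({
  highWaterMark: 4,
  write (chunk, encoding, callback) {
    setTimeout(callback, 10) // pretend the destination is slow
  }
})

function writeMany (remaining) {
  while (remaining-- > 0) {
    if (!slow.write('x')) {
      // buffer is over highWaterMark; wait for 'drain' before writing more
      slow.once('drain', () => writeMany(remaining))
      return
    }
  }
  slow.end(() => console.log('all writes flushed, finish emitted'))
}

writeMany(100)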

View File

@@ -0,0 +1,207 @@
'use strict';
var _Object$setPrototypeO;
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
var finished = require('./end-of-stream');
var kLastResolve = Symbol('lastResolve');
var kLastReject = Symbol('lastReject');
var kError = Symbol('error');
var kEnded = Symbol('ended');
var kLastPromise = Symbol('lastPromise');
var kHandlePromise = Symbol('handlePromise');
var kStream = Symbol('stream');
function createIterResult(value, done) {
return {
value: value,
done: done
};
}
function readAndResolve(iter) {
var resolve = iter[kLastResolve];
if (resolve !== null) {
var data = iter[kStream].read(); // we defer if data is null
// we can be expecting either 'end' or
// 'error'
if (data !== null) {
iter[kLastPromise] = null;
iter[kLastResolve] = null;
iter[kLastReject] = null;
resolve(createIterResult(data, false));
}
}
}
function onReadable(iter) {
// we wait for the next tick, because it might
// emit an error with process.nextTick
process.nextTick(readAndResolve, iter);
}
function wrapForNext(lastPromise, iter) {
return function (resolve, reject) {
lastPromise.then(function () {
if (iter[kEnded]) {
resolve(createIterResult(undefined, true));
return;
}
iter[kHandlePromise](resolve, reject);
}, reject);
};
}
var AsyncIteratorPrototype = Object.getPrototypeOf(function () {});
var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = {
get stream() {
return this[kStream];
},
next: function next() {
var _this = this;
// if we have detected an error in the meanwhile
// reject straight away
var error = this[kError];
if (error !== null) {
return Promise.reject(error);
}
if (this[kEnded]) {
return Promise.resolve(createIterResult(undefined, true));
}
if (this[kStream].destroyed) {
// We need to defer via nextTick because if .destroy(err) is
// called, the error will be emitted via nextTick, and
// we cannot guarantee that there is no error lingering around
// waiting to be emitted.
return new Promise(function (resolve, reject) {
process.nextTick(function () {
if (_this[kError]) {
reject(_this[kError]);
} else {
resolve(createIterResult(undefined, true));
}
});
});
} // if we have multiple next() calls
// we will wait for the previous Promise to finish
// this logic is optimized to support for await loops,
// where next() is only called once at a time
var lastPromise = this[kLastPromise];
var promise;
if (lastPromise) {
promise = new Promise(wrapForNext(lastPromise, this));
} else {
// fast path needed to support multiple this.push()
// without triggering the next() queue
var data = this[kStream].read();
if (data !== null) {
return Promise.resolve(createIterResult(data, false));
}
promise = new Promise(this[kHandlePromise]);
}
this[kLastPromise] = promise;
return promise;
}
}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () {
return this;
}), _defineProperty(_Object$setPrototypeO, "return", function _return() {
var _this2 = this;
// destroy(err, cb) is a private API
// we can guarantee we have that here, because we control the
// Readable class this is attached to
return new Promise(function (resolve, reject) {
_this2[kStream].destroy(null, function (err) {
if (err) {
reject(err);
return;
}
resolve(createIterResult(undefined, true));
});
});
}), _Object$setPrototypeO), AsyncIteratorPrototype);
var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) {
var _Object$create;
var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, {
value: stream,
writable: true
}), _defineProperty(_Object$create, kLastResolve, {
value: null,
writable: true
}), _defineProperty(_Object$create, kLastReject, {
value: null,
writable: true
}), _defineProperty(_Object$create, kError, {
value: null,
writable: true
}), _defineProperty(_Object$create, kEnded, {
value: stream._readableState.endEmitted,
writable: true
}), _defineProperty(_Object$create, kHandlePromise, {
value: function value(resolve, reject) {
var data = iterator[kStream].read();
if (data) {
iterator[kLastPromise] = null;
iterator[kLastResolve] = null;
iterator[kLastReject] = null;
resolve(createIterResult(data, false));
} else {
iterator[kLastResolve] = resolve;
iterator[kLastReject] = reject;
}
},
writable: true
}), _Object$create));
iterator[kLastPromise] = null;
finished(stream, function (err) {
if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise
// returned by next() and store the error
if (reject !== null) {
iterator[kLastPromise] = null;
iterator[kLastResolve] = null;
iterator[kLastReject] = null;
reject(err);
}
iterator[kError] = err;
return;
}
var resolve = iterator[kLastResolve];
if (resolve !== null) {
iterator[kLastPromise] = null;
iterator[kLastResolve] = null;
iterator[kLastReject] = null;
resolve(createIterResult(undefined, true));
}
iterator[kEnded] = true;
});
stream.on('readable', onReadable.bind(null, iterator));
return iterator;
};
module.exports = createReadableStreamAsyncIterator;
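
A minimal consumption sketch, assuming this iterator is wired up as Readable.prototype[Symbol.asyncIterator] the way readable-stream 3.x does; the pushed strings are illustrative:

const { Readable } = require('readable-stream')

async function main () {
  const stream = new Readable({ read () {} })
  stream.push('hello ')
  stream.push('world')
  stream.push(null) // signal end-of-stream

  const chunks = []
  for await (const chunk of stream) { // goes through createReadableStreamAsyncIterator above
    chunks.push(chunk)
  }
  console.log(Buffer.concat(chunks).toString()) // "hello world"
}

main().catch(console.error)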

View File

@@ -0,0 +1,210 @@
'use strict';
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
var _require = require('buffer'),
Buffer = _require.Buffer;
var _require2 = require('util'),
inspect = _require2.inspect;
var custom = inspect && inspect.custom || 'inspect';
function copyBuffer(src, target, offset) {
Buffer.prototype.copy.call(src, target, offset);
}
module.exports =
/*#__PURE__*/
function () {
function BufferList() {
_classCallCheck(this, BufferList);
this.head = null;
this.tail = null;
this.length = 0;
}
_createClass(BufferList, [{
key: "push",
value: function push(v) {
var entry = {
data: v,
next: null
};
if (this.length > 0) this.tail.next = entry;else this.head = entry;
this.tail = entry;
++this.length;
}
}, {
key: "unshift",
value: function unshift(v) {
var entry = {
data: v,
next: this.head
};
if (this.length === 0) this.tail = entry;
this.head = entry;
++this.length;
}
}, {
key: "shift",
value: function shift() {
if (this.length === 0) return;
var ret = this.head.data;
if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
--this.length;
return ret;
}
}, {
key: "clear",
value: function clear() {
this.head = this.tail = null;
this.length = 0;
}
}, {
key: "join",
value: function join(s) {
if (this.length === 0) return '';
var p = this.head;
var ret = '' + p.data;
while (p = p.next) {
ret += s + p.data;
}
return ret;
}
}, {
key: "concat",
value: function concat(n) {
if (this.length === 0) return Buffer.alloc(0);
var ret = Buffer.allocUnsafe(n >>> 0);
var p = this.head;
var i = 0;
while (p) {
copyBuffer(p.data, ret, i);
i += p.data.length;
p = p.next;
}
return ret;
} // Consumes a specified amount of bytes or characters from the buffered data.
}, {
key: "consume",
value: function consume(n, hasStrings) {
var ret;
if (n < this.head.data.length) {
// `slice` is the same for buffers and strings.
ret = this.head.data.slice(0, n);
this.head.data = this.head.data.slice(n);
} else if (n === this.head.data.length) {
// First chunk is a perfect match.
ret = this.shift();
} else {
// Result spans more than one buffer.
ret = hasStrings ? this._getString(n) : this._getBuffer(n);
}
return ret;
}
}, {
key: "first",
value: function first() {
return this.head.data;
} // Consumes a specified amount of characters from the buffered data.
}, {
key: "_getString",
value: function _getString(n) {
var p = this.head;
var c = 1;
var ret = p.data;
n -= ret.length;
while (p = p.next) {
var str = p.data;
var nb = n > str.length ? str.length : n;
if (nb === str.length) ret += str;else ret += str.slice(0, n);
n -= nb;
if (n === 0) {
if (nb === str.length) {
++c;
if (p.next) this.head = p.next;else this.head = this.tail = null;
} else {
this.head = p;
p.data = str.slice(nb);
}
break;
}
++c;
}
this.length -= c;
return ret;
} // Consumes a specified amount of bytes from the buffered data.
}, {
key: "_getBuffer",
value: function _getBuffer(n) {
var ret = Buffer.allocUnsafe(n);
var p = this.head;
var c = 1;
p.data.copy(ret);
n -= p.data.length;
while (p = p.next) {
var buf = p.data;
var nb = n > buf.length ? buf.length : n;
buf.copy(ret, ret.length - n, 0, nb);
n -= nb;
if (n === 0) {
if (nb === buf.length) {
++c;
if (p.next) this.head = p.next;else this.head = this.tail = null;
} else {
this.head = p;
p.data = buf.slice(nb);
}
break;
}
++c;
}
this.length -= c;
return ret;
} // Make sure the linked list only shows the minimal necessary information.
}, {
key: custom,
value: function value(_, options) {
return inspect(this, _objectSpread({}, options, {
// Only inspect one level.
depth: 0,
// It should not recurse.
customInspect: false
}));
}
}]);
return BufferList;
}();
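
A rough sketch of how this internal list behaves (it backs the readable-side buffer). The deep require path is assumed from this package layout; note that length counts stored chunks, not bytes:

const BufferList = require('readable-stream/lib/internal/streams/buffer_list')

const list = new BufferList()
list.push(Buffer.from('abc'))
list.push(Buffer.from('def'))

console.log(list.length)                        // 2 chunks
console.log(list.concat(6).toString())          // "abcdef" (a copy, nothing consumed)
console.log(list.consume(4, false).toString())  // "abcd" (spans both chunks)
console.log(list.first().toString())            // "ef" (what is left of the second chunk)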

View File

@@ -0,0 +1,105 @@
'use strict'; // undocumented cb() API, needed for core, not for public API
function destroy(err, cb) {
var _this = this;
var readableDestroyed = this._readableState && this._readableState.destroyed;
var writableDestroyed = this._writableState && this._writableState.destroyed;
if (readableDestroyed || writableDestroyed) {
if (cb) {
cb(err);
} else if (err) {
if (!this._writableState) {
process.nextTick(emitErrorNT, this, err);
} else if (!this._writableState.errorEmitted) {
this._writableState.errorEmitted = true;
process.nextTick(emitErrorNT, this, err);
}
}
return this;
} // we set destroyed to true before firing error callbacks in order
// to make it re-entrance safe in case destroy() is called within callbacks
if (this._readableState) {
this._readableState.destroyed = true;
} // if this is a duplex stream mark the writable part as destroyed as well
if (this._writableState) {
this._writableState.destroyed = true;
}
this._destroy(err || null, function (err) {
if (!cb && err) {
if (!_this._writableState) {
process.nextTick(emitErrorAndCloseNT, _this, err);
} else if (!_this._writableState.errorEmitted) {
_this._writableState.errorEmitted = true;
process.nextTick(emitErrorAndCloseNT, _this, err);
} else {
process.nextTick(emitCloseNT, _this);
}
} else if (cb) {
process.nextTick(emitCloseNT, _this);
cb(err);
} else {
process.nextTick(emitCloseNT, _this);
}
});
return this;
}
function emitErrorAndCloseNT(self, err) {
emitErrorNT(self, err);
emitCloseNT(self);
}
function emitCloseNT(self) {
if (self._writableState && !self._writableState.emitClose) return;
if (self._readableState && !self._readableState.emitClose) return;
self.emit('close');
}
function undestroy() {
if (this._readableState) {
this._readableState.destroyed = false;
this._readableState.reading = false;
this._readableState.ended = false;
this._readableState.endEmitted = false;
}
if (this._writableState) {
this._writableState.destroyed = false;
this._writableState.ended = false;
this._writableState.ending = false;
this._writableState.finalCalled = false;
this._writableState.prefinished = false;
this._writableState.finished = false;
this._writableState.errorEmitted = false;
}
}
function emitErrorNT(self, err) {
self.emit('error', err);
}
function errorOrDestroy(stream, err) {
// We have tests that rely on errors being emitted
// in the same tick, so changing this is semver major.
// For now when you opt-in to autoDestroy we allow
// the error to be emitted nextTick. In a future
// semver major update we should change the default to this.
var rState = stream._readableState;
var wState = stream._writableState;
if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);
}
module.exports = {
destroy: destroy,
undestroy: undestroy,
errorOrDestroy: errorOrDestroy
};
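
A small sketch of the destroy()/emitErrorAndCloseNT flow above: destroying with an error emits 'error' and then 'close', both deferred via process.nextTick. The error message is illustrative:

const { Readable } = require('readable-stream')

const r = new Readable({ read () {} })
r.on('error', (err) => console.log('error:', err.message))
r.on('close', () => console.log('close'))

r.destroy(new Error('boom'))
// prints "error: boom" followed by "close" on later ticks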

View File

@@ -0,0 +1,104 @@
// Ported from https://github.com/mafintosh/end-of-stream with
// permission from the author, Mathias Buus (@mafintosh).
'use strict';
var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE;
function once(callback) {
var called = false;
return function () {
if (called) return;
called = true;
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
callback.apply(this, args);
};
}
function noop() {}
function isRequest(stream) {
return stream.setHeader && typeof stream.abort === 'function';
}
function eos(stream, opts, callback) {
if (typeof opts === 'function') return eos(stream, null, opts);
if (!opts) opts = {};
callback = once(callback || noop);
var readable = opts.readable || opts.readable !== false && stream.readable;
var writable = opts.writable || opts.writable !== false && stream.writable;
var onlegacyfinish = function onlegacyfinish() {
if (!stream.writable) onfinish();
};
var writableEnded = stream._writableState && stream._writableState.finished;
var onfinish = function onfinish() {
writable = false;
writableEnded = true;
if (!readable) callback.call(stream);
};
var readableEnded = stream._readableState && stream._readableState.endEmitted;
var onend = function onend() {
readable = false;
readableEnded = true;
if (!writable) callback.call(stream);
};
var onerror = function onerror(err) {
callback.call(stream, err);
};
var onclose = function onclose() {
var err;
if (readable && !readableEnded) {
if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
return callback.call(stream, err);
}
if (writable && !writableEnded) {
if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
return callback.call(stream, err);
}
};
var onrequest = function onrequest() {
stream.req.on('finish', onfinish);
};
if (isRequest(stream)) {
stream.on('complete', onfinish);
stream.on('abort', onclose);
if (stream.req) onrequest();else stream.on('request', onrequest);
} else if (writable && !stream._writableState) {
// legacy streams
stream.on('end', onlegacyfinish);
stream.on('close', onlegacyfinish);
}
stream.on('end', onend);
stream.on('finish', onfinish);
if (opts.error !== false) stream.on('error', onerror);
stream.on('close', onclose);
return function () {
stream.removeListener('complete', onfinish);
stream.removeListener('abort', onclose);
stream.removeListener('request', onrequest);
if (stream.req) stream.req.removeListener('finish', onfinish);
stream.removeListener('end', onlegacyfinish);
stream.removeListener('close', onlegacyfinish);
stream.removeListener('finish', onfinish);
stream.removeListener('end', onend);
stream.removeListener('error', onerror);
stream.removeListener('close', onclose);
};
}
module.exports = eos;
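
A minimal sketch of eos(), which the package entry point re-exports as finished; the file path is illustrative:

const { finished } = require('readable-stream')
const fs = require('fs')

const rs = fs.createReadStream('some-file.txt')

finished(rs, (err) => {
  if (err) console.error('stream failed or closed early', err)
  else console.log('stream is done reading')
})

rs.resume() // drain the stream so 'end' (and thus the callback) can fire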

View File

@@ -0,0 +1,3 @@
module.exports = function () {
throw new Error('Readable.from is not available in the browser')
};

View File

@@ -0,0 +1,64 @@
'use strict';
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE;
function from(Readable, iterable, opts) {
var iterator;
if (iterable && typeof iterable.next === 'function') {
iterator = iterable;
} else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable);
var readable = new Readable(_objectSpread({
objectMode: true
}, opts)); // Reading boolean to protect against _read
// being called before last iteration completion.
var reading = false;
readable._read = function () {
if (!reading) {
reading = true;
next();
}
};
function next() {
return _next2.apply(this, arguments);
}
function _next2() {
_next2 = _asyncToGenerator(function* () {
try {
var _ref = yield iterator.next(),
value = _ref.value,
done = _ref.done;
if (done) {
readable.push(null);
} else if (readable.push((yield value))) {
next();
} else {
reading = false;
}
} catch (err) {
readable.destroy(err);
}
});
return _next2.apply(this, arguments);
}
return readable;
}
module.exports = from;
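
A sketch of the public entry into the helper above: readable-stream 3.x wires it up as Readable.from(iterable), so an async generator can be turned into an object-mode stream. The generator here is illustrative:

const { Readable } = require('readable-stream')

async function * generate () {
  yield 'hello'
  yield ' '
  yield 'world'
}

const stream = Readable.from(generate())
stream.on('data', (chunk) => process.stdout.write(chunk)) // "hello world"
stream.on('end', () => console.log('\ndone'))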

View File

@@ -0,0 +1,97 @@
// Ported from https://github.com/mafintosh/pump with
// permission from the author, Mathias Buus (@mafintosh).
'use strict';
var eos;
function once(callback) {
var called = false;
return function () {
if (called) return;
called = true;
callback.apply(void 0, arguments);
};
}
var _require$codes = require('../../../errors').codes,
ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,
ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;
function noop(err) {
// Rethrow the error if it exists to avoid swallowing it
if (err) throw err;
}
function isRequest(stream) {
return stream.setHeader && typeof stream.abort === 'function';
}
function destroyer(stream, reading, writing, callback) {
callback = once(callback);
var closed = false;
stream.on('close', function () {
closed = true;
});
if (eos === undefined) eos = require('./end-of-stream');
eos(stream, {
readable: reading,
writable: writing
}, function (err) {
if (err) return callback(err);
closed = true;
callback();
});
var destroyed = false;
return function (err) {
if (closed) return;
if (destroyed) return;
    destroyed = true; // request.destroy just does .end() - .abort() is what we want
if (isRequest(stream)) return stream.abort();
if (typeof stream.destroy === 'function') return stream.destroy();
callback(err || new ERR_STREAM_DESTROYED('pipe'));
};
}
function call(fn) {
fn();
}
function pipe(from, to) {
return from.pipe(to);
}
function popCallback(streams) {
if (!streams.length) return noop;
if (typeof streams[streams.length - 1] !== 'function') return noop;
return streams.pop();
}
function pipeline() {
for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {
streams[_key] = arguments[_key];
}
var callback = popCallback(streams);
if (Array.isArray(streams[0])) streams = streams[0];
if (streams.length < 2) {
throw new ERR_MISSING_ARGS('streams');
}
var error;
var destroys = streams.map(function (stream, i) {
var reading = i < streams.length - 1;
var writing = i > 0;
return destroyer(stream, reading, writing, function (err) {
if (!error) error = err;
if (err) destroys.forEach(call);
if (reading) return;
destroys.forEach(call);
callback(error);
});
});
return streams.reduce(pipe);
}
module.exports = pipeline;
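
A minimal sketch of pipeline() above, mirroring the usual gzip example; the file names are illustrative:

const { pipeline } = require('readable-stream')
const fs = require('fs')
const zlib = require('zlib')

pipeline(
  fs.createReadStream('archive.txt'),
  zlib.createGzip(),
  fs.createWriteStream('archive.txt.gz'),
  (err) => {
    if (err) console.error('pipeline failed', err)
    else console.log('pipeline succeeded')
  }
)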

View File

@@ -0,0 +1,27 @@
'use strict';
var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;
function highWaterMarkFrom(options, isDuplex, duplexKey) {
return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;
}
function getHighWaterMark(state, options, duplexKey, isDuplex) {
var hwm = highWaterMarkFrom(options, isDuplex, duplexKey);
if (hwm != null) {
if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {
var name = isDuplex ? duplexKey : 'highWaterMark';
throw new ERR_INVALID_OPT_VALUE(name, hwm);
}
return Math.floor(hwm);
} // Default value
return state.objectMode ? 16 : 16 * 1024;
}
module.exports = {
getHighWaterMark: getHighWaterMark
};
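
A hedged summary of what getHighWaterMark() above resolves to for a few option shapes:

// new Writable({ highWaterMark: 64 })        -> 64 (an explicit value always wins)
// new Duplex({ readableHighWaterMark: 32 })  -> 32 for the readable side (the duplexKey path)
// new Writable({}) in byte mode              -> 16 * 1024 (default)
// new Readable({ objectMode: true })         -> 16 objects (default)
// new Writable({ highWaterMark: -1 })        -> throws ERR_INVALID_OPT_VALUE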

View File

@@ -0,0 +1 @@
module.exports = require('events').EventEmitter;

View File

@@ -0,0 +1 @@
module.exports = require('stream');

View File

@@ -0,0 +1,68 @@
{
"name": "readable-stream",
"version": "3.6.0",
"description": "Streams3, a user-land copy of the stream library from Node.js",
"main": "readable.js",
"engines": {
"node": ">= 6"
},
"dependencies": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
},
"devDependencies": {
"@babel/cli": "^7.2.0",
"@babel/core": "^7.2.0",
"@babel/polyfill": "^7.0.0",
"@babel/preset-env": "^7.2.0",
"airtap": "0.0.9",
"assert": "^1.4.0",
"bl": "^2.0.0",
"deep-strict-equal": "^0.2.0",
"events.once": "^2.0.2",
"glob": "^7.1.2",
"gunzip-maybe": "^1.4.1",
"hyperquest": "^2.1.3",
"lolex": "^2.6.0",
"nyc": "^11.0.0",
"pump": "^3.0.0",
"rimraf": "^2.6.2",
"tap": "^12.0.0",
"tape": "^4.9.0",
"tar-fs": "^1.16.2",
"util-promisify": "^2.1.0"
},
"scripts": {
"test": "tap -J --no-esm test/parallel/*.js test/ours/*.js",
"ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap",
"test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js",
"test-browser-local": "airtap --open --local -- test/browser.js",
"cover": "nyc npm test",
"report": "nyc report --reporter=lcov",
"update-browser-errors": "babel -o errors-browser.js errors.js"
},
"repository": {
"type": "git",
"url": "git://github.com/nodejs/readable-stream"
},
"keywords": [
"readable",
"stream",
"pipe"
],
"browser": {
"util": false,
"worker_threads": false,
"./errors": "./errors-browser.js",
"./readable.js": "./readable-browser.js",
"./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js",
"./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js"
},
"nyc": {
"include": [
"lib/**.js"
]
},
"license": "MIT"
}

View File

@@ -0,0 +1,9 @@
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = exports;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
exports.finished = require('./lib/internal/streams/end-of-stream.js');
exports.pipeline = require('./lib/internal/streams/pipeline.js');

View File

@@ -0,0 +1,16 @@
var Stream = require('stream');
if (process.env.READABLE_STREAM === 'disable' && Stream) {
module.exports = Stream.Readable;
Object.assign(module.exports, Stream);
module.exports.Stream = Stream;
} else {
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = Stream || exports;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
exports.finished = require('./lib/internal/streams/end-of-stream.js');
exports.pipeline = require('./lib/internal/streams/pipeline.js');
}
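
Usage note on the environment switch above: setting READABLE_STREAM=disable makes this entry point hand back Node's built-in stream module instead of the user-land copy, e.g.:

// READABLE_STREAM=disable node app.js
// const { Readable } = require('readable-stream') // now resolves to require('stream').Readable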

37
node_modules/bl/package.json generated vendored Normal file
View File

@@ -0,0 +1,37 @@
{
"name": "bl",
"version": "4.1.0",
"description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!",
"license": "MIT",
"main": "bl.js",
"scripts": {
"lint": "standard *.js test/*.js",
"test": "npm run lint && node test/test.js | faucet"
},
"repository": {
"type": "git",
"url": "https://github.com/rvagg/bl.git"
},
"homepage": "https://github.com/rvagg/bl",
"authors": [
"Rod Vagg <rod@vagg.org> (https://github.com/rvagg)",
"Matteo Collina <matteo.collina@gmail.com> (https://github.com/mcollina)",
"Jarett Cruger <jcrugzz@gmail.com> (https://github.com/jcrugzz)"
],
"keywords": [
"buffer",
"buffers",
"stream",
"awesomesauce"
],
"dependencies": {
"buffer": "^5.5.0",
"inherits": "^2.0.4",
"readable-stream": "^3.4.0"
},
"devDependencies": {
"faucet": "~0.0.1",
"standard": "^14.3.0",
"tape": "^4.11.0"
}
}

21
node_modules/bl/test/convert.js generated vendored Normal file
View File

@@ -0,0 +1,21 @@
'use strict'
const tape = require('tape')
const { BufferList, BufferListStream } = require('../')
const { Buffer } = require('buffer')
tape('convert from BufferList to BufferListStream', (t) => {
const data = Buffer.from(`TEST-${Date.now()}`)
const bl = new BufferList(data)
const bls = new BufferListStream(bl)
t.ok(bl.slice().equals(bls.slice()))
t.end()
})
tape('convert from BufferListStream to BufferList', (t) => {
const data = Buffer.from(`TEST-${Date.now()}`)
const bls = new BufferListStream(data)
const bl = new BufferList(bls)
t.ok(bl.slice().equals(bls.slice()))
t.end()
})

492
node_modules/bl/test/indexOf.js generated vendored Normal file
View File

@@ -0,0 +1,492 @@
'use strict'
const tape = require('tape')
const BufferList = require('../')
const { Buffer } = require('buffer')
tape('indexOf single byte needle', (t) => {
const bl = new BufferList(['abcdefg', 'abcdefg', '12345'])
t.equal(bl.indexOf('e'), 4)
t.equal(bl.indexOf('e', 5), 11)
t.equal(bl.indexOf('e', 12), -1)
t.equal(bl.indexOf('5'), 18)
t.end()
})
tape('indexOf multiple byte needle', (t) => {
const bl = new BufferList(['abcdefg', 'abcdefg'])
t.equal(bl.indexOf('ef'), 4)
t.equal(bl.indexOf('ef', 5), 11)
t.end()
})
tape('indexOf multiple byte needles across buffer boundaries', (t) => {
const bl = new BufferList(['abcdefg', 'abcdefg'])
t.equal(bl.indexOf('fgabc'), 5)
t.end()
})
tape('indexOf takes a Uint8Array search', (t) => {
const bl = new BufferList(['abcdefg', 'abcdefg'])
const search = new Uint8Array([102, 103, 97, 98, 99]) // fgabc
t.equal(bl.indexOf(search), 5)
t.end()
})
tape('indexOf takes a buffer list search', (t) => {
const bl = new BufferList(['abcdefg', 'abcdefg'])
const search = new BufferList('fgabc')
t.equal(bl.indexOf(search), 5)
t.end()
})
tape('indexOf a zero byte needle', (t) => {
const b = new BufferList('abcdef')
const bufEmpty = Buffer.from('')
t.equal(b.indexOf(''), 0)
t.equal(b.indexOf('', 1), 1)
t.equal(b.indexOf('', b.length + 1), b.length)
t.equal(b.indexOf('', Infinity), b.length)
t.equal(b.indexOf(bufEmpty), 0)
t.equal(b.indexOf(bufEmpty, 1), 1)
t.equal(b.indexOf(bufEmpty, b.length + 1), b.length)
t.equal(b.indexOf(bufEmpty, Infinity), b.length)
t.end()
})
tape('indexOf buffers smaller and larger than the needle', (t) => {
const bl = new BufferList(['abcdefg', 'a', 'bcdefg', 'a', 'bcfgab'])
t.equal(bl.indexOf('fgabc'), 5)
t.equal(bl.indexOf('fgabc', 6), 12)
t.equal(bl.indexOf('fgabc', 13), -1)
t.end()
})
// only present in node 6+
;(process.version.substr(1).split('.')[0] >= 6) && tape('indexOf latin1 and binary encoding', (t) => {
const b = new BufferList('abcdef')
// test latin1 encoding
t.equal(
new BufferList(Buffer.from(b.toString('latin1'), 'latin1'))
.indexOf('d', 0, 'latin1'),
3
)
t.equal(
new BufferList(Buffer.from(b.toString('latin1'), 'latin1'))
.indexOf(Buffer.from('d', 'latin1'), 0, 'latin1'),
3
)
t.equal(
new BufferList(Buffer.from('aa\u00e8aa', 'latin1'))
.indexOf('\u00e8', 'latin1'),
2
)
t.equal(
new BufferList(Buffer.from('\u00e8', 'latin1'))
.indexOf('\u00e8', 'latin1'),
0
)
t.equal(
new BufferList(Buffer.from('\u00e8', 'latin1'))
.indexOf(Buffer.from('\u00e8', 'latin1'), 'latin1'),
0
)
// test binary encoding
t.equal(
new BufferList(Buffer.from(b.toString('binary'), 'binary'))
.indexOf('d', 0, 'binary'),
3
)
t.equal(
new BufferList(Buffer.from(b.toString('binary'), 'binary'))
.indexOf(Buffer.from('d', 'binary'), 0, 'binary'),
3
)
t.equal(
new BufferList(Buffer.from('aa\u00e8aa', 'binary'))
.indexOf('\u00e8', 'binary'),
2
)
t.equal(
new BufferList(Buffer.from('\u00e8', 'binary'))
.indexOf('\u00e8', 'binary'),
0
)
t.equal(
new BufferList(Buffer.from('\u00e8', 'binary'))
.indexOf(Buffer.from('\u00e8', 'binary'), 'binary'),
0
)
t.end()
})
tape('indexOf the entire nodejs10 buffer test suite', (t) => {
const b = new BufferList('abcdef')
const bufA = Buffer.from('a')
const bufBc = Buffer.from('bc')
const bufF = Buffer.from('f')
const bufZ = Buffer.from('z')
const stringComparison = 'abcdef'
t.equal(b.indexOf('a'), 0)
t.equal(b.indexOf('a', 1), -1)
t.equal(b.indexOf('a', -1), -1)
t.equal(b.indexOf('a', -4), -1)
t.equal(b.indexOf('a', -b.length), 0)
t.equal(b.indexOf('a', NaN), 0)
t.equal(b.indexOf('a', -Infinity), 0)
t.equal(b.indexOf('a', Infinity), -1)
t.equal(b.indexOf('bc'), 1)
t.equal(b.indexOf('bc', 2), -1)
t.equal(b.indexOf('bc', -1), -1)
t.equal(b.indexOf('bc', -3), -1)
t.equal(b.indexOf('bc', -5), 1)
t.equal(b.indexOf('bc', NaN), 1)
t.equal(b.indexOf('bc', -Infinity), 1)
t.equal(b.indexOf('bc', Infinity), -1)
t.equal(b.indexOf('f'), b.length - 1)
t.equal(b.indexOf('z'), -1)
// empty search tests
t.equal(b.indexOf(bufA), 0)
t.equal(b.indexOf(bufA, 1), -1)
t.equal(b.indexOf(bufA, -1), -1)
t.equal(b.indexOf(bufA, -4), -1)
t.equal(b.indexOf(bufA, -b.length), 0)
t.equal(b.indexOf(bufA, NaN), 0)
t.equal(b.indexOf(bufA, -Infinity), 0)
t.equal(b.indexOf(bufA, Infinity), -1)
t.equal(b.indexOf(bufBc), 1)
t.equal(b.indexOf(bufBc, 2), -1)
t.equal(b.indexOf(bufBc, -1), -1)
t.equal(b.indexOf(bufBc, -3), -1)
t.equal(b.indexOf(bufBc, -5), 1)
t.equal(b.indexOf(bufBc, NaN), 1)
t.equal(b.indexOf(bufBc, -Infinity), 1)
t.equal(b.indexOf(bufBc, Infinity), -1)
t.equal(b.indexOf(bufF), b.length - 1)
t.equal(b.indexOf(bufZ), -1)
t.equal(b.indexOf(0x61), 0)
t.equal(b.indexOf(0x61, 1), -1)
t.equal(b.indexOf(0x61, -1), -1)
t.equal(b.indexOf(0x61, -4), -1)
t.equal(b.indexOf(0x61, -b.length), 0)
t.equal(b.indexOf(0x61, NaN), 0)
t.equal(b.indexOf(0x61, -Infinity), 0)
t.equal(b.indexOf(0x61, Infinity), -1)
t.equal(b.indexOf(0x0), -1)
// test offsets
t.equal(b.indexOf('d', 2), 3)
t.equal(b.indexOf('f', 5), 5)
t.equal(b.indexOf('f', -1), 5)
t.equal(b.indexOf('f', 6), -1)
t.equal(b.indexOf(Buffer.from('d'), 2), 3)
t.equal(b.indexOf(Buffer.from('f'), 5), 5)
t.equal(b.indexOf(Buffer.from('f'), -1), 5)
t.equal(b.indexOf(Buffer.from('f'), 6), -1)
t.equal(Buffer.from('ff').indexOf(Buffer.from('f'), 1, 'ucs2'), -1)
// test invalid and uppercase encoding
t.equal(b.indexOf('b', 'utf8'), 1)
t.equal(b.indexOf('b', 'UTF8'), 1)
t.equal(b.indexOf('62', 'HEX'), 1)
t.throws(() => b.indexOf('bad', 'enc'), TypeError)
// test hex encoding
t.equal(
Buffer.from(b.toString('hex'), 'hex')
.indexOf('64', 0, 'hex'),
3
)
t.equal(
Buffer.from(b.toString('hex'), 'hex')
.indexOf(Buffer.from('64', 'hex'), 0, 'hex'),
3
)
// test base64 encoding
t.equal(
Buffer.from(b.toString('base64'), 'base64')
.indexOf('ZA==', 0, 'base64'),
3
)
t.equal(
Buffer.from(b.toString('base64'), 'base64')
.indexOf(Buffer.from('ZA==', 'base64'), 0, 'base64'),
3
)
// test ascii encoding
t.equal(
Buffer.from(b.toString('ascii'), 'ascii')
.indexOf('d', 0, 'ascii'),
3
)
t.equal(
Buffer.from(b.toString('ascii'), 'ascii')
.indexOf(Buffer.from('d', 'ascii'), 0, 'ascii'),
3
)
// test optional offset with passed encoding
t.equal(Buffer.from('aaaa0').indexOf('30', 'hex'), 4)
t.equal(Buffer.from('aaaa00a').indexOf('3030', 'hex'), 4)
{
// test usc2 encoding
const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2')
t.equal(8, twoByteString.indexOf('\u0395', 4, 'ucs2'))
t.equal(6, twoByteString.indexOf('\u03a3', -4, 'ucs2'))
t.equal(4, twoByteString.indexOf('\u03a3', -6, 'ucs2'))
t.equal(4, twoByteString.indexOf(
Buffer.from('\u03a3', 'ucs2'), -6, 'ucs2'))
t.equal(-1, twoByteString.indexOf('\u03a3', -2, 'ucs2'))
}
const mixedByteStringUcs2 =
Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395', 'ucs2')
t.equal(6, mixedByteStringUcs2.indexOf('bc', 0, 'ucs2'))
t.equal(10, mixedByteStringUcs2.indexOf('\u03a3', 0, 'ucs2'))
t.equal(-1, mixedByteStringUcs2.indexOf('\u0396', 0, 'ucs2'))
t.equal(
6, mixedByteStringUcs2.indexOf(Buffer.from('bc', 'ucs2'), 0, 'ucs2'))
t.equal(
10, mixedByteStringUcs2.indexOf(Buffer.from('\u03a3', 'ucs2'), 0, 'ucs2'))
t.equal(
-1, mixedByteStringUcs2.indexOf(Buffer.from('\u0396', 'ucs2'), 0, 'ucs2'))
{
const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2')
// Test single char pattern
t.equal(0, twoByteString.indexOf('\u039a', 0, 'ucs2'))
let index = twoByteString.indexOf('\u0391', 0, 'ucs2')
t.equal(2, index, `Alpha - at index ${index}`)
index = twoByteString.indexOf('\u03a3', 0, 'ucs2')
t.equal(4, index, `First Sigma - at index ${index}`)
index = twoByteString.indexOf('\u03a3', 6, 'ucs2')
t.equal(6, index, `Second Sigma - at index ${index}`)
index = twoByteString.indexOf('\u0395', 0, 'ucs2')
t.equal(8, index, `Epsilon - at index ${index}`)
index = twoByteString.indexOf('\u0392', 0, 'ucs2')
t.equal(-1, index, `Not beta - at index ${index}`)
// Test multi-char pattern
index = twoByteString.indexOf('\u039a\u0391', 0, 'ucs2')
t.equal(0, index, `Lambda Alpha - at index ${index}`)
index = twoByteString.indexOf('\u0391\u03a3', 0, 'ucs2')
t.equal(2, index, `Alpha Sigma - at index ${index}`)
index = twoByteString.indexOf('\u03a3\u03a3', 0, 'ucs2')
t.equal(4, index, `Sigma Sigma - at index ${index}`)
index = twoByteString.indexOf('\u03a3\u0395', 0, 'ucs2')
t.equal(6, index, `Sigma Epsilon - at index ${index}`)
}
const mixedByteStringUtf8 = Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395')
t.equal(5, mixedByteStringUtf8.indexOf('bc'))
t.equal(5, mixedByteStringUtf8.indexOf('bc', 5))
t.equal(5, mixedByteStringUtf8.indexOf('bc', -8))
t.equal(7, mixedByteStringUtf8.indexOf('\u03a3'))
t.equal(-1, mixedByteStringUtf8.indexOf('\u0396'))
// Test complex string indexOf algorithms. Only trigger for long strings.
// Long string that isn't a simple repeat of a shorter string.
let longString = 'A'
for (let i = 66; i < 76; i++) { // from 'B' to 'K'
longString = longString + String.fromCharCode(i) + longString
}
const longBufferString = Buffer.from(longString)
// pattern of 15 chars, repeated every 16 chars in long
let pattern = 'ABACABADABACABA'
for (let i = 0; i < longBufferString.length - pattern.length; i += 7) {
const index = longBufferString.indexOf(pattern, i)
t.equal((i + 15) & ~0xf, index,
`Long ABACABA...-string at index ${i}`)
}
let index = longBufferString.indexOf('AJABACA')
t.equal(510, index, `Long AJABACA, First J - at index ${index}`)
index = longBufferString.indexOf('AJABACA', 511)
t.equal(1534, index, `Long AJABACA, Second J - at index ${index}`)
pattern = 'JABACABADABACABA'
index = longBufferString.indexOf(pattern)
t.equal(511, index, `Long JABACABA..., First J - at index ${index}`)
index = longBufferString.indexOf(pattern, 512)
t.equal(
1535, index, `Long JABACABA..., Second J - at index ${index}`)
// Search for a non-ASCII string in a pure ASCII string.
const asciiString = Buffer.from(
'somethingnotatallsinisterwhichalsoworks')
t.equal(-1, asciiString.indexOf('\x2061'))
t.equal(3, asciiString.indexOf('eth', 0))
// Search in string containing many non-ASCII chars.
const allCodePoints = []
for (let i = 0; i < 65536; i++) {
allCodePoints[i] = i
}
const allCharsString = String.fromCharCode.apply(String, allCodePoints)
const allCharsBufferUtf8 = Buffer.from(allCharsString)
const allCharsBufferUcs2 = Buffer.from(allCharsString, 'ucs2')
// Search for string long enough to trigger complex search with ASCII pattern
// and UC16 subject.
t.equal(-1, allCharsBufferUtf8.indexOf('notfound'))
t.equal(-1, allCharsBufferUcs2.indexOf('notfound'))
// Needle is longer than haystack, but only because it's encoded as UTF-16
t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'ucs2'), -1)
t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'utf8'), 0)
t.equal(Buffer.from('aaaa').indexOf('你好', 'ucs2'), -1)
// Haystack has odd length, but the needle is UCS2.
t.equal(Buffer.from('aaaaa').indexOf('b', 'ucs2'), -1)
{
// Find substrings in Utf8.
const lengths = [1, 3, 15] // Single char, simple and complex.
const indices = [0x5, 0x60, 0x400, 0x680, 0x7ee, 0xFF02, 0x16610, 0x2f77b]
for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) {
for (let i = 0; i < indices.length; i++) {
const index = indices[i]
let length = lengths[lengthIndex]
if (index + length > 0x7F) {
length = 2 * length
}
if (index + length > 0x7FF) {
length = 3 * length
}
if (index + length > 0xFFFF) {
length = 4 * length
}
const patternBufferUtf8 = allCharsBufferUtf8.slice(index, index + length)
t.equal(index, allCharsBufferUtf8.indexOf(patternBufferUtf8))
const patternStringUtf8 = patternBufferUtf8.toString()
t.equal(index, allCharsBufferUtf8.indexOf(patternStringUtf8))
}
}
}
{
// Find substrings in Usc2.
const lengths = [2, 4, 16] // Single char, simple and complex.
const indices = [0x5, 0x65, 0x105, 0x205, 0x285, 0x2005, 0x2085, 0xfff0]
for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) {
for (let i = 0; i < indices.length; i++) {
const index = indices[i] * 2
const length = lengths[lengthIndex]
const patternBufferUcs2 =
allCharsBufferUcs2.slice(index, index + length)
t.equal(
index, allCharsBufferUcs2.indexOf(patternBufferUcs2, 0, 'ucs2'))
const patternStringUcs2 = patternBufferUcs2.toString('ucs2')
t.equal(
index, allCharsBufferUcs2.indexOf(patternStringUcs2, 0, 'ucs2'))
}
}
}
[
() => {},
{},
[]
].forEach((val) => {
t.throws(() => b.indexOf(val), TypeError, `"${JSON.stringify(val)}" should throw`)
})
// Test weird offset arguments.
// The following offsets coerce to NaN or 0, searching the whole Buffer
t.equal(b.indexOf('b', undefined), 1)
t.equal(b.indexOf('b', {}), 1)
t.equal(b.indexOf('b', 0), 1)
t.equal(b.indexOf('b', null), 1)
t.equal(b.indexOf('b', []), 1)
// The following offset coerces to 2, in other words +[2] === 2
t.equal(b.indexOf('b', [2]), -1)
// Behavior should match String.indexOf()
t.equal(
b.indexOf('b', undefined),
stringComparison.indexOf('b', undefined))
t.equal(
b.indexOf('b', {}),
stringComparison.indexOf('b', {}))
t.equal(
b.indexOf('b', 0),
stringComparison.indexOf('b', 0))
t.equal(
b.indexOf('b', null),
stringComparison.indexOf('b', null))
t.equal(
b.indexOf('b', []),
stringComparison.indexOf('b', []))
t.equal(
b.indexOf('b', [2]),
stringComparison.indexOf('b', [2]))
// test truncation of Number arguments to uint8
{
const buf = Buffer.from('this is a test')
t.equal(buf.indexOf(0x6973), 3)
t.equal(buf.indexOf(0x697320), 4)
t.equal(buf.indexOf(0x69732069), 2)
t.equal(buf.indexOf(0x697374657374), 0)
t.equal(buf.indexOf(0x69737374), 0)
t.equal(buf.indexOf(0x69737465), 11)
t.equal(buf.indexOf(0x69737465), 11)
t.equal(buf.indexOf(-140), 0)
t.equal(buf.indexOf(-152), 1)
t.equal(buf.indexOf(0xff), -1)
t.equal(buf.indexOf(0xffff), -1)
}
// Test that Uint8Array arguments are okay.
{
const needle = new Uint8Array([0x66, 0x6f, 0x6f])
const haystack = new BufferList(Buffer.from('a foo b foo'))
t.equal(haystack.indexOf(needle), 2)
}
t.end()
})

32
node_modules/bl/test/isBufferList.js generated vendored Normal file
View File

@@ -0,0 +1,32 @@
'use strict'
const tape = require('tape')
const { BufferList, BufferListStream } = require('../')
const { Buffer } = require('buffer')
tape('isBufferList positives', (t) => {
t.ok(BufferList.isBufferList(new BufferList()))
t.ok(BufferList.isBufferList(new BufferListStream()))
t.end()
})
tape('isBufferList negatives', (t) => {
const types = [
null,
undefined,
NaN,
true,
false,
{},
[],
Buffer.alloc(0),
[Buffer.alloc(0)]
]
for (const obj of types) {
t.notOk(BufferList.isBufferList(obj))
}
t.end()
})

869
node_modules/bl/test/test.js generated vendored Normal file
View File

@@ -0,0 +1,869 @@
'use strict'
const tape = require('tape')
const crypto = require('crypto')
const fs = require('fs')
const path = require('path')
const BufferList = require('../')
const { Buffer } = require('buffer')
const encodings =
('hex utf8 utf-8 ascii binary base64' +
(process.browser ? '' : ' ucs2 ucs-2 utf16le utf-16le')).split(' ')
require('./indexOf')
require('./isBufferList')
require('./convert')
tape('single bytes from single buffer', function (t) {
const bl = new BufferList()
bl.append(Buffer.from('abcd'))
t.equal(bl.length, 4)
t.equal(bl.get(-1), undefined)
t.equal(bl.get(0), 97)
t.equal(bl.get(1), 98)
t.equal(bl.get(2), 99)
t.equal(bl.get(3), 100)
t.equal(bl.get(4), undefined)
t.end()
})
tape('single bytes from multiple buffers', function (t) {
const bl = new BufferList()
bl.append(Buffer.from('abcd'))
bl.append(Buffer.from('efg'))
bl.append(Buffer.from('hi'))
bl.append(Buffer.from('j'))
t.equal(bl.length, 10)
t.equal(bl.get(0), 97)
t.equal(bl.get(1), 98)
t.equal(bl.get(2), 99)
t.equal(bl.get(3), 100)
t.equal(bl.get(4), 101)
t.equal(bl.get(5), 102)
t.equal(bl.get(6), 103)
t.equal(bl.get(7), 104)
t.equal(bl.get(8), 105)
t.equal(bl.get(9), 106)
t.end()
})
tape('multi bytes from single buffer', function (t) {
const bl = new BufferList()
bl.append(Buffer.from('abcd'))
t.equal(bl.length, 4)
t.equal(bl.slice(0, 4).toString('ascii'), 'abcd')
t.equal(bl.slice(0, 3).toString('ascii'), 'abc')
t.equal(bl.slice(1, 4).toString('ascii'), 'bcd')
t.equal(bl.slice(-4, -1).toString('ascii'), 'abc')
t.end()
})
tape('multi bytes from single buffer (negative indexes)', function (t) {
const bl = new BufferList()
bl.append(Buffer.from('buffer'))
t.equal(bl.length, 6)
t.equal(bl.slice(-6, -1).toString('ascii'), 'buffe')
t.equal(bl.slice(-6, -2).toString('ascii'), 'buff')
t.equal(bl.slice(-5, -2).toString('ascii'), 'uff')
t.end()
})
tape('multiple bytes from multiple buffers', function (t) {
const bl = new BufferList()
bl.append(Buffer.from('abcd'))
bl.append(Buffer.from('efg'))
bl.append(Buffer.from('hi'))
bl.append(Buffer.from('j'))
t.equal(bl.length, 10)
t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
t.equal(bl.slice(3, 6).toString('ascii'), 'def')
t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
t.equal(bl.slice(-7, -4).toString('ascii'), 'def')
t.end()
})
tape('multiple bytes from multiple buffer lists', function (t) {
const bl = new BufferList()
bl.append(new BufferList([Buffer.from('abcd'), Buffer.from('efg')]))
bl.append(new BufferList([Buffer.from('hi'), Buffer.from('j')]))
t.equal(bl.length, 10)
t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
t.equal(bl.slice(3, 6).toString('ascii'), 'def')
t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
t.end()
})
// same data as previous test, just using nested constructors
tape('multiple bytes from crazy nested buffer lists', function (t) {
const bl = new BufferList()
bl.append(new BufferList([
new BufferList([
new BufferList(Buffer.from('abc')),
Buffer.from('d'),
new BufferList(Buffer.from('efg'))
]),
new BufferList([Buffer.from('hi')]),
new BufferList(Buffer.from('j'))
]))
t.equal(bl.length, 10)
t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
t.equal(bl.slice(3, 6).toString('ascii'), 'def')
t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
t.end()
})
tape('append accepts arrays of Buffers', function (t) {
const bl = new BufferList()
bl.append(Buffer.from('abc'))
bl.append([Buffer.from('def')])
bl.append([Buffer.from('ghi'), Buffer.from('jkl')])
bl.append([Buffer.from('mnop'), Buffer.from('qrstu'), Buffer.from('vwxyz')])
t.equal(bl.length, 26)
t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
t.end()
})
tape('append accepts arrays of Uint8Arrays', function (t) {
const bl = new BufferList()
bl.append(new Uint8Array([97, 98, 99]))
bl.append([Uint8Array.from([100, 101, 102])])
bl.append([new Uint8Array([103, 104, 105]), new Uint8Array([106, 107, 108])])
bl.append([new Uint8Array([109, 110, 111, 112]), new Uint8Array([113, 114, 115, 116, 117]), new Uint8Array([118, 119, 120, 121, 122])])
t.equal(bl.length, 26)
t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
t.end()
})
tape('append accepts arrays of BufferLists', function (t) {
const bl = new BufferList()
bl.append(Buffer.from('abc'))
bl.append([new BufferList('def')])
bl.append(new BufferList([Buffer.from('ghi'), new BufferList('jkl')]))
bl.append([Buffer.from('mnop'), new BufferList([Buffer.from('qrstu'), Buffer.from('vwxyz')])])
t.equal(bl.length, 26)
t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
t.end()
})
tape('append chainable', function (t) {
const bl = new BufferList()
t.ok(bl.append(Buffer.from('abcd')) === bl)
t.ok(bl.append([Buffer.from('abcd')]) === bl)
t.ok(bl.append(new BufferList(Buffer.from('abcd'))) === bl)
t.ok(bl.append([new BufferList(Buffer.from('abcd'))]) === bl)
t.end()
})
tape('append chainable (test results)', function (t) {
const bl = new BufferList('abc')
.append([new BufferList('def')])
.append(new BufferList([Buffer.from('ghi'), new BufferList('jkl')]))
.append([Buffer.from('mnop'), new BufferList([Buffer.from('qrstu'), Buffer.from('vwxyz')])])
t.equal(bl.length, 26)
t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
t.end()
})
tape('consuming from multiple buffers', function (t) {
const bl = new BufferList()
bl.append(Buffer.from('abcd'))
bl.append(Buffer.from('efg'))
bl.append(Buffer.from('hi'))
bl.append(Buffer.from('j'))
t.equal(bl.length, 10)
t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
bl.consume(3)
t.equal(bl.length, 7)
t.equal(bl.slice(0, 7).toString('ascii'), 'defghij')
bl.consume(2)
t.equal(bl.length, 5)
t.equal(bl.slice(0, 5).toString('ascii'), 'fghij')
bl.consume(1)
t.equal(bl.length, 4)
t.equal(bl.slice(0, 4).toString('ascii'), 'ghij')
bl.consume(1)
t.equal(bl.length, 3)
t.equal(bl.slice(0, 3).toString('ascii'), 'hij')
bl.consume(2)
t.equal(bl.length, 1)
t.equal(bl.slice(0, 1).toString('ascii'), 'j')
t.end()
})
tape('complete consumption', function (t) {
const bl = new BufferList()
bl.append(Buffer.from('a'))
bl.append(Buffer.from('b'))
bl.consume(2)
t.equal(bl.length, 0)
t.equal(bl._bufs.length, 0)
t.end()
})
tape('test readUInt8 / readInt8', function (t) {
const buf1 = Buffer.alloc(1)
const buf2 = Buffer.alloc(3)
const buf3 = Buffer.alloc(3)
const bl = new BufferList()
buf1[0] = 0x1
buf2[1] = 0x3
buf2[2] = 0x4
buf3[0] = 0x23
buf3[1] = 0x42
bl.append(buf1)
bl.append(buf2)
bl.append(buf3)
t.equal(bl.readUInt8(), 0x1)
t.equal(bl.readUInt8(2), 0x3)
t.equal(bl.readInt8(2), 0x3)
t.equal(bl.readUInt8(3), 0x4)
t.equal(bl.readInt8(3), 0x4)
t.equal(bl.readUInt8(4), 0x23)
t.equal(bl.readInt8(4), 0x23)
t.equal(bl.readUInt8(5), 0x42)
t.equal(bl.readInt8(5), 0x42)
t.end()
})
tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) {
const buf1 = Buffer.alloc(1)
const buf2 = Buffer.alloc(3)
const buf3 = Buffer.alloc(3)
const bl = new BufferList()
buf1[0] = 0x1
buf2[1] = 0x3
buf2[2] = 0x4
buf3[0] = 0x23
buf3[1] = 0x42
bl.append(buf1)
bl.append(buf2)
bl.append(buf3)
t.equal(bl.readUInt16BE(), 0x0100)
t.equal(bl.readUInt16LE(), 0x0001)
t.equal(bl.readUInt16BE(2), 0x0304)
t.equal(bl.readUInt16LE(2), 0x0403)
t.equal(bl.readInt16BE(2), 0x0304)
t.equal(bl.readInt16LE(2), 0x0403)
t.equal(bl.readUInt16BE(3), 0x0423)
t.equal(bl.readUInt16LE(3), 0x2304)
t.equal(bl.readInt16BE(3), 0x0423)
t.equal(bl.readInt16LE(3), 0x2304)
t.equal(bl.readUInt16BE(4), 0x2342)
t.equal(bl.readUInt16LE(4), 0x4223)
t.equal(bl.readInt16BE(4), 0x2342)
t.equal(bl.readInt16LE(4), 0x4223)
t.end()
})
tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) {
const buf1 = Buffer.alloc(1)
const buf2 = Buffer.alloc(3)
const buf3 = Buffer.alloc(3)
const bl = new BufferList()
buf1[0] = 0x1
buf2[1] = 0x3
buf2[2] = 0x4
buf3[0] = 0x23
buf3[1] = 0x42
bl.append(buf1)
bl.append(buf2)
bl.append(buf3)
t.equal(bl.readUInt32BE(), 0x01000304)
t.equal(bl.readUInt32LE(), 0x04030001)
t.equal(bl.readUInt32BE(2), 0x03042342)
t.equal(bl.readUInt32LE(2), 0x42230403)
t.equal(bl.readInt32BE(2), 0x03042342)
t.equal(bl.readInt32LE(2), 0x42230403)
t.end()
})
tape('test readUIntLE / readUIntBE / readIntLE / readIntBE', function (t) {
const buf1 = Buffer.alloc(1)
const buf2 = Buffer.alloc(3)
const buf3 = Buffer.alloc(3)
const bl = new BufferList()
buf2[0] = 0x2
buf2[1] = 0x3
buf2[2] = 0x4
buf3[0] = 0x23
buf3[1] = 0x42
buf3[2] = 0x61
bl.append(buf1)
bl.append(buf2)
bl.append(buf3)
t.equal(bl.readUIntBE(1, 1), 0x02)
t.equal(bl.readUIntBE(1, 2), 0x0203)
t.equal(bl.readUIntBE(1, 3), 0x020304)
t.equal(bl.readUIntBE(1, 4), 0x02030423)
t.equal(bl.readUIntBE(1, 5), 0x0203042342)
t.equal(bl.readUIntBE(1, 6), 0x020304234261)
t.equal(bl.readUIntLE(1, 1), 0x02)
t.equal(bl.readUIntLE(1, 2), 0x0302)
t.equal(bl.readUIntLE(1, 3), 0x040302)
t.equal(bl.readUIntLE(1, 4), 0x23040302)
t.equal(bl.readUIntLE(1, 5), 0x4223040302)
t.equal(bl.readUIntLE(1, 6), 0x614223040302)
t.equal(bl.readIntBE(1, 1), 0x02)
t.equal(bl.readIntBE(1, 2), 0x0203)
t.equal(bl.readIntBE(1, 3), 0x020304)
t.equal(bl.readIntBE(1, 4), 0x02030423)
t.equal(bl.readIntBE(1, 5), 0x0203042342)
t.equal(bl.readIntBE(1, 6), 0x020304234261)
t.equal(bl.readIntLE(1, 1), 0x02)
t.equal(bl.readIntLE(1, 2), 0x0302)
t.equal(bl.readIntLE(1, 3), 0x040302)
t.equal(bl.readIntLE(1, 4), 0x23040302)
t.equal(bl.readIntLE(1, 5), 0x4223040302)
t.equal(bl.readIntLE(1, 6), 0x614223040302)
t.end()
})
tape('test readFloatLE / readFloatBE', function (t) {
const buf1 = Buffer.alloc(1)
const buf2 = Buffer.alloc(3)
const buf3 = Buffer.alloc(3)
const bl = new BufferList()
buf1[0] = 0x01
buf2[1] = 0x00
buf2[2] = 0x00
buf3[0] = 0x80
buf3[1] = 0x3f
bl.append(buf1)
bl.append(buf2)
bl.append(buf3)
const canonical = Buffer.concat([buf1, buf2, buf3])
t.equal(bl.readFloatLE(), canonical.readFloatLE())
t.equal(bl.readFloatBE(), canonical.readFloatBE())
t.equal(bl.readFloatLE(2), canonical.readFloatLE(2))
t.equal(bl.readFloatBE(2), canonical.readFloatBE(2))
t.end()
})
tape('test readDoubleLE / readDoubleBE', function (t) {
const buf1 = Buffer.alloc(1)
const buf2 = Buffer.alloc(3)
const buf3 = Buffer.alloc(10)
const bl = new BufferList()
buf1[0] = 0x01
buf2[1] = 0x55
buf2[2] = 0x55
buf3[0] = 0x55
buf3[1] = 0x55
buf3[2] = 0x55
buf3[3] = 0x55
buf3[4] = 0xd5
buf3[5] = 0x3f
bl.append(buf1)
bl.append(buf2)
bl.append(buf3)
const canonical = Buffer.concat([buf1, buf2, buf3])
t.equal(bl.readDoubleBE(), canonical.readDoubleBE())
t.equal(bl.readDoubleLE(), canonical.readDoubleLE())
t.equal(bl.readDoubleBE(2), canonical.readDoubleBE(2))
t.equal(bl.readDoubleLE(2), canonical.readDoubleLE(2))
t.end()
})
tape('test toString', function (t) {
const bl = new BufferList()
bl.append(Buffer.from('abcd'))
bl.append(Buffer.from('efg'))
bl.append(Buffer.from('hi'))
bl.append(Buffer.from('j'))
t.equal(bl.toString('ascii', 0, 10), 'abcdefghij')
t.equal(bl.toString('ascii', 3, 10), 'defghij')
t.equal(bl.toString('ascii', 3, 6), 'def')
t.equal(bl.toString('ascii', 3, 8), 'defgh')
t.equal(bl.toString('ascii', 5, 10), 'fghij')
t.end()
})
tape('test toString encoding', function (t) {
const bl = new BufferList()
const b = Buffer.from('abcdefghij\xff\x00')
bl.append(Buffer.from('abcd'))
bl.append(Buffer.from('efg'))
bl.append(Buffer.from('hi'))
bl.append(Buffer.from('j'))
bl.append(Buffer.from('\xff\x00'))
encodings.forEach(function (enc) {
t.equal(bl.toString(enc), b.toString(enc), enc)
})
t.end()
})
tape('uninitialized memory', function (t) {
const secret = crypto.randomBytes(256)
for (let i = 0; i < 1e6; i++) {
const clone = Buffer.from(secret)
const bl = new BufferList()
bl.append(Buffer.from('a'))
bl.consume(-1024)
const buf = bl.slice(1)
if (buf.indexOf(clone) !== -1) {
t.fail(`Match (at ${i})`)
break
}
}
t.end()
})
!process.browser && tape('test stream', function (t) {
const random = crypto.randomBytes(65534)
const bl = new BufferList((err, buf) => {
t.ok(Buffer.isBuffer(buf))
t.ok(err === null)
t.ok(random.equals(bl.slice()))
t.ok(random.equals(buf.slice()))
bl.pipe(fs.createWriteStream('/tmp/bl_test_rnd_out.dat'))
.on('close', function () {
const rndhash = crypto.createHash('md5').update(random).digest('hex')
const md5sum = crypto.createHash('md5')
const s = fs.createReadStream('/tmp/bl_test_rnd_out.dat')
s.on('data', md5sum.update.bind(md5sum))
s.on('end', function () {
t.equal(rndhash, md5sum.digest('hex'), 'woohoo! correct hash!')
t.end()
})
})
})
fs.writeFileSync('/tmp/bl_test_rnd.dat', random)
fs.createReadStream('/tmp/bl_test_rnd.dat').pipe(bl)
})
tape('instantiation with Buffer', function (t) {
const buf = crypto.randomBytes(1024)
const buf2 = crypto.randomBytes(1024)
let b = BufferList(buf)
t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer')
b = BufferList([buf, buf2])
t.equal(b.slice().toString('hex'), Buffer.concat([buf, buf2]).toString('hex'), 'same buffer')
t.end()
})
tape('test String appendage', function (t) {
const bl = new BufferList()
const b = Buffer.from('abcdefghij\xff\x00')
bl.append('abcd')
bl.append('efg')
bl.append('hi')
bl.append('j')
bl.append('\xff\x00')
encodings.forEach(function (enc) {
t.equal(bl.toString(enc), b.toString(enc))
})
t.end()
})
tape('test Number appendage', function (t) {
const bl = new BufferList()
const b = Buffer.from('1234567890')
bl.append(1234)
bl.append(567)
bl.append(89)
bl.append(0)
encodings.forEach(function (enc) {
t.equal(bl.toString(enc), b.toString(enc))
})
t.end()
})
tape('write nothing, should get empty buffer', function (t) {
t.plan(3)
BufferList(function (err, data) {
t.notOk(err, 'no error')
t.ok(Buffer.isBuffer(data), 'got a buffer')
t.equal(0, data.length, 'got a zero-length buffer')
t.end()
}).end()
})
tape('unicode string', function (t) {
t.plan(2)
const inp1 = '\u2600'
const inp2 = '\u2603'
const exp = inp1 + ' and ' + inp2
const bl = BufferList()
bl.write(inp1)
bl.write(' and ')
bl.write(inp2)
t.equal(exp, bl.toString())
t.equal(Buffer.from(exp).toString('hex'), bl.toString('hex'))
})
tape('should emit finish', function (t) {
const source = BufferList()
const dest = BufferList()
source.write('hello')
source.pipe(dest)
dest.on('finish', function () {
t.equal(dest.toString('utf8'), 'hello')
t.end()
})
})
tape('basic copy', function (t) {
const buf = crypto.randomBytes(1024)
const buf2 = Buffer.alloc(1024)
const b = BufferList(buf)
b.copy(buf2)
t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
t.end()
})
tape('copy after many appends', function (t) {
const buf = crypto.randomBytes(512)
const buf2 = Buffer.alloc(1024)
const b = BufferList(buf)
b.append(buf)
b.copy(buf2)
t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
t.end()
})
tape('copy at a precise position', function (t) {
const buf = crypto.randomBytes(1004)
const buf2 = Buffer.alloc(1024)
const b = BufferList(buf)
b.copy(buf2, 20)
t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer')
t.end()
})
tape('copy starting from a precise location', function (t) {
const buf = crypto.randomBytes(10)
const buf2 = Buffer.alloc(5)
const b = BufferList(buf)
b.copy(buf2, 0, 5)
t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer')
t.end()
})
tape('copy in an interval', function (t) {
const rnd = crypto.randomBytes(10)
const b = BufferList(rnd) // put the random bytes there
const actual = Buffer.alloc(3)
const expected = Buffer.alloc(3)
rnd.copy(expected, 0, 5, 8)
b.copy(actual, 0, 5, 8)
t.equal(actual.toString('hex'), expected.toString('hex'), 'same buffer')
t.end()
})
tape('copy an interval between two buffers', function (t) {
const buf = crypto.randomBytes(10)
const buf2 = Buffer.alloc(10)
const b = BufferList(buf)
b.append(buf)
b.copy(buf2, 0, 5, 15)
t.equal(b.slice(5, 15).toString('hex'), buf2.toString('hex'), 'same buffer')
t.end()
})
tape('shallow slice across buffer boundaries', function (t) {
const bl = new BufferList(['First', 'Second', 'Third'])
t.equal(bl.shallowSlice(3, 13).toString(), 'stSecondTh')
t.end()
})
tape('shallow slice within single buffer', function (t) {
t.plan(2)
const bl = new BufferList(['First', 'Second', 'Third'])
t.equal(bl.shallowSlice(5, 10).toString(), 'Secon')
t.equal(bl.shallowSlice(7, 10).toString(), 'con')
t.end()
})
tape('shallow slice single buffer', function (t) {
t.plan(3)
const bl = new BufferList(['First', 'Second', 'Third'])
t.equal(bl.shallowSlice(0, 5).toString(), 'First')
t.equal(bl.shallowSlice(5, 11).toString(), 'Second')
t.equal(bl.shallowSlice(11, 16).toString(), 'Third')
})
tape('shallow slice with negative or omitted indices', function (t) {
t.plan(4)
const bl = new BufferList(['First', 'Second', 'Third'])
t.equal(bl.shallowSlice().toString(), 'FirstSecondThird')
t.equal(bl.shallowSlice(5).toString(), 'SecondThird')
t.equal(bl.shallowSlice(5, -3).toString(), 'SecondTh')
t.equal(bl.shallowSlice(-8).toString(), 'ondThird')
})
tape('shallow slice does not make a copy', function (t) {
t.plan(1)
const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')]
const bl = (new BufferList(buffers)).shallowSlice(5, -3)
buffers[1].fill('h')
buffers[2].fill('h')
t.equal(bl.toString(), 'hhhhhhhh')
})
tape('shallow slice with 0 length', function (t) {
t.plan(1)
const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')]
const bl = (new BufferList(buffers)).shallowSlice(0, 0)
t.equal(bl.length, 0)
})
tape('shallow slice with 0 length from middle', function (t) {
t.plan(1)
const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')]
const bl = (new BufferList(buffers)).shallowSlice(10, 10)
t.equal(bl.length, 0)
})
tape('duplicate', function (t) {
t.plan(2)
const bl = new BufferList('abcdefghij\xff\x00')
const dup = bl.duplicate()
t.equal(bl.prototype, dup.prototype)
t.equal(bl.toString('hex'), dup.toString('hex'))
})
tape('destroy no pipe', function (t) {
t.plan(2)
const bl = new BufferList('alsdkfja;lsdkfja;lsdk')
bl.destroy()
t.equal(bl._bufs.length, 0)
t.equal(bl.length, 0)
})
tape('destroy with error', function (t) {
t.plan(3)
const bl = new BufferList('alsdkfja;lsdkfja;lsdk')
const err = new Error('kaboom')
bl.destroy(err)
bl.on('error', function (_err) {
t.equal(_err, err)
})
t.equal(bl._bufs.length, 0)
t.equal(bl.length, 0)
})
!process.browser && tape('destroy with pipe before read end', function (t) {
t.plan(2)
const bl = new BufferList()
fs.createReadStream(path.join(__dirname, '/test.js'))
.pipe(bl)
bl.destroy()
t.equal(bl._bufs.length, 0)
t.equal(bl.length, 0)
})
!process.browser && tape('destroy with pipe before read end with race', function (t) {
t.plan(2)
const bl = new BufferList()
fs.createReadStream(path.join(__dirname, '/test.js'))
.pipe(bl)
setTimeout(function () {
bl.destroy()
setTimeout(function () {
t.equal(bl._bufs.length, 0)
t.equal(bl.length, 0)
}, 500)
}, 500)
})
!process.browser && tape('destroy with pipe after read end', function (t) {
t.plan(2)
const bl = new BufferList()
fs.createReadStream(path.join(__dirname, '/test.js'))
.on('end', onEnd)
.pipe(bl)
function onEnd () {
bl.destroy()
t.equal(bl._bufs.length, 0)
t.equal(bl.length, 0)
}
})
!process.browser && tape('destroy with pipe while writing to a destination', function (t) {
t.plan(4)
const bl = new BufferList()
const ds = new BufferList()
fs.createReadStream(path.join(__dirname, '/test.js'))
.on('end', onEnd)
.pipe(bl)
function onEnd () {
bl.pipe(ds)
setTimeout(function () {
bl.destroy()
t.equals(bl._bufs.length, 0)
t.equals(bl.length, 0)
ds.destroy()
t.equals(bl._bufs.length, 0)
t.equals(bl.length, 0)
}, 100)
}
})
!process.browser && tape('handle error', function (t) {
t.plan(2)
fs.createReadStream('/does/not/exist').pipe(BufferList(function (err, data) {
t.ok(err instanceof Error, 'has error')
t.notOk(data, 'no data')
}))
})

70
node_modules/buffer/AUTHORS.md generated vendored Normal file
View File

@@ -0,0 +1,70 @@
# Authors
#### Ordered by first contribution.
- Romain Beauxis (toots@rastageeks.org)
- Tobias Koppers (tobias.koppers@googlemail.com)
- Janus (ysangkok@gmail.com)
- Rainer Dreyer (rdrey1@gmail.com)
- Tõnis Tiigi (tonistiigi@gmail.com)
- James Halliday (mail@substack.net)
- Michael Williamson (mike@zwobble.org)
- elliottcable (github@elliottcable.name)
- rafael (rvalle@livelens.net)
- Andrew Kelley (superjoe30@gmail.com)
- Andreas Madsen (amwebdk@gmail.com)
- Mike Brevoort (mike.brevoort@pearson.com)
- Brian White (mscdex@mscdex.net)
- Feross Aboukhadijeh (feross@feross.org)
- Ruben Verborgh (ruben@verborgh.org)
- eliang (eliang.cs@gmail.com)
- Jesse Tane (jesse.tane@gmail.com)
- Alfonso Boza (alfonso@cloud.com)
- Mathias Buus (mathiasbuus@gmail.com)
- Devon Govett (devongovett@gmail.com)
- Daniel Cousens (github@dcousens.com)
- Joseph Dykstra (josephdykstra@gmail.com)
- Parsha Pourkhomami (parshap+git@gmail.com)
- Damjan Košir (damjan.kosir@gmail.com)
- daverayment (dave.rayment@gmail.com)
- kawanet (u-suke@kawa.net)
- Linus Unnebäck (linus@folkdatorn.se)
- Nolan Lawson (nolan.lawson@gmail.com)
- Calvin Metcalf (calvin.metcalf@gmail.com)
- Koki Takahashi (hakatasiloving@gmail.com)
- Guy Bedford (guybedford@gmail.com)
- Jan Schär (jscissr@gmail.com)
- RaulTsc (tomescu.raul@gmail.com)
- Matthieu Monsch (monsch@alum.mit.edu)
- Dan Ehrenberg (littledan@chromium.org)
- Kirill Fomichev (fanatid@ya.ru)
- Yusuke Kawasaki (u-suke@kawa.net)
- DC (dcposch@dcpos.ch)
- John-David Dalton (john.david.dalton@gmail.com)
- adventure-yunfei (adventure030@gmail.com)
- Emil Bay (github@tixz.dk)
- Sam Sudar (sudar.sam@gmail.com)
- Volker Mische (volker.mische@gmail.com)
- David Walton (support@geekstocks.com)
- Сковорода Никита Андреевич (chalkerx@gmail.com)
- greenkeeper[bot] (greenkeeper[bot]@users.noreply.github.com)
- ukstv (sergey.ukustov@machinomy.com)
- Renée Kooi (renee@kooi.me)
- ranbochen (ranbochen@qq.com)
- Vladimir Borovik (bobahbdb@gmail.com)
- greenkeeper[bot] (23040076+greenkeeper[bot]@users.noreply.github.com)
- kumavis (aaron@kumavis.me)
- Sergey Ukustov (sergey.ukustov@machinomy.com)
- Fei Liu (liu.feiwood@gmail.com)
- Blaine Bublitz (blaine.bublitz@gmail.com)
- clement (clement@seald.io)
- Koushik Dutta (koushd@gmail.com)
- Jordan Harband (ljharb@gmail.com)
- Niklas Mischkulnig (mischnic@users.noreply.github.com)
- Nikolai Vavilov (vvnicholas@gmail.com)
- Fedor Nezhivoi (gyzerok@users.noreply.github.com)
- Peter Newman (peternewman@users.noreply.github.com)
- mathmakgakpak (44949126+mathmakgakpak@users.noreply.github.com)
- jkkang (jkkang@smartauth.kr)
#### Generated by bin/update-authors.sh.

21
node_modules/buffer/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) Feross Aboukhadijeh, and other contributors.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

410
node_modules/buffer/README.md generated vendored Normal file
View File

@@ -0,0 +1,410 @@
# buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
[travis-image]: https://img.shields.io/travis/feross/buffer/master.svg
[travis-url]: https://travis-ci.org/feross/buffer
[npm-image]: https://img.shields.io/npm/v/buffer.svg
[npm-url]: https://npmjs.org/package/buffer
[downloads-image]: https://img.shields.io/npm/dm/buffer.svg
[downloads-url]: https://npmjs.org/package/buffer
[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg
[standard-url]: https://standardjs.com
#### The buffer module from [node.js](https://nodejs.org/), for the browser.
[![saucelabs][saucelabs-image]][saucelabs-url]
[saucelabs-image]: https://saucelabs.com/browser-matrix/buffer.svg
[saucelabs-url]: https://saucelabs.com/u/buffer
With [browserify](http://browserify.org), simply `require('buffer')` or use the `Buffer` global and you will get this module.
The goal is to provide an API that is 100% identical to
[node's Buffer API](https://nodejs.org/api/buffer.html). Read the
[official docs](https://nodejs.org/api/buffer.html) for the full list of properties,
instance methods, and class methods that are supported.
## features
- Manipulate binary data like a boss, in all browsers!
- Super fast. Backed by Typed Arrays (`Uint8Array`/`ArrayBuffer`, not `Object`)
- Extremely small bundle size (**6.75KB minified + gzipped**, 51.9KB with comments)
- Excellent browser support (Chrome, Firefox, Edge, Safari 9+, IE 11, iOS 9+, Android, etc.)
- Preserves Node API exactly, with one minor difference (see below)
- Square-bracket `buf[4]` notation works!
- Does not modify any browser prototypes or put anything on `window`
- Comprehensive test suite (including all buffer tests from node.js core)
## install
To use this module directly (without browserify), install it:
```bash
npm install buffer
```
This module was previously called **native-buffer-browserify**, but please use **buffer**
from now on.
If you do not use a bundler, you can use the [standalone script](https://bundle.run/buffer).
## usage
The module's API is identical to node's `Buffer` API. Read the
[official docs](https://nodejs.org/api/buffer.html) for the full list of properties,
instance methods, and class methods that are supported.
As mentioned above, with [browserify](http://browserify.org) you can simply `require('buffer')` or use the `Buffer` global, and this module will automatically be included
in your bundle. Almost any npm module will work in the browser, even if it assumes that
the node `Buffer` API will be available.
To depend on this module explicitly (without browserify), require it like this:
```js
var Buffer = require('buffer/').Buffer // note: the trailing slash is important!
```
To require this module explicitly, use `require('buffer/')` which tells the node.js module
lookup algorithm (also used by browserify) to use the **npm module** named `buffer`
instead of the **node.js core** module named `buffer`!
## how does it work?
The Buffer constructor returns instances of `Uint8Array` that have their prototype
changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of `Uint8Array`,
so the returned instances will have all the node `Buffer` methods and the
`Uint8Array` methods. Square bracket notation works as expected -- it returns a
single octet.
The `Uint8Array` prototype remains unmodified.
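For illustration, here is a minimal sketch of what that means in practice (the byte values are arbitrary placeholders):
```js
var Buffer = require('buffer/').Buffer // or the Buffer global with browserify
var buf = Buffer.from([104, 105])
console.log(buf instanceof Uint8Array) // true -- Buffer is a subclass of Uint8Array
console.log(buf[0]) // 104 -- square-bracket notation returns a single octet
console.log(buf.toString('utf8')) // 'hi' -- node Buffer methods are still available
```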
## tracking the latest node api
This module tracks the Buffer API in the latest (unstable) version of node.js. The Buffer
API is considered **stable** in the
[node stability index](https://nodejs.org/docs/latest/api/documentation.html#documentation_stability_index),
so it is unlikely that there will ever be breaking changes.
Nonetheless, when/if the Buffer API changes in node, this module's API will change
accordingly.
## related packages
- [`buffer-reverse`](https://www.npmjs.com/package/buffer-reverse) - Reverse a buffer
- [`buffer-xor`](https://www.npmjs.com/package/buffer-xor) - Bitwise xor a buffer
- [`is-buffer`](https://www.npmjs.com/package/is-buffer) - Determine if an object is a Buffer without including the whole `Buffer` package
## conversion packages
### convert typed array to buffer
Use [`typedarray-to-buffer`](https://www.npmjs.com/package/typedarray-to-buffer) to convert any kind of typed array to a `Buffer`. Does not perform a copy, so it's super fast.
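If you want to see the underlying idea, the same zero-copy conversion can be sketched with plain `Buffer.from` on the typed array's underlying `ArrayBuffer` (an illustration only, not the package's implementation):
```js
var arr = new Float32Array([1, 2, 3])
// Shares memory with `arr` instead of copying it
var buf = Buffer.from(arr.buffer, arr.byteOffset, arr.byteLength)
buf[0] = 0xff
console.log(arr[0]) // changed, because buf and arr view the same bytes
```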
### convert buffer to typed array
`Buffer` is a subclass of `Uint8Array` (which is a typed array). So there is no need to explicitly convert to typed array. Just use the buffer as a `Uint8Array`.
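For example (nothing to convert; every `Buffer` already is a `Uint8Array`):
```js
var buf = Buffer.from('hello')
console.log(buf instanceof Uint8Array) // true
console.log(buf.byteLength) // 5
console.log(buf.subarray(1, 3)) // Uint8Array methods work directly on the buffer
```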
### convert blob to buffer
Use [`blob-to-buffer`](https://www.npmjs.com/package/blob-to-buffer) to convert a `Blob` to a `Buffer`.
### convert buffer to blob
To convert a `Buffer` to a `Blob`, use the `Blob` constructor:
```js
var blob = new Blob([ buffer ])
```
Optionally, specify a mimetype:
```js
var blob = new Blob([ buffer ], { type: 'text/html' })
```
### convert arraybuffer to buffer
To convert an `ArrayBuffer` to a `Buffer`, use the `Buffer.from` function. Does not perform a copy, so it's super fast.
```js
var buffer = Buffer.from(arrayBuffer)
```
### convert buffer to arraybuffer
To convert a `Buffer` to an `ArrayBuffer`, use the `.buffer` property (which is present on all `Uint8Array` objects):
```js
var arrayBuffer = buffer.buffer.slice(
buffer.byteOffset, buffer.byteOffset + buffer.byteLength
)
```
Alternatively, use the [`to-arraybuffer`](https://www.npmjs.com/package/to-arraybuffer) module.
## performance
See perf tests in `/perf`.
`BrowserBuffer` is the browser `buffer` module (this repo). `Uint8Array` is included as a
sanity check (since `BrowserBuffer` uses `Uint8Array` under the hood, `Uint8Array` will
always be at least a bit faster). Finally, `NodeBuffer` is the node.js buffer module,
which is included to compare against.
NOTE: Performance has improved since these benchmarks were taken. PR welcome to update the README.
### Chrome 38
| Method | Operations | Accuracy | Sampled | Fastest |
|:-------|:-----------|:---------|:--------|:-------:|
| BrowserBuffer#bracket-notation | 11,457,464 ops/sec | ±0.86% | 66 | ✓ |
| Uint8Array#bracket-notation | 10,824,332 ops/sec | ±0.74% | 65 | |
| | | | |
| BrowserBuffer#concat | 450,532 ops/sec | ±0.76% | 68 | |
| Uint8Array#concat | 1,368,911 ops/sec | ±1.50% | 62 | ✓ |
| | | | |
| BrowserBuffer#copy(16000) | 903,001 ops/sec | ±0.96% | 67 | |
| Uint8Array#copy(16000) | 1,422,441 ops/sec | ±1.04% | 66 | ✓ |
| | | | |
| BrowserBuffer#copy(16) | 11,431,358 ops/sec | ±0.46% | 69 | |
| Uint8Array#copy(16) | 13,944,163 ops/sec | ±1.12% | 68 | ✓ |
| | | | |
| BrowserBuffer#new(16000) | 106,329 ops/sec | ±6.70% | 44 | |
| Uint8Array#new(16000) | 131,001 ops/sec | ±2.85% | 31 | ✓ |
| | | | |
| BrowserBuffer#new(16) | 1,554,491 ops/sec | ±1.60% | 65 | |
| Uint8Array#new(16) | 6,623,930 ops/sec | ±1.66% | 65 | ✓ |
| | | | |
| BrowserBuffer#readDoubleBE | 112,830 ops/sec | ±0.51% | 69 | ✓ |
| DataView#getFloat64 | 93,500 ops/sec | ±0.57% | 68 | |
| | | | |
| BrowserBuffer#readFloatBE | 146,678 ops/sec | ±0.95% | 68 | ✓ |
| DataView#getFloat32 | 99,311 ops/sec | ±0.41% | 67 | |
| | | | |
| BrowserBuffer#readUInt32LE | 843,214 ops/sec | ±0.70% | 69 | ✓ |
| DataView#getUint32 | 103,024 ops/sec | ±0.64% | 67 | |
| | | | |
| BrowserBuffer#slice | 1,013,941 ops/sec | ±0.75% | 67 | |
| Uint8Array#subarray | 1,903,928 ops/sec | ±0.53% | 67 | ✓ |
| | | | |
| BrowserBuffer#writeFloatBE | 61,387 ops/sec | ±0.90% | 67 | |
| DataView#setFloat32 | 141,249 ops/sec | ±0.40% | 66 | ✓ |
### Firefox 33
| Method | Operations | Accuracy | Sampled | Fastest |
|:-------|:-----------|:---------|:--------|:-------:|
| BrowserBuffer#bracket-notation | 20,800,421 ops/sec | ±1.84% | 60 | |
| Uint8Array#bracket-notation | 20,826,235 ops/sec | ±2.02% | 61 | ✓ |
| | | | |
| BrowserBuffer#concat | 153,076 ops/sec | ±2.32% | 61 | |
| Uint8Array#concat | 1,255,674 ops/sec | ±8.65% | 52 | ✓ |
| | | | |
| BrowserBuffer#copy(16000) | 1,105,312 ops/sec | ±1.16% | 63 | |
| Uint8Array#copy(16000) | 1,615,911 ops/sec | ±0.55% | 66 | ✓ |
| | | | |
| BrowserBuffer#copy(16) | 16,357,599 ops/sec | ±0.73% | 68 | |
| Uint8Array#copy(16) | 31,436,281 ops/sec | ±1.05% | 68 | ✓ |
| | | | |
| BrowserBuffer#new(16000) | 52,995 ops/sec | ±6.01% | 35 | |
| Uint8Array#new(16000) | 87,686 ops/sec | ±5.68% | 45 | ✓ |
| | | | |
| BrowserBuffer#new(16) | 252,031 ops/sec | ±1.61% | 66 | |
| Uint8Array#new(16) | 8,477,026 ops/sec | ±0.49% | 68 | ✓ |
| | | | |
| BrowserBuffer#readDoubleBE | 99,871 ops/sec | ±0.41% | 69 | |
| DataView#getFloat64 | 285,663 ops/sec | ±0.70% | 68 | ✓ |
| | | | |
| BrowserBuffer#readFloatBE | 115,540 ops/sec | ±0.42% | 69 | |
| DataView#getFloat32 | 288,722 ops/sec | ±0.82% | 68 | ✓ |
| | | | |
| BrowserBuffer#readUInt32LE | 633,926 ops/sec | ±1.08% | 67 | ✓ |
| DataView#getUint32 | 294,808 ops/sec | ±0.79% | 64 | |
| | | | |
| BrowserBuffer#slice | 349,425 ops/sec | ±0.46% | 69 | |
| Uint8Array#subarray | 5,965,819 ops/sec | ±0.60% | 65 | ✓ |
| | | | |
| BrowserBuffer#writeFloatBE | 59,980 ops/sec | ±0.41% | 67 | |
| DataView#setFloat32 | 317,634 ops/sec | ±0.63% | 68 | ✓ |
### Safari 8
| Method | Operations | Accuracy | Sampled | Fastest |
|:-------|:-----------|:---------|:--------|:-------:|
| BrowserBuffer#bracket-notation | 10,279,729 ops/sec | ±2.25% | 56 | ✓ |
| Uint8Array#bracket-notation | 10,030,767 ops/sec | ±2.23% | 59 | |
| | | | |
| BrowserBuffer#concat | 144,138 ops/sec | ±1.38% | 65 | |
| Uint8Array#concat | 4,950,764 ops/sec | ±1.70% | 63 | ✓ |
| | | | |
| BrowserBuffer#copy(16000) | 1,058,548 ops/sec | ±1.51% | 64 | |
| Uint8Array#copy(16000) | 1,409,666 ops/sec | ±1.17% | 65 | ✓ |
| | | | |
| BrowserBuffer#copy(16) | 6,282,529 ops/sec | ±1.88% | 58 | |
| Uint8Array#copy(16) | 11,907,128 ops/sec | ±2.87% | 58 | ✓ |
| | | | |
| BrowserBuffer#new(16000) | 101,663 ops/sec | ±3.89% | 57 | |
| Uint8Array#new(16000) | 22,050,818 ops/sec | ±6.51% | 46 | ✓ |
| | | | |
| BrowserBuffer#new(16) | 176,072 ops/sec | ±2.13% | 64 | |
| Uint8Array#new(16) | 24,385,731 ops/sec | ±5.01% | 51 | ✓ |
| | | | |
| BrowserBuffer#readDoubleBE | 41,341 ops/sec | ±1.06% | 67 | |
| DataView#getFloat64 | 322,280 ops/sec | ±0.84% | 68 | ✓ |
| | | | |
| BrowserBuffer#readFloatBE | 46,141 ops/sec | ±1.06% | 65 | |
| DataView#getFloat32 | 337,025 ops/sec | ±0.43% | 69 | ✓ |
| | | | |
| BrowserBuffer#readUInt32LE | 151,551 ops/sec | ±1.02% | 66 | |
| DataView#getUint32 | 308,278 ops/sec | ±0.94% | 67 | ✓ |
| | | | |
| BrowserBuffer#slice | 197,365 ops/sec | ±0.95% | 66 | |
| Uint8Array#subarray | 9,558,024 ops/sec | ±3.08% | 58 | ✓ |
| | | | |
| BrowserBuffer#writeFloatBE | 17,518 ops/sec | ±1.03% | 63 | |
| DataView#setFloat32 | 319,751 ops/sec | ±0.48% | 68 | ✓ |
### Node 0.11.14
| Method | Operations | Accuracy | Sampled | Fastest |
|:-------|:-----------|:---------|:--------|:-------:|
| BrowserBuffer#bracket-notation | 10,489,828 ops/sec | ±3.25% | 90 | |
| Uint8Array#bracket-notation | 10,534,884 ops/sec | ±0.81% | 92 | ✓ |
| NodeBuffer#bracket-notation | 10,389,910 ops/sec | ±0.97% | 87 | |
| | | | |
| BrowserBuffer#concat | 487,830 ops/sec | ±2.58% | 88 | |
| Uint8Array#concat | 1,814,327 ops/sec | ±1.28% | 88 | ✓ |
| NodeBuffer#concat | 1,636,523 ops/sec | ±1.88% | 73 | |
| | | | |
| BrowserBuffer#copy(16000) | 1,073,665 ops/sec | ±0.77% | 90 | |
| Uint8Array#copy(16000) | 1,348,517 ops/sec | ±0.84% | 89 | ✓ |
| NodeBuffer#copy(16000) | 1,289,533 ops/sec | ±0.82% | 93 | |
| | | | |
| BrowserBuffer#copy(16) | 12,782,706 ops/sec | ±0.74% | 85 | |
| Uint8Array#copy(16) | 14,180,427 ops/sec | ±0.93% | 92 | ✓ |
| NodeBuffer#copy(16) | 11,083,134 ops/sec | ±1.06% | 89 | |
| | | | |
| BrowserBuffer#new(16000) | 141,678 ops/sec | ±3.30% | 67 | |
| Uint8Array#new(16000) | 161,491 ops/sec | ±2.96% | 60 | |
| NodeBuffer#new(16000) | 292,699 ops/sec | ±3.20% | 55 | ✓ |
| | | | |
| BrowserBuffer#new(16) | 1,655,466 ops/sec | ±2.41% | 82 | |
| Uint8Array#new(16) | 14,399,926 ops/sec | ±0.91% | 94 | ✓ |
| NodeBuffer#new(16) | 3,894,696 ops/sec | ±0.88% | 92 | |
| | | | |
| BrowserBuffer#readDoubleBE | 109,582 ops/sec | ±0.75% | 93 | ✓ |
| DataView#getFloat64 | 91,235 ops/sec | ±0.81% | 90 | |
| NodeBuffer#readDoubleBE | 88,593 ops/sec | ±0.96% | 81 | |
| | | | |
| BrowserBuffer#readFloatBE | 139,854 ops/sec | ±1.03% | 85 | ✓ |
| DataView#getFloat32 | 98,744 ops/sec | ±0.80% | 89 | |
| NodeBuffer#readFloatBE | 92,769 ops/sec | ±0.94% | 93 | |
| | | | |
| BrowserBuffer#readUInt32LE | 710,861 ops/sec | ±0.82% | 92 | |
| DataView#getUint32 | 117,893 ops/sec | ±0.84% | 91 | |
| NodeBuffer#readUInt32LE | 851,412 ops/sec | ±0.72% | 93 | ✓ |
| | | | |
| BrowserBuffer#slice | 1,673,877 ops/sec | ±0.73% | 94 | |
| Uint8Array#subarray | 6,919,243 ops/sec | ±0.67% | 90 | ✓ |
| NodeBuffer#slice | 4,617,604 ops/sec | ±0.79% | 93 | |
| | | | |
| BrowserBuffer#writeFloatBE | 66,011 ops/sec | ±0.75% | 93 | |
| DataView#setFloat32 | 127,760 ops/sec | ±0.72% | 93 | ✓ |
| NodeBuffer#writeFloatBE | 103,352 ops/sec | ±0.83% | 93 | |
### iojs 1.8.1
| Method | Operations | Accuracy | Sampled | Fastest |
|:-------|:-----------|:---------|:--------|:-------:|
| BrowserBuffer#bracket-notation | 10,990,488 ops/sec | ±1.11% | 91 | |
| Uint8Array#bracket-notation | 11,268,757 ops/sec | ±0.65% | 97 | |
| NodeBuffer#bracket-notation | 11,353,260 ops/sec | ±0.83% | 94 | ✓ |
| | | | |
| BrowserBuffer#concat | 378,954 ops/sec | ±0.74% | 94 | |
| Uint8Array#concat | 1,358,288 ops/sec | ±0.97% | 87 | |
| NodeBuffer#concat | 1,934,050 ops/sec | ±1.11% | 78 | ✓ |
| | | | |
| BrowserBuffer#copy(16000) | 894,538 ops/sec | ±0.56% | 84 | |
| Uint8Array#copy(16000) | 1,442,656 ops/sec | ±0.71% | 96 | |
| NodeBuffer#copy(16000) | 1,457,898 ops/sec | ±0.53% | 92 | ✓ |
| | | | |
| BrowserBuffer#copy(16) | 12,870,457 ops/sec | ±0.67% | 95 | |
| Uint8Array#copy(16) | 16,643,989 ops/sec | ±0.61% | 93 | ✓ |
| NodeBuffer#copy(16) | 14,885,848 ops/sec | ±0.74% | 94 | |
| | | | |
| BrowserBuffer#new(16000) | 109,264 ops/sec | ±4.21% | 63 | |
| Uint8Array#new(16000) | 138,916 ops/sec | ±1.87% | 61 | |
| NodeBuffer#new(16000) | 281,449 ops/sec | ±3.58% | 51 | ✓ |
| | | | |
| BrowserBuffer#new(16) | 1,362,935 ops/sec | ±0.56% | 99 | |
| Uint8Array#new(16) | 6,193,090 ops/sec | ±0.64% | 95 | ✓ |
| NodeBuffer#new(16) | 4,745,425 ops/sec | ±1.56% | 90 | |
| | | | |
| BrowserBuffer#readDoubleBE | 118,127 ops/sec | ±0.59% | 93 | ✓ |
| DataView#getFloat64 | 107,332 ops/sec | ±0.65% | 91 | |
| NodeBuffer#readDoubleBE | 116,274 ops/sec | ±0.94% | 95 | |
| | | | |
| BrowserBuffer#readFloatBE | 150,326 ops/sec | ±0.58% | 95 | ✓ |
| DataView#getFloat32 | 110,541 ops/sec | ±0.57% | 98 | |
| NodeBuffer#readFloatBE | 121,599 ops/sec | ±0.60% | 87 | |
| | | | |
| BrowserBuffer#readUInt32LE | 814,147 ops/sec | ±0.62% | 93 | |
| DataView#getUint32 | 137,592 ops/sec | ±0.64% | 90 | |
| NodeBuffer#readUInt32LE | 931,650 ops/sec | ±0.71% | 96 | ✓ |
| | | | |
| BrowserBuffer#slice | 878,590 ops/sec | ±0.68% | 93 | |
| Uint8Array#subarray | 2,843,308 ops/sec | ±1.02% | 90 | |
| NodeBuffer#slice | 4,998,316 ops/sec | ±0.68% | 90 | ✓ |
| | | | |
| BrowserBuffer#writeFloatBE | 65,927 ops/sec | ±0.74% | 93 | |
| DataView#setFloat32 | 139,823 ops/sec | ±0.97% | 89 | ✓ |
| NodeBuffer#writeFloatBE | 135,763 ops/sec | ±0.65% | 96 | |
| | | | |
## Testing the project
First, install the project:
npm install
Then, to run tests in Node.js, run:
npm run test-node
To test locally in a browser, you can run:
npm run test-browser-es5-local # For ES5 browsers that don't support ES6
npm run test-browser-es6-local # For ES6 compliant browsers
This will print out a URL that you can then open in a browser to run the tests, using [airtap](https://www.npmjs.com/package/airtap).
To run automated browser tests using Saucelabs, ensure that your `SAUCE_USERNAME` and `SAUCE_ACCESS_KEY` environment variables are set, then run:
npm test
This is what's run in Travis, to check against various browsers. The list of browsers is kept in the `bin/airtap-es5.yml` and `bin/airtap-es6.yml` files.
## JavaScript Standard Style
This module uses [JavaScript Standard Style](https://github.com/feross/standard).
[![JavaScript Style Guide](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard)
To test that the code conforms to the style, run `npm install` and then:
./node_modules/.bin/standard
## credit
This was originally forked from [buffer-browserify](https://github.com/toots/buffer-browserify).
## Security Policies and Procedures
The `buffer` team and community take all security bugs in `buffer` seriously. Please see our [security policies and procedures](https://github.com/feross/security) document to learn how to report issues.
## license
MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org), and other contributors. Originally forked from an MIT-licensed module by Romain Beauxis.

186
node_modules/buffer/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,186 @@
export class Buffer extends Uint8Array {
length: number
write(string: string, offset?: number, length?: number, encoding?: string): number;
toString(encoding?: string, start?: number, end?: number): string;
toJSON(): { type: 'Buffer', data: any[] };
equals(otherBuffer: Buffer): boolean;
compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number;
copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
slice(start?: number, end?: number): Buffer;
writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
readIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
readIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
readUInt8(offset: number, noAssert?: boolean): number;
readUInt16LE(offset: number, noAssert?: boolean): number;
readUInt16BE(offset: number, noAssert?: boolean): number;
readUInt32LE(offset: number, noAssert?: boolean): number;
readUInt32BE(offset: number, noAssert?: boolean): number;
readInt8(offset: number, noAssert?: boolean): number;
readInt16LE(offset: number, noAssert?: boolean): number;
readInt16BE(offset: number, noAssert?: boolean): number;
readInt32LE(offset: number, noAssert?: boolean): number;
readInt32BE(offset: number, noAssert?: boolean): number;
readFloatLE(offset: number, noAssert?: boolean): number;
readFloatBE(offset: number, noAssert?: boolean): number;
readDoubleLE(offset: number, noAssert?: boolean): number;
readDoubleBE(offset: number, noAssert?: boolean): number;
reverse(): this;
swap16(): Buffer;
swap32(): Buffer;
swap64(): Buffer;
writeUInt8(value: number, offset: number, noAssert?: boolean): number;
writeUInt16LE(value: number, offset: number, noAssert?: boolean): number;
writeUInt16BE(value: number, offset: number, noAssert?: boolean): number;
writeUInt32LE(value: number, offset: number, noAssert?: boolean): number;
writeUInt32BE(value: number, offset: number, noAssert?: boolean): number;
writeInt8(value: number, offset: number, noAssert?: boolean): number;
writeInt16LE(value: number, offset: number, noAssert?: boolean): number;
writeInt16BE(value: number, offset: number, noAssert?: boolean): number;
writeInt32LE(value: number, offset: number, noAssert?: boolean): number;
writeInt32BE(value: number, offset: number, noAssert?: boolean): number;
writeFloatLE(value: number, offset: number, noAssert?: boolean): number;
writeFloatBE(value: number, offset: number, noAssert?: boolean): number;
writeDoubleLE(value: number, offset: number, noAssert?: boolean): number;
writeDoubleBE(value: number, offset: number, noAssert?: boolean): number;
fill(value: any, offset?: number, end?: number): this;
indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean;
/**
* Allocates a new buffer containing the given {str}.
*
* @param str String to store in buffer.
* @param encoding encoding to use, optional. Default is 'utf8'
*/
constructor (str: string, encoding?: string);
/**
* Allocates a new buffer of {size} octets.
*
* @param size count of octets to allocate.
*/
constructor (size: number);
/**
* Allocates a new buffer containing the given {array} of octets.
*
* @param array The octets to store.
*/
constructor (array: Uint8Array);
/**
* Produces a Buffer backed by the same allocated memory as
* the given {ArrayBuffer}.
*
*
* @param arrayBuffer The ArrayBuffer with which to share memory.
*/
constructor (arrayBuffer: ArrayBuffer);
/**
* Allocates a new buffer containing the given {array} of octets.
*
* @param array The octets to store.
*/
constructor (array: any[]);
/**
* Copies the passed {buffer} data onto a new {Buffer} instance.
*
* @param buffer The buffer to copy.
*/
constructor (buffer: Buffer);
prototype: Buffer;
/**
* Allocates a new Buffer using an {array} of octets.
*
* @param array
*/
static from(array: any[]): Buffer;
/**
* When passed a reference to the .buffer property of a TypedArray instance,
* the newly created Buffer will share the same allocated memory as the TypedArray.
* The optional {byteOffset} and {length} arguments specify a memory range
* within the {arrayBuffer} that will be shared by the Buffer.
*
* @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer()
* @param byteOffset
* @param length
*/
static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer;
/**
* Copies the passed {buffer} data onto a new Buffer instance.
*
* @param buffer
*/
static from(buffer: Buffer | Uint8Array): Buffer;
/**
* Creates a new Buffer containing the given JavaScript string {str}.
* If provided, the {encoding} parameter identifies the character encoding.
* If not provided, {encoding} defaults to 'utf8'.
*
* @param str
*/
static from(str: string, encoding?: string): Buffer;
/**
* Returns true if {obj} is a Buffer
*
* @param obj object to test.
*/
static isBuffer(obj: any): obj is Buffer;
/**
* Returns true if {encoding} is a valid encoding argument.
* Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
*
* @param encoding string to test.
*/
static isEncoding(encoding: string): boolean;
/**
* Gives the actual byte length of a string. encoding defaults to 'utf8'.
* This is not the same as String.prototype.length since that returns the number of characters in a string.
*
* @param string string to test.
* @param encoding encoding used to evaluate (defaults to 'utf8')
*/
static byteLength(string: string, encoding?: string): number;
/**
* Returns a buffer which is the result of concatenating all the buffers in the list together.
*
* If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer.
* If the list has exactly one item, then the first item of the list is returned.
* If the list has more than one item, then a new Buffer is created.
*
* @param list An array of Buffer objects to concatenate
* @param totalLength Total length of the buffers when concatenated.
* If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly.
*/
static concat(list: Buffer[], totalLength?: number): Buffer;
/**
* The same as buf1.compare(buf2).
*/
static compare(buf1: Buffer, buf2: Buffer): number;
/**
* Allocates a new buffer of {size} octets.
*
* @param size count of octets to allocate.
* @param fill if specified, buffer will be initialized by calling buf.fill(fill).
* If parameter is omitted, buffer will be filled with zeros.
* @param encoding encoding used for call to buf.fill while initializing
*/
static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer;
/**
* Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents
* of the newly created Buffer are unknown and may contain sensitive data.
*
* @param size count of octets to allocate
*/
static allocUnsafe(size: number): Buffer;
/**
* Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents
* of the newly created Buffer are unknown and may contain sensitive data.
*
* @param size count of octets to allocate
*/
static allocUnsafeSlow(size: number): Buffer;
}

1817
node_modules/buffer/index.js generated vendored Normal file

File diff suppressed because it is too large

96
node_modules/buffer/package.json generated vendored Normal file
View File

@@ -0,0 +1,96 @@
{
"name": "buffer",
"description": "Node.js Buffer API, for the browser",
"version": "5.7.1",
"author": {
"name": "Feross Aboukhadijeh",
"email": "feross@feross.org",
"url": "https://feross.org"
},
"bugs": {
"url": "https://github.com/feross/buffer/issues"
},
"contributors": [
"Romain Beauxis <toots@rastageeks.org>",
"James Halliday <mail@substack.net>"
],
"dependencies": {
"base64-js": "^1.3.1",
"ieee754": "^1.1.13"
},
"devDependencies": {
"airtap": "^3.0.0",
"benchmark": "^2.1.4",
"browserify": "^17.0.0",
"concat-stream": "^2.0.0",
"hyperquest": "^2.1.3",
"is-buffer": "^2.0.4",
"is-nan": "^1.3.0",
"split": "^1.0.1",
"standard": "*",
"tape": "^5.0.1",
"through2": "^4.0.2",
"uglify-js": "^3.11.3"
},
"homepage": "https://github.com/feross/buffer",
"jspm": {
"map": {
"./index.js": {
"node": "@node/buffer"
}
}
},
"keywords": [
"arraybuffer",
"browser",
"browserify",
"buffer",
"compatible",
"dataview",
"uint8array"
],
"license": "MIT",
"main": "index.js",
"types": "index.d.ts",
"repository": {
"type": "git",
"url": "git://github.com/feross/buffer.git"
},
"scripts": {
"perf": "browserify --debug perf/bracket-notation.js > perf/bundle.js && open perf/index.html",
"perf-node": "node perf/bracket-notation.js && node perf/concat.js && node perf/copy-big.js && node perf/copy.js && node perf/new-big.js && node perf/new.js && node perf/readDoubleBE.js && node perf/readFloatBE.js && node perf/readUInt32LE.js && node perf/slice.js && node perf/writeFloatBE.js",
"size": "browserify -r ./ | uglifyjs -c -m | gzip | wc -c",
"test": "standard && node ./bin/test.js",
"test-browser-es5": "airtap -- test/*.js",
"test-browser-es5-local": "airtap --local -- test/*.js",
"test-browser-es6": "airtap -- test/*.js test/node/*.js",
"test-browser-es6-local": "airtap --local -- test/*.js test/node/*.js",
"test-node": "tape test/*.js test/node/*.js",
"update-authors": "./bin/update-authors.sh"
},
"standard": {
"ignore": [
"test/node/**/*.js",
"test/common.js",
"test/_polyfill.js",
"perf/**/*.js"
],
"globals": [
"SharedArrayBuffer"
]
},
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
}

15
node_modules/chownr/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,15 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

3
node_modules/chownr/README.md generated vendored Normal file
View File

@@ -0,0 +1,3 @@
Like `chown -R`.
Takes the same arguments as `fs.chown()`.
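For example (a minimal sketch based on the exports in `chownr.js` below; the path, uid and gid are placeholders):
```js
const chownr = require('chownr')
// Recursively chown /some/dir and everything under it (like `chown -R`)
chownr('/some/dir', 501, 20, (err) => {
  if (err) throw err
  console.log('done')
})
// Synchronous variant
chownr.sync('/some/dir', 501, 20)
```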

167
node_modules/chownr/chownr.js generated vendored Normal file
View File

@@ -0,0 +1,167 @@
'use strict'
const fs = require('fs')
const path = require('path')
/* istanbul ignore next */
const LCHOWN = fs.lchown ? 'lchown' : 'chown'
/* istanbul ignore next */
const LCHOWNSYNC = fs.lchownSync ? 'lchownSync' : 'chownSync'
/* istanbul ignore next */
const needEISDIRHandled = fs.lchown &&
!process.version.match(/v1[1-9]+\./) &&
!process.version.match(/v10\.[6-9]/)
const lchownSync = (path, uid, gid) => {
try {
return fs[LCHOWNSYNC](path, uid, gid)
} catch (er) {
if (er.code !== 'ENOENT')
throw er
}
}
/* istanbul ignore next */
const chownSync = (path, uid, gid) => {
try {
return fs.chownSync(path, uid, gid)
} catch (er) {
if (er.code !== 'ENOENT')
throw er
}
}
/* istanbul ignore next */
const handleEISDIR =
needEISDIRHandled ? (path, uid, gid, cb) => er => {
// Node prior to v10 had a very questionable implementation of
// fs.lchown, which would always try to call fs.open on a directory.
// Fall back to fs.chown in those cases.
if (!er || er.code !== 'EISDIR')
cb(er)
else
fs.chown(path, uid, gid, cb)
}
: (_, __, ___, cb) => cb
/* istanbul ignore next */
const handleEISDirSync =
needEISDIRHandled ? (path, uid, gid) => {
try {
return lchownSync(path, uid, gid)
} catch (er) {
if (er.code !== 'EISDIR')
throw er
chownSync(path, uid, gid)
}
}
: (path, uid, gid) => lchownSync(path, uid, gid)
// fs.readdir only started accepting an options object in node v6
const nodeVersion = process.version
let readdir = (path, options, cb) => fs.readdir(path, options, cb)
let readdirSync = (path, options) => fs.readdirSync(path, options)
/* istanbul ignore next */
if (/^v4\./.test(nodeVersion))
readdir = (path, options, cb) => fs.readdir(path, cb)
const chown = (cpath, uid, gid, cb) => {
fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => {
// Skip ENOENT error
cb(er && er.code !== 'ENOENT' ? er : null)
}))
}
const chownrKid = (p, child, uid, gid, cb) => {
if (typeof child === 'string')
return fs.lstat(path.resolve(p, child), (er, stats) => {
// Skip ENOENT error
if (er)
return cb(er.code !== 'ENOENT' ? er : null)
stats.name = child
chownrKid(p, stats, uid, gid, cb)
})
if (child.isDirectory()) {
chownr(path.resolve(p, child.name), uid, gid, er => {
if (er)
return cb(er)
const cpath = path.resolve(p, child.name)
chown(cpath, uid, gid, cb)
})
} else {
const cpath = path.resolve(p, child.name)
chown(cpath, uid, gid, cb)
}
}
const chownr = (p, uid, gid, cb) => {
readdir(p, { withFileTypes: true }, (er, children) => {
    // Any error other than ENOTDIR or ENOTSUP means it's not readable,
    // or doesn't exist. Give up.
if (er) {
if (er.code === 'ENOENT')
return cb()
else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
return cb(er)
}
if (er || !children.length)
return chown(p, uid, gid, cb)
let len = children.length
let errState = null
const then = er => {
if (errState)
return
if (er)
return cb(errState = er)
if (-- len === 0)
return chown(p, uid, gid, cb)
}
children.forEach(child => chownrKid(p, child, uid, gid, then))
})
}
const chownrKidSync = (p, child, uid, gid) => {
if (typeof child === 'string') {
try {
const stats = fs.lstatSync(path.resolve(p, child))
stats.name = child
child = stats
} catch (er) {
if (er.code === 'ENOENT')
return
else
throw er
}
}
if (child.isDirectory())
chownrSync(path.resolve(p, child.name), uid, gid)
handleEISDirSync(path.resolve(p, child.name), uid, gid)
}
const chownrSync = (p, uid, gid) => {
let children
try {
children = readdirSync(p, { withFileTypes: true })
} catch (er) {
if (er.code === 'ENOENT')
return
else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP')
return handleEISDirSync(p, uid, gid)
else
throw er
}
if (children && children.length)
children.forEach(child => chownrKidSync(p, child, uid, gid))
return handleEISDirSync(p, uid, gid)
}
module.exports = chownr
chownr.sync = chownrSync

29
node_modules/chownr/package.json generated vendored Normal file
View File

@@ -0,0 +1,29 @@
{
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"name": "chownr",
"description": "like `chown -R`",
"version": "1.1.4",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/chownr.git"
},
"main": "chownr.js",
"files": [
"chownr.js"
],
"devDependencies": {
"mkdirp": "0.3",
"rimraf": "^2.7.1",
"tap": "^14.10.6"
},
"tap": {
"check-coverage": true
},
"scripts": {
"test": "tap",
"preversion": "npm test",
"postversion": "npm publish",
"prepublishOnly": "git push origin --follow-tags"
},
"license": "ISC"
}

54
node_modules/color-convert/CHANGELOG.md generated vendored Normal file
View File

@@ -0,0 +1,54 @@
# 1.0.0 - 2016-01-07
- Removed: unused speed test
- Added: Automatic routing between previously unsupported conversions
([#27](https://github.com/Qix-/color-convert/pull/27))
- Removed: `xxx2xxx()` and `xxx2xxxRaw()` functions
([#27](https://github.com/Qix-/color-convert/pull/27))
- Removed: `convert()` class
([#27](https://github.com/Qix-/color-convert/pull/27))
- Changed: all functions to lookup dictionary
([#27](https://github.com/Qix-/color-convert/pull/27))
- Changed: `ansi` to `ansi256`
([#27](https://github.com/Qix-/color-convert/pull/27))
- Fixed: argument grouping for functions requiring only one argument
([#27](https://github.com/Qix-/color-convert/pull/27))
# 0.6.0 - 2015-07-23
- Added: methods to handle
[ANSI](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors) 16/256 colors:
- rgb2ansi16
- rgb2ansi
- hsl2ansi16
- hsl2ansi
- hsv2ansi16
- hsv2ansi
- hwb2ansi16
- hwb2ansi
- cmyk2ansi16
- cmyk2ansi
- keyword2ansi16
- keyword2ansi
- ansi162rgb
- ansi162hsl
- ansi162hsv
- ansi162hwb
- ansi162cmyk
- ansi162keyword
- ansi2rgb
- ansi2hsl
- ansi2hsv
- ansi2hwb
- ansi2cmyk
- ansi2keyword
([#18](https://github.com/harthur/color-convert/pull/18))
# 0.5.3 - 2015-06-02
- Fixed: hsl2hsv does not return `NaN` anymore when using `[0,0,0]`
([#15](https://github.com/harthur/color-convert/issues/15))
---
Check out commit logs for older releases

21
node_modules/color-convert/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
Copyright (c) 2011-2016 Heather Arthur <fayearthur@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

68
node_modules/color-convert/README.md generated vendored Normal file
View File

@@ -0,0 +1,68 @@
# color-convert
[![Build Status](https://travis-ci.org/Qix-/color-convert.svg?branch=master)](https://travis-ci.org/Qix-/color-convert)
Color-convert is a color conversion library for JavaScript and node.
It converts all ways between `rgb`, `hsl`, `hsv`, `hwb`, `cmyk`, `ansi`, `ansi16`, `hex` strings, and CSS `keyword`s (will round to closest):
```js
var convert = require('color-convert');
convert.rgb.hsl(140, 200, 100); // [96, 48, 59]
convert.keyword.rgb('blue'); // [0, 0, 255]
var rgbChannels = convert.rgb.channels; // 3
var cmykChannels = convert.cmyk.channels; // 4
var ansiChannels = convert.ansi16.channels; // 1
```
# Install
```console
$ npm install color-convert
```
# API
Simply get the property of the _from_ and _to_ conversion that you're looking for.
All functions have a rounded and unrounded variant. By default, return values are rounded. To get the unrounded (raw) results, simply tack on `.raw` to the function.
All 'from' functions have a hidden property called `.channels` that indicates the number of channels the function expects (not including alpha).
```js
var convert = require('color-convert');
// Hex to LAB
convert.hex.lab('DEADBF'); // [ 76, 21, -2 ]
convert.hex.lab.raw('DEADBF'); // [ 75.56213190997677, 20.653827952644754, -2.290532499330533 ]
// RGB to CMYK
convert.rgb.cmyk(167, 255, 4); // [ 35, 0, 98, 0 ]
convert.rgb.cmyk.raw(167, 255, 4); // [ 34.509803921568626, 0, 98.43137254901961, 0 ]
```
### Arrays
All functions that accept multiple arguments also support passing an array.
Note that this does **not** apply to functions that convert from a color that only requires one value (e.g. `keyword`, `ansi256`, `hex`, etc.)
```js
var convert = require('color-convert');
convert.rgb.hex(123, 45, 67); // '7B2D43'
convert.rgb.hex([123, 45, 67]); // '7B2D43'
```
## Routing
Conversions that don't have an _explicitly_ defined conversion (in [conversions.js](conversions.js)), but can be converted by means of sub-conversions (e.g. XYZ -> **RGB** -> CMYK), are automatically routed together. This allows just about any color model supported by `color-convert` to be converted to any other model, so long as a sub-conversion path exists. This is also true for conversions requiring more than one step in between (e.g. LCH -> **LAB** -> **XYZ** -> **RGB** -> Hex).
Keep in mind that extensive conversions _may_ result in a loss of precision, and exist only to be complete. For a list of "direct" (single-step) conversions, see [conversions.js](conversions.js).
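As a quick illustration (a sketch only; the exact numbers returned depend on the routed chain):
```js
var convert = require('color-convert');
// There is no direct lch -> hex entry in conversions.js, so it is routed
// automatically: LCH -> LAB -> XYZ -> RGB -> hex, as described above.
var hex = convert.lch.hex(54, 63, 288); // a six-character hex string
```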
# Contribute
If there is a new model you would like to support, or want to add a direct conversion between two existing models, please send us a pull request.
# License
Copyright &copy; 2011-2016, Heather Arthur and Josh Junon. Licensed under the [MIT License](LICENSE).

839
node_modules/color-convert/conversions.js generated vendored Normal file
View File

@@ -0,0 +1,839 @@
/* MIT license */
/* eslint-disable no-mixed-operators */
const cssKeywords = require('color-name');
// NOTE: conversions should only return primitive values (i.e. arrays, or
// values that give correct `typeof` results).
// Do not use boxed value types (i.e. Number(), String(), etc.)
const reverseKeywords = {};
for (const key of Object.keys(cssKeywords)) {
reverseKeywords[cssKeywords[key]] = key;
}
const convert = {
rgb: {channels: 3, labels: 'rgb'},
hsl: {channels: 3, labels: 'hsl'},
hsv: {channels: 3, labels: 'hsv'},
hwb: {channels: 3, labels: 'hwb'},
cmyk: {channels: 4, labels: 'cmyk'},
xyz: {channels: 3, labels: 'xyz'},
lab: {channels: 3, labels: 'lab'},
lch: {channels: 3, labels: 'lch'},
hex: {channels: 1, labels: ['hex']},
keyword: {channels: 1, labels: ['keyword']},
ansi16: {channels: 1, labels: ['ansi16']},
ansi256: {channels: 1, labels: ['ansi256']},
hcg: {channels: 3, labels: ['h', 'c', 'g']},
apple: {channels: 3, labels: ['r16', 'g16', 'b16']},
gray: {channels: 1, labels: ['gray']}
};
module.exports = convert;
// Hide .channels and .labels properties
for (const model of Object.keys(convert)) {
if (!('channels' in convert[model])) {
throw new Error('missing channels property: ' + model);
}
if (!('labels' in convert[model])) {
throw new Error('missing channel labels property: ' + model);
}
if (convert[model].labels.length !== convert[model].channels) {
throw new Error('channel and label counts mismatch: ' + model);
}
const {channels, labels} = convert[model];
delete convert[model].channels;
delete convert[model].labels;
Object.defineProperty(convert[model], 'channels', {value: channels});
Object.defineProperty(convert[model], 'labels', {value: labels});
}
convert.rgb.hsl = function (rgb) {
const r = rgb[0] / 255;
const g = rgb[1] / 255;
const b = rgb[2] / 255;
const min = Math.min(r, g, b);
const max = Math.max(r, g, b);
const delta = max - min;
let h;
let s;
if (max === min) {
h = 0;
} else if (r === max) {
h = (g - b) / delta;
} else if (g === max) {
h = 2 + (b - r) / delta;
} else if (b === max) {
h = 4 + (r - g) / delta;
}
h = Math.min(h * 60, 360);
if (h < 0) {
h += 360;
}
const l = (min + max) / 2;
if (max === min) {
s = 0;
} else if (l <= 0.5) {
s = delta / (max + min);
} else {
s = delta / (2 - max - min);
}
return [h, s * 100, l * 100];
};
convert.rgb.hsv = function (rgb) {
let rdif;
let gdif;
let bdif;
let h;
let s;
const r = rgb[0] / 255;
const g = rgb[1] / 255;
const b = rgb[2] / 255;
const v = Math.max(r, g, b);
const diff = v - Math.min(r, g, b);
const diffc = function (c) {
return (v - c) / 6 / diff + 1 / 2;
};
if (diff === 0) {
h = 0;
s = 0;
} else {
s = diff / v;
rdif = diffc(r);
gdif = diffc(g);
bdif = diffc(b);
if (r === v) {
h = bdif - gdif;
} else if (g === v) {
h = (1 / 3) + rdif - bdif;
} else if (b === v) {
h = (2 / 3) + gdif - rdif;
}
if (h < 0) {
h += 1;
} else if (h > 1) {
h -= 1;
}
}
return [
h * 360,
s * 100,
v * 100
];
};
convert.rgb.hwb = function (rgb) {
const r = rgb[0];
const g = rgb[1];
let b = rgb[2];
const h = convert.rgb.hsl(rgb)[0];
const w = 1 / 255 * Math.min(r, Math.min(g, b));
b = 1 - 1 / 255 * Math.max(r, Math.max(g, b));
return [h, w * 100, b * 100];
};
convert.rgb.cmyk = function (rgb) {
const r = rgb[0] / 255;
const g = rgb[1] / 255;
const b = rgb[2] / 255;
const k = Math.min(1 - r, 1 - g, 1 - b);
const c = (1 - r - k) / (1 - k) || 0;
const m = (1 - g - k) / (1 - k) || 0;
const y = (1 - b - k) / (1 - k) || 0;
return [c * 100, m * 100, y * 100, k * 100];
};
function comparativeDistance(x, y) {
/*
See https://en.m.wikipedia.org/wiki/Euclidean_distance#Squared_Euclidean_distance
*/
return (
((x[0] - y[0]) ** 2) +
((x[1] - y[1]) ** 2) +
((x[2] - y[2]) ** 2)
);
}
convert.rgb.keyword = function (rgb) {
const reversed = reverseKeywords[rgb];
if (reversed) {
return reversed;
}
let currentClosestDistance = Infinity;
let currentClosestKeyword;
for (const keyword of Object.keys(cssKeywords)) {
const value = cssKeywords[keyword];
// Compute comparative distance
const distance = comparativeDistance(rgb, value);
		// Check if it's less; if so, set it as the closest
if (distance < currentClosestDistance) {
currentClosestDistance = distance;
currentClosestKeyword = keyword;
}
}
return currentClosestKeyword;
};
convert.keyword.rgb = function (keyword) {
return cssKeywords[keyword];
};
convert.rgb.xyz = function (rgb) {
let r = rgb[0] / 255;
let g = rgb[1] / 255;
let b = rgb[2] / 255;
// Assume sRGB
r = r > 0.04045 ? (((r + 0.055) / 1.055) ** 2.4) : (r / 12.92);
g = g > 0.04045 ? (((g + 0.055) / 1.055) ** 2.4) : (g / 12.92);
b = b > 0.04045 ? (((b + 0.055) / 1.055) ** 2.4) : (b / 12.92);
const x = (r * 0.4124) + (g * 0.3576) + (b * 0.1805);
const y = (r * 0.2126) + (g * 0.7152) + (b * 0.0722);
const z = (r * 0.0193) + (g * 0.1192) + (b * 0.9505);
return [x * 100, y * 100, z * 100];
};
convert.rgb.lab = function (rgb) {
const xyz = convert.rgb.xyz(rgb);
let x = xyz[0];
let y = xyz[1];
let z = xyz[2];
x /= 95.047;
y /= 100;
z /= 108.883;
x = x > 0.008856 ? (x ** (1 / 3)) : (7.787 * x) + (16 / 116);
y = y > 0.008856 ? (y ** (1 / 3)) : (7.787 * y) + (16 / 116);
z = z > 0.008856 ? (z ** (1 / 3)) : (7.787 * z) + (16 / 116);
const l = (116 * y) - 16;
const a = 500 * (x - y);
const b = 200 * (y - z);
return [l, a, b];
};
convert.hsl.rgb = function (hsl) {
const h = hsl[0] / 360;
const s = hsl[1] / 100;
const l = hsl[2] / 100;
let t2;
let t3;
let val;
if (s === 0) {
val = l * 255;
return [val, val, val];
}
if (l < 0.5) {
t2 = l * (1 + s);
} else {
t2 = l + s - l * s;
}
const t1 = 2 * l - t2;
const rgb = [0, 0, 0];
for (let i = 0; i < 3; i++) {
t3 = h + 1 / 3 * -(i - 1);
if (t3 < 0) {
t3++;
}
if (t3 > 1) {
t3--;
}
if (6 * t3 < 1) {
val = t1 + (t2 - t1) * 6 * t3;
} else if (2 * t3 < 1) {
val = t2;
} else if (3 * t3 < 2) {
val = t1 + (t2 - t1) * (2 / 3 - t3) * 6;
} else {
val = t1;
}
rgb[i] = val * 255;
}
return rgb;
};
convert.hsl.hsv = function (hsl) {
const h = hsl[0];
let s = hsl[1] / 100;
let l = hsl[2] / 100;
let smin = s;
const lmin = Math.max(l, 0.01);
l *= 2;
s *= (l <= 1) ? l : 2 - l;
smin *= lmin <= 1 ? lmin : 2 - lmin;
const v = (l + s) / 2;
const sv = l === 0 ? (2 * smin) / (lmin + smin) : (2 * s) / (l + s);
return [h, sv * 100, v * 100];
};
convert.hsv.rgb = function (hsv) {
const h = hsv[0] / 60;
const s = hsv[1] / 100;
let v = hsv[2] / 100;
const hi = Math.floor(h) % 6;
const f = h - Math.floor(h);
const p = 255 * v * (1 - s);
const q = 255 * v * (1 - (s * f));
const t = 255 * v * (1 - (s * (1 - f)));
v *= 255;
switch (hi) {
case 0:
return [v, t, p];
case 1:
return [q, v, p];
case 2:
return [p, v, t];
case 3:
return [p, q, v];
case 4:
return [t, p, v];
case 5:
return [v, p, q];
}
};
convert.hsv.hsl = function (hsv) {
const h = hsv[0];
const s = hsv[1] / 100;
const v = hsv[2] / 100;
const vmin = Math.max(v, 0.01);
let sl;
let l;
l = (2 - s) * v;
const lmin = (2 - s) * vmin;
sl = s * vmin;
sl /= (lmin <= 1) ? lmin : 2 - lmin;
sl = sl || 0;
l /= 2;
return [h, sl * 100, l * 100];
};
// http://dev.w3.org/csswg/css-color/#hwb-to-rgb
convert.hwb.rgb = function (hwb) {
const h = hwb[0] / 360;
let wh = hwb[1] / 100;
let bl = hwb[2] / 100;
const ratio = wh + bl;
let f;
	// wh + bl can't be > 1
if (ratio > 1) {
wh /= ratio;
bl /= ratio;
}
const i = Math.floor(6 * h);
const v = 1 - bl;
f = 6 * h - i;
if ((i & 0x01) !== 0) {
f = 1 - f;
}
const n = wh + f * (v - wh); // Linear interpolation
let r;
let g;
let b;
/* eslint-disable max-statements-per-line,no-multi-spaces */
switch (i) {
default:
case 6:
case 0: r = v; g = n; b = wh; break;
case 1: r = n; g = v; b = wh; break;
case 2: r = wh; g = v; b = n; break;
case 3: r = wh; g = n; b = v; break;
case 4: r = n; g = wh; b = v; break;
case 5: r = v; g = wh; b = n; break;
}
/* eslint-enable max-statements-per-line,no-multi-spaces */
return [r * 255, g * 255, b * 255];
};
convert.cmyk.rgb = function (cmyk) {
const c = cmyk[0] / 100;
const m = cmyk[1] / 100;
const y = cmyk[2] / 100;
const k = cmyk[3] / 100;
const r = 1 - Math.min(1, c * (1 - k) + k);
const g = 1 - Math.min(1, m * (1 - k) + k);
const b = 1 - Math.min(1, y * (1 - k) + k);
return [r * 255, g * 255, b * 255];
};
convert.xyz.rgb = function (xyz) {
const x = xyz[0] / 100;
const y = xyz[1] / 100;
const z = xyz[2] / 100;
let r;
let g;
let b;
r = (x * 3.2406) + (y * -1.5372) + (z * -0.4986);
g = (x * -0.9689) + (y * 1.8758) + (z * 0.0415);
b = (x * 0.0557) + (y * -0.2040) + (z * 1.0570);
// Assume sRGB
r = r > 0.0031308
? ((1.055 * (r ** (1.0 / 2.4))) - 0.055)
: r * 12.92;
g = g > 0.0031308
? ((1.055 * (g ** (1.0 / 2.4))) - 0.055)
: g * 12.92;
b = b > 0.0031308
? ((1.055 * (b ** (1.0 / 2.4))) - 0.055)
: b * 12.92;
r = Math.min(Math.max(0, r), 1);
g = Math.min(Math.max(0, g), 1);
b = Math.min(Math.max(0, b), 1);
return [r * 255, g * 255, b * 255];
};
convert.xyz.lab = function (xyz) {
let x = xyz[0];
let y = xyz[1];
let z = xyz[2];
x /= 95.047;
y /= 100;
z /= 108.883;
x = x > 0.008856 ? (x ** (1 / 3)) : (7.787 * x) + (16 / 116);
y = y > 0.008856 ? (y ** (1 / 3)) : (7.787 * y) + (16 / 116);
z = z > 0.008856 ? (z ** (1 / 3)) : (7.787 * z) + (16 / 116);
const l = (116 * y) - 16;
const a = 500 * (x - y);
const b = 200 * (y - z);
return [l, a, b];
};
convert.lab.xyz = function (lab) {
const l = lab[0];
const a = lab[1];
const b = lab[2];
let x;
let y;
let z;
y = (l + 16) / 116;
x = a / 500 + y;
z = y - b / 200;
const y2 = y ** 3;
const x2 = x ** 3;
const z2 = z ** 3;
y = y2 > 0.008856 ? y2 : (y - 16 / 116) / 7.787;
x = x2 > 0.008856 ? x2 : (x - 16 / 116) / 7.787;
z = z2 > 0.008856 ? z2 : (z - 16 / 116) / 7.787;
x *= 95.047;
y *= 100;
z *= 108.883;
return [x, y, z];
};
convert.lab.lch = function (lab) {
const l = lab[0];
const a = lab[1];
const b = lab[2];
let h;
const hr = Math.atan2(b, a);
h = hr * 360 / 2 / Math.PI;
if (h < 0) {
h += 360;
}
const c = Math.sqrt(a * a + b * b);
return [l, c, h];
};
convert.lch.lab = function (lch) {
const l = lch[0];
const c = lch[1];
const h = lch[2];
const hr = h / 360 * 2 * Math.PI;
const a = c * Math.cos(hr);
const b = c * Math.sin(hr);
return [l, a, b];
};
convert.rgb.ansi16 = function (args, saturation = null) {
const [r, g, b] = args;
let value = saturation === null ? convert.rgb.hsv(args)[2] : saturation; // Hsv -> ansi16 optimization
value = Math.round(value / 50);
if (value === 0) {
return 30;
}
let ansi = 30
+ ((Math.round(b / 255) << 2)
| (Math.round(g / 255) << 1)
| Math.round(r / 255));
if (value === 2) {
ansi += 60;
}
return ansi;
};
convert.hsv.ansi16 = function (args) {
// Optimization here; we already know the value and don't need to get
// it converted for us.
return convert.rgb.ansi16(convert.hsv.rgb(args), args[2]);
};
convert.rgb.ansi256 = function (args) {
const r = args[0];
const g = args[1];
const b = args[2];
// We use the extended greyscale palette here, with the exception of
// black and white. The normal palette only has 4 greyscale shades.
if (r === g && g === b) {
if (r < 8) {
return 16;
}
if (r > 248) {
return 231;
}
return Math.round(((r - 8) / 247) * 24) + 232;
}
const ansi = 16
+ (36 * Math.round(r / 255 * 5))
+ (6 * Math.round(g / 255 * 5))
+ Math.round(b / 255 * 5);
return ansi;
};
convert.ansi16.rgb = function (args) {
let color = args % 10;
// Handle greyscale
if (color === 0 || color === 7) {
if (args > 50) {
color += 3.5;
}
color = color / 10.5 * 255;
return [color, color, color];
}
const mult = (~~(args > 50) + 1) * 0.5;
const r = ((color & 1) * mult) * 255;
const g = (((color >> 1) & 1) * mult) * 255;
const b = (((color >> 2) & 1) * mult) * 255;
return [r, g, b];
};
convert.ansi256.rgb = function (args) {
// Handle greyscale
if (args >= 232) {
const c = (args - 232) * 10 + 8;
return [c, c, c];
}
args -= 16;
let rem;
const r = Math.floor(args / 36) / 5 * 255;
const g = Math.floor((rem = args % 36) / 6) / 5 * 255;
const b = (rem % 6) / 5 * 255;
return [r, g, b];
};
convert.rgb.hex = function (args) {
const integer = ((Math.round(args[0]) & 0xFF) << 16)
+ ((Math.round(args[1]) & 0xFF) << 8)
+ (Math.round(args[2]) & 0xFF);
const string = integer.toString(16).toUpperCase();
return '000000'.substring(string.length) + string;
};
convert.hex.rgb = function (args) {
const match = args.toString(16).match(/[a-f0-9]{6}|[a-f0-9]{3}/i);
if (!match) {
return [0, 0, 0];
}
let colorString = match[0];
if (match[0].length === 3) {
colorString = colorString.split('').map(char => {
return char + char;
}).join('');
}
const integer = parseInt(colorString, 16);
const r = (integer >> 16) & 0xFF;
const g = (integer >> 8) & 0xFF;
const b = integer & 0xFF;
return [r, g, b];
};
convert.rgb.hcg = function (rgb) {
const r = rgb[0] / 255;
const g = rgb[1] / 255;
const b = rgb[2] / 255;
const max = Math.max(Math.max(r, g), b);
const min = Math.min(Math.min(r, g), b);
const chroma = (max - min);
let grayscale;
let hue;
if (chroma < 1) {
grayscale = min / (1 - chroma);
} else {
grayscale = 0;
}
if (chroma <= 0) {
hue = 0;
} else
if (max === r) {
hue = ((g - b) / chroma) % 6;
} else
if (max === g) {
hue = 2 + (b - r) / chroma;
} else {
hue = 4 + (r - g) / chroma;
}
hue /= 6;
hue %= 1;
return [hue * 360, chroma * 100, grayscale * 100];
};
convert.hsl.hcg = function (hsl) {
const s = hsl[1] / 100;
const l = hsl[2] / 100;
const c = l < 0.5 ? (2.0 * s * l) : (2.0 * s * (1.0 - l));
let f = 0;
if (c < 1.0) {
f = (l - 0.5 * c) / (1.0 - c);
}
return [hsl[0], c * 100, f * 100];
};
convert.hsv.hcg = function (hsv) {
const s = hsv[1] / 100;
const v = hsv[2] / 100;
const c = s * v;
let f = 0;
if (c < 1.0) {
f = (v - c) / (1 - c);
}
return [hsv[0], c * 100, f * 100];
};
convert.hcg.rgb = function (hcg) {
const h = hcg[0] / 360;
const c = hcg[1] / 100;
const g = hcg[2] / 100;
if (c === 0.0) {
return [g * 255, g * 255, g * 255];
}
const pure = [0, 0, 0];
const hi = (h % 1) * 6;
const v = hi % 1;
const w = 1 - v;
let mg = 0;
/* eslint-disable max-statements-per-line */
switch (Math.floor(hi)) {
case 0:
pure[0] = 1; pure[1] = v; pure[2] = 0; break;
case 1:
pure[0] = w; pure[1] = 1; pure[2] = 0; break;
case 2:
pure[0] = 0; pure[1] = 1; pure[2] = v; break;
case 3:
pure[0] = 0; pure[1] = w; pure[2] = 1; break;
case 4:
pure[0] = v; pure[1] = 0; pure[2] = 1; break;
default:
pure[0] = 1; pure[1] = 0; pure[2] = w;
}
/* eslint-enable max-statements-per-line */
mg = (1.0 - c) * g;
return [
(c * pure[0] + mg) * 255,
(c * pure[1] + mg) * 255,
(c * pure[2] + mg) * 255
];
};
convert.hcg.hsv = function (hcg) {
const c = hcg[1] / 100;
const g = hcg[2] / 100;
const v = c + g * (1.0 - c);
let f = 0;
if (v > 0.0) {
f = c / v;
}
return [hcg[0], f * 100, v * 100];
};
convert.hcg.hsl = function (hcg) {
const c = hcg[1] / 100;
const g = hcg[2] / 100;
const l = g * (1.0 - c) + 0.5 * c;
let s = 0;
if (l > 0.0 && l < 0.5) {
s = c / (2 * l);
} else
if (l >= 0.5 && l < 1.0) {
s = c / (2 * (1 - l));
}
return [hcg[0], s * 100, l * 100];
};
convert.hcg.hwb = function (hcg) {
const c = hcg[1] / 100;
const g = hcg[2] / 100;
const v = c + g * (1.0 - c);
return [hcg[0], (v - c) * 100, (1 - v) * 100];
};
convert.hwb.hcg = function (hwb) {
const w = hwb[1] / 100;
const b = hwb[2] / 100;
const v = 1 - b;
const c = v - w;
let g = 0;
if (c < 1) {
g = (v - c) / (1 - c);
}
return [hwb[0], c * 100, g * 100];
};
convert.apple.rgb = function (apple) {
return [(apple[0] / 65535) * 255, (apple[1] / 65535) * 255, (apple[2] / 65535) * 255];
};
convert.rgb.apple = function (rgb) {
return [(rgb[0] / 255) * 65535, (rgb[1] / 255) * 65535, (rgb[2] / 255) * 65535];
};
convert.gray.rgb = function (args) {
return [args[0] / 100 * 255, args[0] / 100 * 255, args[0] / 100 * 255];
};
convert.gray.hsl = function (args) {
return [0, 0, args[0]];
};
convert.gray.hsv = convert.gray.hsl;
convert.gray.hwb = function (gray) {
return [0, 100, gray[0]];
};
convert.gray.cmyk = function (gray) {
return [0, 0, 0, gray[0]];
};
convert.gray.lab = function (gray) {
return [gray[0], 0, 0];
};
convert.gray.hex = function (gray) {
const val = Math.round(gray[0] / 100 * 255) & 0xFF;
const integer = (val << 16) + (val << 8) + val;
const string = integer.toString(16).toUpperCase();
return '000000'.substring(string.length) + string;
};
convert.rgb.gray = function (rgb) {
const val = (rgb[0] + rgb[1] + rgb[2]) / 3;
return [val / 255 * 100];
};

81
node_modules/color-convert/index.js generated vendored Normal file
View File

@@ -0,0 +1,81 @@
const conversions = require('./conversions');
const route = require('./route');
const convert = {};
const models = Object.keys(conversions);
function wrapRaw(fn) {
const wrappedFn = function (...args) {
const arg0 = args[0];
if (arg0 === undefined || arg0 === null) {
return arg0;
}
if (arg0.length > 1) {
args = arg0;
}
return fn(args);
};
// Preserve .conversion property if there is one
if ('conversion' in fn) {
wrappedFn.conversion = fn.conversion;
}
return wrappedFn;
}
function wrapRounded(fn) {
const wrappedFn = function (...args) {
const arg0 = args[0];
if (arg0 === undefined || arg0 === null) {
return arg0;
}
if (arg0.length > 1) {
args = arg0;
}
const result = fn(args);
// We're assuming the result is an array here.
// see notice in conversions.js; don't use box types
// in conversion functions.
if (typeof result === 'object') {
for (let len = result.length, i = 0; i < len; i++) {
result[i] = Math.round(result[i]);
}
}
return result;
};
// Preserve .conversion property if there is one
if ('conversion' in fn) {
wrappedFn.conversion = fn.conversion;
}
return wrappedFn;
}
models.forEach(fromModel => {
convert[fromModel] = {};
Object.defineProperty(convert[fromModel], 'channels', {value: conversions[fromModel].channels});
Object.defineProperty(convert[fromModel], 'labels', {value: conversions[fromModel].labels});
const routes = route(fromModel);
const routeModels = Object.keys(routes);
routeModels.forEach(toModel => {
const fn = routes[toModel];
convert[fromModel][toModel] = wrapRounded(fn);
convert[fromModel][toModel].raw = wrapRaw(fn);
});
});
module.exports = convert;
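// Illustrative usage of the wrapped API built above (not part of the upstream file):
//   convert.rgb.hsl(140, 200, 100)      // channel values rounded by wrapRounded
//   convert.rgb.hsl.raw(140, 200, 100)  // unrounded values via wrapRaw
//   convert.rgb.hsl([140, 200, 100])    // an array argument works as well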

48
node_modules/color-convert/package.json generated vendored Normal file
View File

@@ -0,0 +1,48 @@
{
"name": "color-convert",
"description": "Plain color conversion functions",
"version": "2.0.1",
"author": "Heather Arthur <fayearthur@gmail.com>",
"license": "MIT",
"repository": "Qix-/color-convert",
"scripts": {
"pretest": "xo",
"test": "node test/basic.js"
},
"engines": {
"node": ">=7.0.0"
},
"keywords": [
"color",
"colour",
"convert",
"converter",
"conversion",
"rgb",
"hsl",
"hsv",
"hwb",
"cmyk",
"ansi",
"ansi16"
],
"files": [
"index.js",
"conversions.js",
"route.js"
],
"xo": {
"rules": {
"default-case": 0,
"no-inline-comments": 0,
"operator-linebreak": 0
}
},
"devDependencies": {
"chalk": "^2.4.2",
"xo": "^0.24.0"
},
"dependencies": {
"color-name": "~1.1.4"
}
}

97
node_modules/color-convert/route.js generated vendored Normal file
View File

@@ -0,0 +1,97 @@
const conversions = require('./conversions');
/*
This function routes a model to all other models.
all functions that are routed have a property `.conversion` attached
to the returned synthetic function. This property is an array
of strings, each with the steps in between the 'from' and 'to'
color models (inclusive).
conversions that are not possible simply are not included.
*/
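// Illustrative example (not part of the upstream source): assuming there is no
// direct hsl->lab conversion, the routed function returned below records the hop
// through rgb, e.g. require('./route')('hsl').lab.conversion === ['hsl', 'rgb', 'lab']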
function buildGraph() {
const graph = {};
// https://jsperf.com/object-keys-vs-for-in-with-closure/3
const models = Object.keys(conversions);
for (let len = models.length, i = 0; i < len; i++) {
graph[models[i]] = {
// http://jsperf.com/1-vs-infinity
// micro-opt, but this is simple.
distance: -1,
parent: null
};
}
return graph;
}
// https://en.wikipedia.org/wiki/Breadth-first_search
function deriveBFS(fromModel) {
const graph = buildGraph();
const queue = [fromModel]; // Unshift -> queue -> pop
graph[fromModel].distance = 0;
while (queue.length) {
const current = queue.pop();
const adjacents = Object.keys(conversions[current]);
for (let len = adjacents.length, i = 0; i < len; i++) {
const adjacent = adjacents[i];
const node = graph[adjacent];
if (node.distance === -1) {
node.distance = graph[current].distance + 1;
node.parent = current;
queue.unshift(adjacent);
}
}
}
return graph;
}
function link(from, to) {
return function (args) {
return to(from(args));
};
}
function wrapConversion(toModel, graph) {
const path = [graph[toModel].parent, toModel];
let fn = conversions[graph[toModel].parent][toModel];
let cur = graph[toModel].parent;
while (graph[cur].parent) {
path.unshift(graph[cur].parent);
fn = link(conversions[graph[cur].parent][cur], fn);
cur = graph[cur].parent;
}
fn.conversion = path;
return fn;
}
module.exports = function (fromModel) {
const graph = deriveBFS(fromModel);
const conversion = {};
const models = Object.keys(graph);
for (let len = models.length, i = 0; i < len; i++) {
const toModel = models[i];
const node = graph[toModel];
if (node.parent === null) {
// No possible conversion, or this node is the source model.
continue;
}
conversion[toModel] = wrapConversion(toModel, graph);
}
return conversion;
};

8
node_modules/color-name/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,8 @@
The MIT License (MIT)
Copyright (c) 2015 Dmitry Ivanov
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

11
node_modules/color-name/README.md generated vendored Normal file
View File

@@ -0,0 +1,11 @@
A JSON object with color names and their values. Based on http://dev.w3.org/csswg/css-color/#named-colors.
[![NPM](https://nodei.co/npm/color-name.png?mini=true)](https://nodei.co/npm/color-name/)
```js
var colors = require('color-name');
colors.red //[255,0,0]
```
<a href="LICENSE"><img src="https://upload.wikimedia.org/wikipedia/commons/0/0c/MIT_logo.svg" width="120"/></a>

152
node_modules/color-name/index.js generated vendored Normal file
View File

@@ -0,0 +1,152 @@
'use strict'
module.exports = {
"aliceblue": [240, 248, 255],
"antiquewhite": [250, 235, 215],
"aqua": [0, 255, 255],
"aquamarine": [127, 255, 212],
"azure": [240, 255, 255],
"beige": [245, 245, 220],
"bisque": [255, 228, 196],
"black": [0, 0, 0],
"blanchedalmond": [255, 235, 205],
"blue": [0, 0, 255],
"blueviolet": [138, 43, 226],
"brown": [165, 42, 42],
"burlywood": [222, 184, 135],
"cadetblue": [95, 158, 160],
"chartreuse": [127, 255, 0],
"chocolate": [210, 105, 30],
"coral": [255, 127, 80],
"cornflowerblue": [100, 149, 237],
"cornsilk": [255, 248, 220],
"crimson": [220, 20, 60],
"cyan": [0, 255, 255],
"darkblue": [0, 0, 139],
"darkcyan": [0, 139, 139],
"darkgoldenrod": [184, 134, 11],
"darkgray": [169, 169, 169],
"darkgreen": [0, 100, 0],
"darkgrey": [169, 169, 169],
"darkkhaki": [189, 183, 107],
"darkmagenta": [139, 0, 139],
"darkolivegreen": [85, 107, 47],
"darkorange": [255, 140, 0],
"darkorchid": [153, 50, 204],
"darkred": [139, 0, 0],
"darksalmon": [233, 150, 122],
"darkseagreen": [143, 188, 143],
"darkslateblue": [72, 61, 139],
"darkslategray": [47, 79, 79],
"darkslategrey": [47, 79, 79],
"darkturquoise": [0, 206, 209],
"darkviolet": [148, 0, 211],
"deeppink": [255, 20, 147],
"deepskyblue": [0, 191, 255],
"dimgray": [105, 105, 105],
"dimgrey": [105, 105, 105],
"dodgerblue": [30, 144, 255],
"firebrick": [178, 34, 34],
"floralwhite": [255, 250, 240],
"forestgreen": [34, 139, 34],
"fuchsia": [255, 0, 255],
"gainsboro": [220, 220, 220],
"ghostwhite": [248, 248, 255],
"gold": [255, 215, 0],
"goldenrod": [218, 165, 32],
"gray": [128, 128, 128],
"green": [0, 128, 0],
"greenyellow": [173, 255, 47],
"grey": [128, 128, 128],
"honeydew": [240, 255, 240],
"hotpink": [255, 105, 180],
"indianred": [205, 92, 92],
"indigo": [75, 0, 130],
"ivory": [255, 255, 240],
"khaki": [240, 230, 140],
"lavender": [230, 230, 250],
"lavenderblush": [255, 240, 245],
"lawngreen": [124, 252, 0],
"lemonchiffon": [255, 250, 205],
"lightblue": [173, 216, 230],
"lightcoral": [240, 128, 128],
"lightcyan": [224, 255, 255],
"lightgoldenrodyellow": [250, 250, 210],
"lightgray": [211, 211, 211],
"lightgreen": [144, 238, 144],
"lightgrey": [211, 211, 211],
"lightpink": [255, 182, 193],
"lightsalmon": [255, 160, 122],
"lightseagreen": [32, 178, 170],
"lightskyblue": [135, 206, 250],
"lightslategray": [119, 136, 153],
"lightslategrey": [119, 136, 153],
"lightsteelblue": [176, 196, 222],
"lightyellow": [255, 255, 224],
"lime": [0, 255, 0],
"limegreen": [50, 205, 50],
"linen": [250, 240, 230],
"magenta": [255, 0, 255],
"maroon": [128, 0, 0],
"mediumaquamarine": [102, 205, 170],
"mediumblue": [0, 0, 205],
"mediumorchid": [186, 85, 211],
"mediumpurple": [147, 112, 219],
"mediumseagreen": [60, 179, 113],
"mediumslateblue": [123, 104, 238],
"mediumspringgreen": [0, 250, 154],
"mediumturquoise": [72, 209, 204],
"mediumvioletred": [199, 21, 133],
"midnightblue": [25, 25, 112],
"mintcream": [245, 255, 250],
"mistyrose": [255, 228, 225],
"moccasin": [255, 228, 181],
"navajowhite": [255, 222, 173],
"navy": [0, 0, 128],
"oldlace": [253, 245, 230],
"olive": [128, 128, 0],
"olivedrab": [107, 142, 35],
"orange": [255, 165, 0],
"orangered": [255, 69, 0],
"orchid": [218, 112, 214],
"palegoldenrod": [238, 232, 170],
"palegreen": [152, 251, 152],
"paleturquoise": [175, 238, 238],
"palevioletred": [219, 112, 147],
"papayawhip": [255, 239, 213],
"peachpuff": [255, 218, 185],
"peru": [205, 133, 63],
"pink": [255, 192, 203],
"plum": [221, 160, 221],
"powderblue": [176, 224, 230],
"purple": [128, 0, 128],
"rebeccapurple": [102, 51, 153],
"red": [255, 0, 0],
"rosybrown": [188, 143, 143],
"royalblue": [65, 105, 225],
"saddlebrown": [139, 69, 19],
"salmon": [250, 128, 114],
"sandybrown": [244, 164, 96],
"seagreen": [46, 139, 87],
"seashell": [255, 245, 238],
"sienna": [160, 82, 45],
"silver": [192, 192, 192],
"skyblue": [135, 206, 235],
"slateblue": [106, 90, 205],
"slategray": [112, 128, 144],
"slategrey": [112, 128, 144],
"snow": [255, 250, 250],
"springgreen": [0, 255, 127],
"steelblue": [70, 130, 180],
"tan": [210, 180, 140],
"teal": [0, 128, 128],
"thistle": [216, 191, 216],
"tomato": [255, 99, 71],
"turquoise": [64, 224, 208],
"violet": [238, 130, 238],
"wheat": [245, 222, 179],
"white": [255, 255, 255],
"whitesmoke": [245, 245, 245],
"yellow": [255, 255, 0],
"yellowgreen": [154, 205, 50]
};

28
node_modules/color-name/package.json generated vendored Normal file
View File

@@ -0,0 +1,28 @@
{
"name": "color-name",
"version": "1.1.4",
"description": "A list of color names and its values",
"main": "index.js",
"files": [
"index.js"
],
"scripts": {
"test": "node test.js"
},
"repository": {
"type": "git",
"url": "git@github.com:colorjs/color-name.git"
},
"keywords": [
"color-name",
"color",
"color-keyword",
"keyword"
],
"author": "DY <dfcreative@gmail.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/colorjs/color-name/issues"
},
"homepage": "https://github.com/colorjs/color-name"
}

21
node_modules/color-string/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
Copyright (c) 2011 Heather Arthur <fayearthur@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

62
node_modules/color-string/README.md generated vendored Normal file
View File

@@ -0,0 +1,62 @@
# color-string
> Library for parsing and generating CSS color strings.
## Install
With [npm](http://npmjs.org/):
```console
$ npm install color-string
```
## Usage
### Parsing
```js
colorString.get('#FFF') // {model: 'rgb', value: [255, 255, 255, 1]}
colorString.get('#FFFA') // {model: 'rgb', value: [255, 255, 255, 0.67]}
colorString.get('#FFFFFFAA') // {model: 'rgb', value: [255, 255, 255, 0.67]}
colorString.get('hsl(360, 100%, 50%)') // {model: 'hsl', value: [0, 100, 50, 1]}
colorString.get('hsl(360 100% 50%)') // {model: 'hsl', value: [0, 100, 50, 1]}
colorString.get('hwb(60, 3%, 60%)') // {model: 'hwb', value: [60, 3, 60, 1]}
colorString.get.rgb('#FFF') // [255, 255, 255, 1]
colorString.get.rgb('blue') // [0, 0, 255, 1]
colorString.get.rgb('rgba(200, 60, 60, 0.3)') // [200, 60, 60, 0.3]
colorString.get.rgb('rgba(200 60 60 / 0.3)') // [200, 60, 60, 0.3]
colorString.get.rgb('rgba(200 60 60 / 30%)') // [200, 60, 60, 0.3]
colorString.get.rgb('rgb(200, 200, 200)') // [200, 200, 200, 1]
colorString.get.rgb('rgb(200 200 200)') // [200, 200, 200, 1]
colorString.get.hsl('hsl(360, 100%, 50%)') // [0, 100, 50, 1]
colorString.get.hsl('hsl(360 100% 50%)') // [0, 100, 50, 1]
colorString.get.hsl('hsla(360, 60%, 50%, 0.4)') // [0, 60, 50, 0.4]
colorString.get.hsl('hsl(360 60% 50% / 0.4)') // [0, 60, 50, 0.4]
colorString.get.hwb('hwb(60, 3%, 60%)') // [60, 3, 60, 1]
colorString.get.hwb('hwb(60, 3%, 60%, 0.6)') // [60, 3, 60, 0.6]
colorString.get.rgb('invalid color string') // null
```
### Generation
```js
colorString.to.hex([255, 255, 255]) // "#FFFFFF"
colorString.to.hex([0, 0, 255, 0.4]) // "#0000FF66"
colorString.to.hex([0, 0, 255], 0.4) // "#0000FF66"
colorString.to.rgb([255, 255, 255]) // "rgb(255, 255, 255)"
colorString.to.rgb([0, 0, 255, 0.4]) // "rgba(0, 0, 255, 0.4)"
colorString.to.rgb([0, 0, 255], 0.4) // "rgba(0, 0, 255, 0.4)"
colorString.to.rgb.percent([0, 0, 255]) // "rgb(0%, 0%, 100%)"
colorString.to.keyword([255, 255, 0]) // "yellow"
colorString.to.hsl([360, 100, 100]) // "hsl(360, 100%, 100%)"
colorString.to.hwb([50, 3, 15]) // "hwb(50, 3%, 15%)"
// all functions also support swizzling
colorString.to.rgb(0, [0, 255], 0.4) // "rgba(0, 0, 255, 0.4)"
colorString.to.rgb([0, 0], [255], 0.4) // "rgba(0, 0, 255, 0.4)"
colorString.to.rgb([0], 0, [255, 0.4]) // "rgba(0, 0, 255, 0.4)"
```

242
node_modules/color-string/index.js generated vendored Normal file
View File

@@ -0,0 +1,242 @@
/* MIT license */
var colorNames = require('color-name');
var swizzle = require('simple-swizzle');
var hasOwnProperty = Object.hasOwnProperty;
var reverseNames = Object.create(null);
// create a list of reverse color names
for (var name in colorNames) {
if (hasOwnProperty.call(colorNames, name)) {
reverseNames[colorNames[name]] = name;
}
}
var cs = module.exports = {
to: {},
get: {}
};
cs.get = function (string) {
var prefix = string.substring(0, 3).toLowerCase();
var val;
var model;
switch (prefix) {
case 'hsl':
val = cs.get.hsl(string);
model = 'hsl';
break;
case 'hwb':
val = cs.get.hwb(string);
model = 'hwb';
break;
default:
val = cs.get.rgb(string);
model = 'rgb';
break;
}
if (!val) {
return null;
}
return {model: model, value: val};
};
cs.get.rgb = function (string) {
if (!string) {
return null;
}
var abbr = /^#([a-f0-9]{3,4})$/i;
var hex = /^#([a-f0-9]{6})([a-f0-9]{2})?$/i;
var rgba = /^rgba?\(\s*([+-]?\d+)(?=[\s,])\s*(?:,\s*)?([+-]?\d+)(?=[\s,])\s*(?:,\s*)?([+-]?\d+)\s*(?:[,|\/]\s*([+-]?[\d\.]+)(%?)\s*)?\)$/;
var per = /^rgba?\(\s*([+-]?[\d\.]+)\%\s*,?\s*([+-]?[\d\.]+)\%\s*,?\s*([+-]?[\d\.]+)\%\s*(?:[,|\/]\s*([+-]?[\d\.]+)(%?)\s*)?\)$/;
var keyword = /^(\w+)$/;
var rgb = [0, 0, 0, 1];
var match;
var i;
var hexAlpha;
if (match = string.match(hex)) {
hexAlpha = match[2];
match = match[1];
for (i = 0; i < 3; i++) {
// https://jsperf.com/slice-vs-substr-vs-substring-methods-long-string/19
var i2 = i * 2;
rgb[i] = parseInt(match.slice(i2, i2 + 2), 16);
}
if (hexAlpha) {
rgb[3] = parseInt(hexAlpha, 16) / 255;
}
} else if (match = string.match(abbr)) {
match = match[1];
hexAlpha = match[3];
for (i = 0; i < 3; i++) {
rgb[i] = parseInt(match[i] + match[i], 16);
}
if (hexAlpha) {
rgb[3] = parseInt(hexAlpha + hexAlpha, 16) / 255;
}
} else if (match = string.match(rgba)) {
for (i = 0; i < 3; i++) {
rgb[i] = parseInt(match[i + 1], 0);
}
if (match[4]) {
if (match[5]) {
rgb[3] = parseFloat(match[4]) * 0.01;
} else {
rgb[3] = parseFloat(match[4]);
}
}
} else if (match = string.match(per)) {
for (i = 0; i < 3; i++) {
rgb[i] = Math.round(parseFloat(match[i + 1]) * 2.55);
}
if (match[4]) {
if (match[5]) {
rgb[3] = parseFloat(match[4]) * 0.01;
} else {
rgb[3] = parseFloat(match[4]);
}
}
} else if (match = string.match(keyword)) {
if (match[1] === 'transparent') {
return [0, 0, 0, 0];
}
if (!hasOwnProperty.call(colorNames, match[1])) {
return null;
}
rgb = colorNames[match[1]];
rgb[3] = 1;
return rgb;
} else {
return null;
}
for (i = 0; i < 3; i++) {
rgb[i] = clamp(rgb[i], 0, 255);
}
rgb[3] = clamp(rgb[3], 0, 1);
return rgb;
};
cs.get.hsl = function (string) {
if (!string) {
return null;
}
var hsl = /^hsla?\(\s*([+-]?(?:\d{0,3}\.)?\d+)(?:deg)?\s*,?\s*([+-]?[\d\.]+)%\s*,?\s*([+-]?[\d\.]+)%\s*(?:[,|\/]\s*([+-]?(?=\.\d|\d)(?:0|[1-9]\d*)?(?:\.\d*)?(?:[eE][+-]?\d+)?)\s*)?\)$/;
var match = string.match(hsl);
if (match) {
var alpha = parseFloat(match[4]);
var h = ((parseFloat(match[1]) % 360) + 360) % 360;
var s = clamp(parseFloat(match[2]), 0, 100);
var l = clamp(parseFloat(match[3]), 0, 100);
var a = clamp(isNaN(alpha) ? 1 : alpha, 0, 1);
return [h, s, l, a];
}
return null;
};
cs.get.hwb = function (string) {
if (!string) {
return null;
}
var hwb = /^hwb\(\s*([+-]?\d{0,3}(?:\.\d+)?)(?:deg)?\s*,\s*([+-]?[\d\.]+)%\s*,\s*([+-]?[\d\.]+)%\s*(?:,\s*([+-]?(?=\.\d|\d)(?:0|[1-9]\d*)?(?:\.\d*)?(?:[eE][+-]?\d+)?)\s*)?\)$/;
var match = string.match(hwb);
if (match) {
var alpha = parseFloat(match[4]);
var h = ((parseFloat(match[1]) % 360) + 360) % 360;
var w = clamp(parseFloat(match[2]), 0, 100);
var b = clamp(parseFloat(match[3]), 0, 100);
var a = clamp(isNaN(alpha) ? 1 : alpha, 0, 1);
return [h, w, b, a];
}
return null;
};
cs.to.hex = function () {
var rgba = swizzle(arguments);
return (
'#' +
hexDouble(rgba[0]) +
hexDouble(rgba[1]) +
hexDouble(rgba[2]) +
(rgba[3] < 1
? (hexDouble(Math.round(rgba[3] * 255)))
: '')
);
};
cs.to.rgb = function () {
var rgba = swizzle(arguments);
return rgba.length < 4 || rgba[3] === 1
? 'rgb(' + Math.round(rgba[0]) + ', ' + Math.round(rgba[1]) + ', ' + Math.round(rgba[2]) + ')'
: 'rgba(' + Math.round(rgba[0]) + ', ' + Math.round(rgba[1]) + ', ' + Math.round(rgba[2]) + ', ' + rgba[3] + ')';
};
cs.to.rgb.percent = function () {
var rgba = swizzle(arguments);
var r = Math.round(rgba[0] / 255 * 100);
var g = Math.round(rgba[1] / 255 * 100);
var b = Math.round(rgba[2] / 255 * 100);
return rgba.length < 4 || rgba[3] === 1
? 'rgb(' + r + '%, ' + g + '%, ' + b + '%)'
: 'rgba(' + r + '%, ' + g + '%, ' + b + '%, ' + rgba[3] + ')';
};
cs.to.hsl = function () {
var hsla = swizzle(arguments);
return hsla.length < 4 || hsla[3] === 1
? 'hsl(' + hsla[0] + ', ' + hsla[1] + '%, ' + hsla[2] + '%)'
: 'hsla(' + hsla[0] + ', ' + hsla[1] + '%, ' + hsla[2] + '%, ' + hsla[3] + ')';
};
// hwb is a bit different from rgb(a) & hsl(a) since there is no alpha-specific syntax
// (hwb has an optional alpha; 1 is the default value)
cs.to.hwb = function () {
var hwba = swizzle(arguments);
var a = '';
if (hwba.length >= 4 && hwba[3] !== 1) {
a = ', ' + hwba[3];
}
return 'hwb(' + hwba[0] + ', ' + hwba[1] + '%, ' + hwba[2] + '%' + a + ')';
};
cs.to.keyword = function (rgb) {
return reverseNames[rgb.slice(0, 3)];
};
// helpers
function clamp(num, min, max) {
return Math.min(Math.max(min, num), max);
}
function hexDouble(num) {
var str = Math.round(num).toString(16).toUpperCase();
return (str.length < 2) ? '0' + str : str;
}

39
node_modules/color-string/package.json generated vendored Normal file
View File

@@ -0,0 +1,39 @@
{
"name": "color-string",
"description": "Parser and generator for CSS color strings",
"version": "1.9.1",
"author": "Heather Arthur <fayearthur@gmail.com>",
"contributors": [
"Maxime Thirouin",
"Dyma Ywanov <dfcreative@gmail.com>",
"Josh Junon"
],
"repository": "Qix-/color-string",
"scripts": {
"pretest": "xo",
"test": "node test/basic.js"
},
"license": "MIT",
"files": [
"index.js"
],
"xo": {
"rules": {
"no-cond-assign": 0,
"operator-linebreak": 0
}
},
"dependencies": {
"color-name": "^1.0.0",
"simple-swizzle": "^0.2.2"
},
"devDependencies": {
"xo": "^0.12.1"
},
"keywords": [
"color",
"colour",
"rgb",
"css"
]
}

21
node_modules/color/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
Copyright (c) 2012 Heather Arthur
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

123
node_modules/color/README.md generated vendored Normal file
View File

@@ -0,0 +1,123 @@
# color
> JavaScript library for immutable color conversion and manipulation with support for CSS color strings.
```js
const color = Color('#7743CE').alpha(0.5).lighten(0.5);
console.log(color.hsl().string()); // 'hsla(262, 59%, 81%, 0.5)'
console.log(color.cmyk().round().array()); // [ 16, 25, 0, 8, 0.5 ]
console.log(color.ansi256().object()); // { ansi256: 183, alpha: 0.5 }
```
## Install
```console
$ npm install color
```
## Usage
```js
const Color = require('color');
```
### Constructors
```js
const color = Color('rgb(255, 255, 255)')
const color = Color({r: 255, g: 255, b: 255})
const color = Color.rgb(255, 255, 255)
const color = Color.rgb([255, 255, 255])
```
Set the values for individual channels with `alpha`, `red`, `green`, `blue`, `hue`, `saturationl` (hsl), `saturationv` (hsv), `lightness`, `whiteness`, `blackness`, `cyan`, `magenta`, `yellow`, `black`
String constructors are handled by [color-string](https://www.npmjs.com/package/color-string)
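For instance, a minimal sketch of the channel setters described above (values are illustrative; the expected results follow from the conversion code bundled in this package):
```js
const white = Color('rgb(255, 255, 255)');
// Each setter returns a new Color; the original instance is left untouched
const tinted = white.red(120).green(64);
tinted.hex(); // '#7840FF'
```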
### Getters
```js
color.hsl();
```
Convert a color to a different space (`hsl()`, `cmyk()`, etc.).
```js
color.object(); // {r: 255, g: 255, b: 255}
```
Get a hash of the color value. Reflects the color's current model (see above).
```js
color.rgb().array() // [255, 255, 255]
```
Get an array of the values with `array()`. Reflects the color's current model (see above).
```js
color.rgbNumber() // 16777215 (0xffffff)
```
Get the rgb number value.
```js
color.hex() // #ffffff
```
Get the hex value. (**NOTE:** `.hex()` does not return alpha values; use `.hexa()` for an RGBA representation)
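As a quick illustration of the difference (the alpha value is chosen arbitrarily):
```js
Color('#ffffff').alpha(0.5).hex();  // '#FFFFFF'
Color('#ffffff').alpha(0.5).hexa(); // '#FFFFFF80'
```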
```js
color.red() // 255
```
Get the value for an individual channel.
### CSS Strings
```js
color.hsl().string() // 'hsl(320, 50%, 100%)'
```
Calling `.string()` with a number rounds the numbers to that decimal place. It defaults to 1.
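For example (input values are illustrative):
```js
const c = Color.hsl(262.382, 59.16, 81.33);
c.string();  // 'hsl(262.4, 59.2%, 81.3%)' (default: 1 decimal place)
c.string(0); // 'hsl(262, 59%, 81%)'
```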
### Luminosity
```js
color.luminosity(); // 0.412
```
The [WCAG luminosity](http://www.w3.org/TR/WCAG20/#relativeluminancedef) of the color. 0 is black, 1 is white.
```js
color.contrast(Color("blue")) // 12
```
The [WCAG contrast ratio](http://www.w3.org/TR/WCAG20/#contrast-ratiodef) to another color, from 1 (same color) to 21 (contrast b/w white and black).
```js
color.isLight(); // true
color.isDark(); // false
```
Get whether the color is "light" or "dark", useful for deciding text color.
### Manipulation
```js
color.negate() // rgb(0, 100, 255) -> rgb(255, 155, 0)
color.lighten(0.5) // hsl(100, 50%, 50%) -> hsl(100, 50%, 75%)
color.lighten(0.5) // hsl(100, 50%, 0) -> hsl(100, 50%, 0)
color.darken(0.5) // hsl(100, 50%, 50%) -> hsl(100, 50%, 25%)
color.darken(0.5) // hsl(100, 50%, 0) -> hsl(100, 50%, 0)
color.lightness(50) // hsl(100, 50%, 10%) -> hsl(100, 50%, 50%)
color.saturate(0.5) // hsl(100, 50%, 50%) -> hsl(100, 75%, 50%)
color.desaturate(0.5) // hsl(100, 50%, 50%) -> hsl(100, 25%, 50%)
color.grayscale() // #5CBF54 -> #969696
color.whiten(0.5) // hwb(100, 50%, 50%) -> hwb(100, 75%, 50%)
color.blacken(0.5) // hwb(100, 50%, 50%) -> hwb(100, 50%, 75%)
color.fade(0.5) // rgba(10, 10, 10, 0.8) -> rgba(10, 10, 10, 0.4)
color.opaquer(0.5) // rgba(10, 10, 10, 0.8) -> rgba(10, 10, 10, 1.0)
color.rotate(180) // hsl(60, 20%, 20%) -> hsl(240, 20%, 20%)
color.rotate(-90) // hsl(60, 20%, 20%) -> hsl(330, 20%, 20%)
color.mix(Color("yellow")) // cyan -> rgb(128, 255, 128)
color.mix(Color("yellow"), 0.3) // cyan -> rgb(77, 255, 179)
// chaining
color.green(100).grayscale().lighten(0.6)
```
## Propers
The API was inspired by [color-js](https://github.com/brehaut/color-js). Manipulation functions by CSS tools like Sass, LESS, and Stylus.

496
node_modules/color/index.js generated vendored Normal file
View File

@@ -0,0 +1,496 @@
const colorString = require('color-string');
const convert = require('color-convert');
const skippedModels = [
// To be honest, I don't really feel like keyword belongs in color convert, but eh.
'keyword',
// Gray conflicts with some method names, and has its own method defined.
'gray',
// Shouldn't really be in color-convert either...
'hex',
];
const hashedModelKeys = {};
for (const model of Object.keys(convert)) {
hashedModelKeys[[...convert[model].labels].sort().join('')] = model;
}
const limiters = {};
function Color(object, model) {
if (!(this instanceof Color)) {
return new Color(object, model);
}
if (model && model in skippedModels) {
model = null;
}
if (model && !(model in convert)) {
throw new Error('Unknown model: ' + model);
}
let i;
let channels;
if (object == null) { // eslint-disable-line no-eq-null,eqeqeq
this.model = 'rgb';
this.color = [0, 0, 0];
this.valpha = 1;
} else if (object instanceof Color) {
this.model = object.model;
this.color = [...object.color];
this.valpha = object.valpha;
} else if (typeof object === 'string') {
const result = colorString.get(object);
if (result === null) {
throw new Error('Unable to parse color from string: ' + object);
}
this.model = result.model;
channels = convert[this.model].channels;
this.color = result.value.slice(0, channels);
this.valpha = typeof result.value[channels] === 'number' ? result.value[channels] : 1;
} else if (object.length > 0) {
this.model = model || 'rgb';
channels = convert[this.model].channels;
const newArray = Array.prototype.slice.call(object, 0, channels);
this.color = zeroArray(newArray, channels);
this.valpha = typeof object[channels] === 'number' ? object[channels] : 1;
} else if (typeof object === 'number') {
// This is always RGB - can be converted later on.
this.model = 'rgb';
this.color = [
(object >> 16) & 0xFF,
(object >> 8) & 0xFF,
object & 0xFF,
];
this.valpha = 1;
} else {
this.valpha = 1;
const keys = Object.keys(object);
if ('alpha' in object) {
keys.splice(keys.indexOf('alpha'), 1);
this.valpha = typeof object.alpha === 'number' ? object.alpha : 0;
}
const hashedKeys = keys.sort().join('');
if (!(hashedKeys in hashedModelKeys)) {
throw new Error('Unable to parse color from object: ' + JSON.stringify(object));
}
this.model = hashedModelKeys[hashedKeys];
const {labels} = convert[this.model];
const color = [];
for (i = 0; i < labels.length; i++) {
color.push(object[labels[i]]);
}
this.color = zeroArray(color);
}
// Perform limitations (clamping, etc.)
if (limiters[this.model]) {
channels = convert[this.model].channels;
for (i = 0; i < channels; i++) {
const limit = limiters[this.model][i];
if (limit) {
this.color[i] = limit(this.color[i]);
}
}
}
this.valpha = Math.max(0, Math.min(1, this.valpha));
if (Object.freeze) {
Object.freeze(this);
}
}
Color.prototype = {
toString() {
return this.string();
},
toJSON() {
return this[this.model]();
},
string(places) {
let self = this.model in colorString.to ? this : this.rgb();
self = self.round(typeof places === 'number' ? places : 1);
const args = self.valpha === 1 ? self.color : [...self.color, this.valpha];
return colorString.to[self.model](args);
},
percentString(places) {
const self = this.rgb().round(typeof places === 'number' ? places : 1);
const args = self.valpha === 1 ? self.color : [...self.color, this.valpha];
return colorString.to.rgb.percent(args);
},
array() {
return this.valpha === 1 ? [...this.color] : [...this.color, this.valpha];
},
object() {
const result = {};
const {channels} = convert[this.model];
const {labels} = convert[this.model];
for (let i = 0; i < channels; i++) {
result[labels[i]] = this.color[i];
}
if (this.valpha !== 1) {
result.alpha = this.valpha;
}
return result;
},
unitArray() {
const rgb = this.rgb().color;
rgb[0] /= 255;
rgb[1] /= 255;
rgb[2] /= 255;
if (this.valpha !== 1) {
rgb.push(this.valpha);
}
return rgb;
},
unitObject() {
const rgb = this.rgb().object();
rgb.r /= 255;
rgb.g /= 255;
rgb.b /= 255;
if (this.valpha !== 1) {
rgb.alpha = this.valpha;
}
return rgb;
},
round(places) {
places = Math.max(places || 0, 0);
return new Color([...this.color.map(roundToPlace(places)), this.valpha], this.model);
},
alpha(value) {
if (value !== undefined) {
return new Color([...this.color, Math.max(0, Math.min(1, value))], this.model);
}
return this.valpha;
},
// Rgb
red: getset('rgb', 0, maxfn(255)),
green: getset('rgb', 1, maxfn(255)),
blue: getset('rgb', 2, maxfn(255)),
hue: getset(['hsl', 'hsv', 'hsl', 'hwb', 'hcg'], 0, value => ((value % 360) + 360) % 360),
saturationl: getset('hsl', 1, maxfn(100)),
lightness: getset('hsl', 2, maxfn(100)),
saturationv: getset('hsv', 1, maxfn(100)),
value: getset('hsv', 2, maxfn(100)),
chroma: getset('hcg', 1, maxfn(100)),
gray: getset('hcg', 2, maxfn(100)),
white: getset('hwb', 1, maxfn(100)),
wblack: getset('hwb', 2, maxfn(100)),
cyan: getset('cmyk', 0, maxfn(100)),
magenta: getset('cmyk', 1, maxfn(100)),
yellow: getset('cmyk', 2, maxfn(100)),
black: getset('cmyk', 3, maxfn(100)),
x: getset('xyz', 0, maxfn(95.047)),
y: getset('xyz', 1, maxfn(100)),
z: getset('xyz', 2, maxfn(108.883)), // D65 reference Z, matching the xyz conversions
l: getset('lab', 0, maxfn(100)),
a: getset('lab', 1),
b: getset('lab', 2),
keyword(value) {
if (value !== undefined) {
return new Color(value);
}
return convert[this.model].keyword(this.color);
},
hex(value) {
if (value !== undefined) {
return new Color(value);
}
return colorString.to.hex(this.rgb().round().color);
},
hexa(value) {
if (value !== undefined) {
return new Color(value);
}
const rgbArray = this.rgb().round().color;
let alphaHex = Math.round(this.valpha * 255).toString(16).toUpperCase();
if (alphaHex.length === 1) {
alphaHex = '0' + alphaHex;
}
return colorString.to.hex(rgbArray) + alphaHex;
},
rgbNumber() {
const rgb = this.rgb().color;
return ((rgb[0] & 0xFF) << 16) | ((rgb[1] & 0xFF) << 8) | (rgb[2] & 0xFF);
},
luminosity() {
// http://www.w3.org/TR/WCAG20/#relativeluminancedef
const rgb = this.rgb().color;
const lum = [];
for (const [i, element] of rgb.entries()) {
const chan = element / 255;
lum[i] = (chan <= 0.04045) ? chan / 12.92 : ((chan + 0.055) / 1.055) ** 2.4;
}
return 0.2126 * lum[0] + 0.7152 * lum[1] + 0.0722 * lum[2];
},
contrast(color2) {
// http://www.w3.org/TR/WCAG20/#contrast-ratiodef
const lum1 = this.luminosity();
const lum2 = color2.luminosity();
if (lum1 > lum2) {
return (lum1 + 0.05) / (lum2 + 0.05);
}
return (lum2 + 0.05) / (lum1 + 0.05);
},
level(color2) {
// https://www.w3.org/TR/WCAG/#contrast-enhanced
const contrastRatio = this.contrast(color2);
if (contrastRatio >= 7) {
return 'AAA';
}
return (contrastRatio >= 4.5) ? 'AA' : '';
},
isDark() {
// YIQ equation from http://24ways.org/2010/calculating-color-contrast
const rgb = this.rgb().color;
const yiq = (rgb[0] * 2126 + rgb[1] * 7152 + rgb[2] * 722) / 10000;
return yiq < 128;
},
isLight() {
return !this.isDark();
},
negate() {
const rgb = this.rgb();
for (let i = 0; i < 3; i++) {
rgb.color[i] = 255 - rgb.color[i];
}
return rgb;
},
lighten(ratio) {
const hsl = this.hsl();
hsl.color[2] += hsl.color[2] * ratio;
return hsl;
},
darken(ratio) {
const hsl = this.hsl();
hsl.color[2] -= hsl.color[2] * ratio;
return hsl;
},
saturate(ratio) {
const hsl = this.hsl();
hsl.color[1] += hsl.color[1] * ratio;
return hsl;
},
desaturate(ratio) {
const hsl = this.hsl();
hsl.color[1] -= hsl.color[1] * ratio;
return hsl;
},
whiten(ratio) {
const hwb = this.hwb();
hwb.color[1] += hwb.color[1] * ratio;
return hwb;
},
blacken(ratio) {
const hwb = this.hwb();
hwb.color[2] += hwb.color[2] * ratio;
return hwb;
},
grayscale() {
// http://en.wikipedia.org/wiki/Grayscale#Converting_color_to_grayscale
const rgb = this.rgb().color;
const value = rgb[0] * 0.3 + rgb[1] * 0.59 + rgb[2] * 0.11;
return Color.rgb(value, value, value);
},
fade(ratio) {
return this.alpha(this.valpha - (this.valpha * ratio));
},
opaquer(ratio) {
return this.alpha(this.valpha + (this.valpha * ratio));
},
rotate(degrees) {
const hsl = this.hsl();
let hue = hsl.color[0];
hue = (hue + degrees) % 360;
hue = hue < 0 ? 360 + hue : hue;
hsl.color[0] = hue;
return hsl;
},
mix(mixinColor, weight) {
// Ported from sass implementation in C
// https://github.com/sass/libsass/blob/0e6b4a2850092356aa3ece07c6b249f0221caced/functions.cpp#L209
if (!mixinColor || !mixinColor.rgb) {
throw new Error('Argument to "mix" was not a Color instance, but rather an instance of ' + typeof mixinColor);
}
const color1 = mixinColor.rgb();
const color2 = this.rgb();
const p = weight === undefined ? 0.5 : weight;
const w = 2 * p - 1;
const a = color1.alpha() - color2.alpha();
const w1 = (((w * a === -1) ? w : (w + a) / (1 + w * a)) + 1) / 2;
const w2 = 1 - w1;
return Color.rgb(
w1 * color1.red() + w2 * color2.red(),
w1 * color1.green() + w2 * color2.green(),
w1 * color1.blue() + w2 * color2.blue(),
color1.alpha() * p + color2.alpha() * (1 - p));
},
};
// Model conversion methods and static constructors
for (const model of Object.keys(convert)) {
if (skippedModels.includes(model)) {
continue;
}
const {channels} = convert[model];
// Conversion methods
Color.prototype[model] = function (...args) {
if (this.model === model) {
return new Color(this);
}
if (args.length > 0) {
return new Color(args, model);
}
return new Color([...assertArray(convert[this.model][model].raw(this.color)), this.valpha], model);
};
// 'static' construction methods
Color[model] = function (...args) {
let color = args[0];
if (typeof color === 'number') {
color = zeroArray(args, channels);
}
return new Color(color, model);
};
}
function roundTo(number, places) {
return Number(number.toFixed(places));
}
function roundToPlace(places) {
return function (number) {
return roundTo(number, places);
};
}
function getset(model, channel, modifier) {
model = Array.isArray(model) ? model : [model];
for (const m of model) {
(limiters[m] || (limiters[m] = []))[channel] = modifier;
}
model = model[0];
return function (value) {
let result;
if (value !== undefined) {
if (modifier) {
value = modifier(value);
}
result = this[model]();
result.color[channel] = value;
return result;
}
result = this[model]().color[channel];
if (modifier) {
result = modifier(result);
}
return result;
};
}
function maxfn(max) {
return function (v) {
return Math.max(0, Math.min(max, v));
};
}
function assertArray(value) {
return Array.isArray(value) ? value : [value];
}
function zeroArray(array, length) {
for (let i = 0; i < length; i++) {
if (typeof array[i] !== 'number') {
array[i] = 0;
}
}
return array;
}
module.exports = Color;

47
node_modules/color/package.json generated vendored Normal file
View File

@@ -0,0 +1,47 @@
{
"name": "color",
"version": "4.2.3",
"description": "Color conversion and manipulation with CSS string support",
"sideEffects": false,
"keywords": [
"color",
"colour",
"css"
],
"authors": [
"Josh Junon <josh@junon.me>",
"Heather Arthur <fayearthur@gmail.com>",
"Maxime Thirouin"
],
"license": "MIT",
"repository": "Qix-/color",
"xo": {
"rules": {
"no-cond-assign": 0,
"new-cap": 0,
"unicorn/prefer-module": 0,
"no-mixed-operators": 0,
"complexity": 0,
"unicorn/numeric-separators-style": 0
}
},
"files": [
"LICENSE",
"index.js"
],
"scripts": {
"pretest": "xo",
"test": "mocha"
},
"engines": {
"node": ">=12.5.0"
},
"dependencies": {
"color-convert": "^2.0.1",
"color-string": "^1.9.0"
},
"devDependencies": {
"mocha": "9.0.2",
"xo": "0.42.0"
}
}

17
node_modules/crc-32/.flowconfig generated vendored Normal file
View File

@@ -0,0 +1,17 @@
[ignore]
.*/node_modules/.*
.*/dist/.*
.*/test.js
.*/crc32.js
.*/bits/.*
.*/ctest/.*
.*/misc/.*
.*/perf/.*
[include]
crc32.flow.js
[libs]
[options]

4
node_modules/crc-32/.jshintrc generated vendored Normal file
View File

@@ -0,0 +1,4 @@
{
"bitwise": false,
"curly": false
}

10
node_modules/crc-32/.npmignore generated vendored Normal file
View File

@@ -0,0 +1,10 @@
node_modules
misc/
perf/
bits/
ctest/
test.js
.travis.yml
.jscs.json
perf.txt
Makefile

14
node_modules/crc-32/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,14 @@
Copyright (C) 2014 SheetJS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

65
node_modules/crc-32/README.md generated vendored Normal file
View File

@@ -0,0 +1,65 @@
# crc32
Standard CRC-32 algorithm implementation in JS (for the browser and nodejs).
Emphasis on correctness and performance.
## Installation
With [npm](https://www.npmjs.org/package/crc-32):
$ npm install crc-32
In the browser:
<script src="crc32.js"></script>
The script will manipulate `module.exports` if available (e.g. in a CommonJS
`require` context). This is not always desirable. To prevent the behavior,
define `DO_NOT_EXPORT_CRC` before the script is loaded.
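For example, a minimal sketch (any loading mechanism works, as long as the variable exists before `crc32.js` is evaluated):
```js
// Define the guard first (illustrative); crc32.js will then skip the
// module.exports / AMD branches and assign to the global CRC32 variable instead.
var DO_NOT_EXPORT_CRC = true;
```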
## Usage
In all cases, the relevant function takes a single argument representing data.
The return value is a signed 32-bit integer.
- `CRC32.buf(byte array or buffer)` assumes the argument is a set of 8-bit
unsigned integers (e.g. nodejs `Buffer` or simple array of ints).
- `CRC32.bstr(binary string)` interprets the argument as a binary string where
the `i`-th byte is the low byte of the UCS-2 char: `str.charCodeAt(i) & 0xFF`
- `CRC32.str(string)` interprets the argument as a standard JS string
For example:
```js
> // var CRC32 = require('crc-32'); // uncomment this line if in node
> CRC32.str("SheetJS") // -1647298270
> CRC32.bstr("SheetJS") // -1647298270
> CRC32.buf([ 83, 104, 101, 101, 116, 74, 83 ]) // -1647298270
> [CRC32.str("\u2603"), CRC32.str("\u0003")] // [ -1743909036, 1259060791 ]
> [CRC32.bstr("\u2603"), CRC32.bstr("\u0003")] // [ 1259060791, 1259060791 ]
> [CRC32.buf([0x2603]), CRC32.buf([0x0003])] // [ 1259060791, 1259060791 ]
```
## Testing
`make test` will run the node-based tests.
To run the in-browser tests, run a local server and go to the `ctest` directory.
To update the browser artifacts, run `make ctest`.
## License
Please consult the attached LICENSE file for details. All rights not explicitly
granted by the Apache 2.0 license are reserved by the Original Author.
## Badges
[![Build Status](https://travis-ci.org/SheetJS/js-crc32.svg?branch=master)](https://travis-ci.org/SheetJS/js-crc32)
[![Coverage Status](http://img.shields.io/coveralls/SheetJS/js-crc32/master.svg)](https://coveralls.io/r/SheetJS/js-crc32?branch=master)
[![Analytics](https://ga-beacon.appspot.com/UA-36810333-1/SheetJS/js-crc32?pixel)](https://github.com/SheetJS/js-crc32)

117
node_modules/crc-32/crc32.flow.js generated vendored Normal file
View File

@@ -0,0 +1,117 @@
/* crc32.js (C) 2014-2015 SheetJS -- http://sheetjs.com */
/* vim: set ts=2: */
var CRC32;
/*:: declare var DO_NOT_EXPORT_CRC: any; */
/*:: declare var define: any; */
(function (factory) {
if(typeof DO_NOT_EXPORT_CRC === 'undefined') {
if('object' === typeof exports) {
factory(exports);
} else if ('function' === typeof define && define.amd) {
define(function () {
var module = {};
factory(module);
return module;
});
} else {
factory(CRC32 = {});
}
} else {
factory(CRC32 = {});
}
}(function(CRC32) {
CRC32.version = '0.3.0';
/*::
type CRC32Type = number;
type ABuf = Array<number> | Buffer;
type CRC32TableType = Array<number> | Int32Array;
*/
/* see perf/crc32table.js */
function signed_crc_table()/*:CRC32TableType*/ {
var c = 0, table/*:Array<number>*/ = new Array(256);
for(var n =0; n != 256; ++n){
c = n;
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
table[n] = c;
}
return typeof Int32Array !== 'undefined' ? new Int32Array(table) : table;
}
var table = signed_crc_table();
/* charCodeAt is the best approach for binary strings */
var use_buffer = typeof Buffer !== 'undefined';
function crc32_bstr(bstr/*:string*/)/*:CRC32Type*/ {
if(bstr.length > 32768) if(use_buffer) return crc32_buf_8(new Buffer(bstr));
var crc = -1, L = bstr.length - 1;
for(var i = 0; i < L;) {
crc = table[(crc ^ bstr.charCodeAt(i++)) & 0xFF] ^ (crc >>> 8);
crc = table[(crc ^ bstr.charCodeAt(i++)) & 0xFF] ^ (crc >>> 8);
}
if(i === L) crc = (crc >>> 8) ^ table[(crc ^ bstr.charCodeAt(i)) & 0xFF];
return crc ^ -1;
}
function crc32_buf(buf/*:ABuf*/)/*:CRC32Type*/ {
if(buf.length > 10000) return crc32_buf_8(buf);
for(var crc = -1, i = 0, L=buf.length-3; i < L;) {
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
}
while(i < L+3) crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
return crc ^ -1;
}
function crc32_buf_8(buf/*:ABuf*/)/*:CRC32Type*/ {
for(var crc = -1, i = 0, L=buf.length-7; i < L;) {
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
}
while(i < L+7) crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
return crc ^ -1;
}
/* much much faster to intertwine utf8 and crc */
function crc32_str(str/*:string*/)/*:CRC32Type*/ {
for(var crc = -1, i = 0, L=str.length, c, d; i < L;) {
c = str.charCodeAt(i++);
if(c < 0x80) {
crc = (crc >>> 8) ^ table[(crc ^ c) & 0xFF];
} else if(c < 0x800) {
crc = (crc >>> 8) ^ table[(crc ^ (192|((c>>6)&31))) & 0xFF];
crc = (crc >>> 8) ^ table[(crc ^ (128|(c&63))) & 0xFF];
} else if(c >= 0xD800 && c < 0xE000) {
c = (c&1023)+64; d = str.charCodeAt(i++) & 1023;
crc = (crc >>> 8) ^ table[(crc ^ (240|((c>>8)&7))) & 0xFF];
crc = (crc >>> 8) ^ table[(crc ^ (128|((c>>2)&63))) & 0xFF];
crc = (crc >>> 8) ^ table[(crc ^ (128|((d>>6)&15)|(c&3))) & 0xFF];
crc = (crc >>> 8) ^ table[(crc ^ (128|(d&63))) & 0xFF];
} else {
crc = (crc >>> 8) ^ table[(crc ^ (224|((c>>12)&15))) & 0xFF];
crc = (crc >>> 8) ^ table[(crc ^ (128|((c>>6)&63))) & 0xFF];
crc = (crc >>> 8) ^ table[(crc ^ (128|(c&63))) & 0xFF];
}
}
return crc ^ -1;
}
CRC32.table = table;
CRC32.bstr = crc32_bstr;
CRC32.buf = crc32_buf;
CRC32.str = crc32_str;
}));

110
node_modules/crc-32/crc32.js generated vendored Normal file
View File

@@ -0,0 +1,110 @@
/* crc32.js (C) 2014-2015 SheetJS -- http://sheetjs.com */
/* vim: set ts=2: */
var CRC32;
(function (factory) {
if(typeof DO_NOT_EXPORT_CRC === 'undefined') {
if('object' === typeof exports) {
factory(exports);
} else if ('function' === typeof define && define.amd) {
define(function () {
var module = {};
factory(module);
return module;
});
} else {
factory(CRC32 = {});
}
} else {
factory(CRC32 = {});
}
}(function(CRC32) {
CRC32.version = '0.3.0';
/* see perf/crc32table.js */
function signed_crc_table() {
var c = 0, table = new Array(256);
for(var n =0; n != 256; ++n){
c = n;
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1));
table[n] = c;
}
return typeof Int32Array !== 'undefined' ? new Int32Array(table) : table;
}
var table = signed_crc_table();
/* charCodeAt is the best approach for binary strings */
var use_buffer = typeof Buffer !== 'undefined';
function crc32_bstr(bstr) {
if(bstr.length > 32768) if(use_buffer) return crc32_buf_8(new Buffer(bstr));
var crc = -1, L = bstr.length - 1;
for(var i = 0; i < L;) {
crc = table[(crc ^ bstr.charCodeAt(i++)) & 0xFF] ^ (crc >>> 8);
crc = table[(crc ^ bstr.charCodeAt(i++)) & 0xFF] ^ (crc >>> 8);
}
if(i === L) crc = (crc >>> 8) ^ table[(crc ^ bstr.charCodeAt(i)) & 0xFF];
return crc ^ -1;
}
function crc32_buf(buf) {
if(buf.length > 10000) return crc32_buf_8(buf);
for(var crc = -1, i = 0, L=buf.length-3; i < L;) {
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
}
while(i < L+3) crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
return crc ^ -1;
}
function crc32_buf_8(buf) {
for(var crc = -1, i = 0, L=buf.length-7; i < L;) {
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
}
while(i < L+7) crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF];
return crc ^ -1;
}
/* much much faster to intertwine utf8 and crc */
function crc32_str(str) {
for(var crc = -1, i = 0, L=str.length, c, d; i < L;) {
c = str.charCodeAt(i++);
if(c < 0x80) {
crc = (crc >>> 8) ^ table[(crc ^ c) & 0xFF];
} else if(c < 0x800) {
crc = (crc >>> 8) ^ table[(crc ^ (192|((c>>6)&31))) & 0xFF];
crc = (crc >>> 8) ^ table[(crc ^ (128|(c&63))) & 0xFF];
} else if(c >= 0xD800 && c < 0xE000) {
c = (c&1023)+64; d = str.charCodeAt(i++) & 1023;
crc = (crc >>> 8) ^ table[(crc ^ (240|((c>>8)&7))) & 0xFF];
crc = (crc >>> 8) ^ table[(crc ^ (128|((c>>2)&63))) & 0xFF];
crc = (crc >>> 8) ^ table[(crc ^ (128|((d>>6)&15)|(c&3))) & 0xFF];
crc = (crc >>> 8) ^ table[(crc ^ (128|(d&63))) & 0xFF];
} else {
crc = (crc >>> 8) ^ table[(crc ^ (224|((c>>12)&15))) & 0xFF];
crc = (crc >>> 8) ^ table[(crc ^ (128|((c>>6)&63))) & 0xFF];
crc = (crc >>> 8) ^ table[(crc ^ (128|(c&63))) & 0xFF];
}
}
return crc ^ -1;
}
CRC32.table = table;
CRC32.bstr = crc32_bstr;
CRC32.buf = crc32_buf;
CRC32.str = crc32_str;
}));
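For orientation, here is a minimal, hedged sketch of how the exports attached above (`CRC32.str`, `CRC32.buf`, `CRC32.bstr`) are typically consumed from Node.js; the require name assumes the package is resolvable as `crc-32`:

```js
// Sketch only: exercises the three entry points defined in crc32.js above.
var CRC32 = require('crc-32');

var s = 'hello world';
console.log(CRC32.str(s));              // CRC-32 of a JS string, UTF-8 folded into the loop
console.log(CRC32.buf(Buffer.from(s))); // CRC-32 of raw bytes (a Buffer is array-like)
console.log(CRC32.bstr(s));             // CRC-32 of a binary string, one char per byte
// All three return the same signed 32-bit value for ASCII-only input.
```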

25
node_modules/crc-32/package.json generated vendored Normal file
View File

@@ -0,0 +1,25 @@
{
"name": "crc-32",
"version": "0.3.0",
"author": "sheetjs",
"description": "Pure-JS CRC-32",
"keywords": [ "crc32", "checksum", "crc" ],
"main": "./crc32",
"devDependencies": {
"mocha":"",
"uglify-js":""
},
"repository": { "type":"git", "url":"git://github.com/SheetJS/js-crc32.git" },
"scripts": {
"pretest": "git submodule init && git submodule update",
"test": "make test"
},
"config": {
"blanket": {
"pattern": "crc32.js"
}
},
"bugs": { "url": "https://github.com/SheetJS/js-crc32/issues" },
"license": "Apache-2.0",
"engines": { "node": ">=0.8" }
}

22
node_modules/decompress-response/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,22 @@
/// <reference types="node"/>
import {IncomingMessage} from 'http';
/**
Decompress an HTTP response if needed.
@param response - The HTTP incoming stream with compressed data.
@returns The decompressed HTTP response stream.
@example
```
import https = require('https');
import decompressResponse = require('decompress-response');
https.get('https://sindresorhus.com', response => {
response = decompressResponse(response);
});
```
*/
declare function decompressResponse(response: IncomingMessage): IncomingMessage;
export = decompressResponse;

58
node_modules/decompress-response/index.js generated vendored Normal file
View File

@@ -0,0 +1,58 @@
'use strict';
const {Transform, PassThrough} = require('stream');
const zlib = require('zlib');
const mimicResponse = require('mimic-response');
module.exports = response => {
const contentEncoding = (response.headers['content-encoding'] || '').toLowerCase();
if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) {
return response;
}
// TODO: Remove this when targeting Node.js 12.
const isBrotli = contentEncoding === 'br';
if (isBrotli && typeof zlib.createBrotliDecompress !== 'function') {
response.destroy(new Error('Brotli is not supported on Node.js < 12'));
return response;
}
let isEmpty = true;
const checker = new Transform({
transform(data, _encoding, callback) {
isEmpty = false;
callback(null, data);
},
flush(callback) {
callback();
}
});
const finalStream = new PassThrough({
autoDestroy: false,
destroy(error, callback) {
response.destroy();
callback(error);
}
});
const decompressStream = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip();
decompressStream.once('error', error => {
if (isEmpty && !response.readable) {
finalStream.end();
return;
}
finalStream.destroy(error);
});
mimicResponse(response, finalStream);
response.pipe(checker).pipe(decompressStream).pipe(finalStream);
return finalStream;
};
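As a usage sketch (not part of the package itself), the wrapper above can be applied to an incoming response and read like any readable stream; the URL below is hypothetical and Node.js is assumed:

```js
// Sketch: decompress (if needed) and collect a response body.
const https = require('https');
const decompressResponse = require('decompress-response');

https.get('https://example.com', response => {   // hypothetical URL
	const stream = decompressResponse(response);   // PassThrough, or the original response if not compressed
	let body = '';
	stream.setEncoding('utf8');
	stream.on('data', chunk => { body += chunk; });
	stream.on('end', () => console.log(body.length, 'characters'));
});
```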

9
node_modules/decompress-response/license generated vendored Normal file
View File

@@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

56
node_modules/decompress-response/package.json generated vendored Normal file
View File

@@ -0,0 +1,56 @@
{
"name": "decompress-response",
"version": "6.0.0",
"description": "Decompress a HTTP response if needed",
"license": "MIT",
"repository": "sindresorhus/decompress-response",
"funding": "https://github.com/sponsors/sindresorhus",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "https://sindresorhus.com"
},
"engines": {
"node": ">=10"
},
"scripts": {
"test": "xo && ava && tsd"
},
"files": [
"index.js",
"index.d.ts"
],
"keywords": [
"decompress",
"response",
"http",
"https",
"zlib",
"gzip",
"zip",
"deflate",
"unzip",
"ungzip",
"incoming",
"message",
"stream",
"compressed",
"brotli"
],
"dependencies": {
"mimic-response": "^3.1.0"
},
"devDependencies": {
"@types/node": "^14.0.1",
"ava": "^2.2.0",
"get-stream": "^5.0.0",
"pify": "^5.0.0",
"tsd": "^0.11.0",
"xo": "^0.30.0"
},
"xo": {
"rules": {
"@typescript-eslint/prefer-readonly-parameter-types": "off"
}
}
}

48
node_modules/decompress-response/readme.md generated vendored Normal file
View File

@@ -0,0 +1,48 @@
# decompress-response [![Build Status](https://travis-ci.com/sindresorhus/decompress-response.svg?branch=master)](https://travis-ci.com/sindresorhus/decompress-response)
> Decompress an HTTP response if needed
Decompresses the [response](https://nodejs.org/api/http.html#http_class_http_incomingmessage) from [`http.request`](https://nodejs.org/api/http.html#http_http_request_options_callback) if it's gzipped, deflated or compressed with Brotli, otherwise just passes it through.
Used by [`got`](https://github.com/sindresorhus/got).
## Install
```
$ npm install decompress-response
```
## Usage
```js
const https = require('https');
const decompressResponse = require('decompress-response');
https.get('https://sindresorhus.com', response => {
response = decompressResponse(response);
});
```
## API
### decompressResponse(response)
Returns the decompressed HTTP response stream.
#### response
Type: [`http.IncomingMessage`](https://nodejs.org/api/http.html#http_class_http_incomingmessage)
The HTTP incoming stream with compressed data.
---
<div align="center">
<b>
<a href="https://tidelift.com/subscription/pkg/npm-decompress-response?utm_source=npm-decompress-response&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
</b>
<br>
<sub>
Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
</sub>
</div>
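One detail worth noting from the implementation: because the returned stream is run through `mimic-response`, response metadata such as `statusCode` and `headers` remains readable on it. A short hedged sketch (hypothetical URL, Node.js assumed):

```js
const http = require('http');
const decompressResponse = require('decompress-response');

http.get('http://example.com', response => {        // hypothetical URL
	const decompressed = decompressResponse(response);
	// statusCode and headers are mirrored from the original response by mimic-response.
	console.log(decompressed.statusCode, decompressed.headers['content-type']);
	decompressed.resume();                             // drain the body
});
```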

46
node_modules/deep-extend/CHANGELOG.md generated vendored Normal file
View File

@@ -0,0 +1,46 @@
Changelog
=========
v0.6.0
------
- Updated "devDependencies" versions to fix vulnerability alerts
- Dropped support of io.js and node.js v0.12.x and lower since new versions of
"devDependencies" couldn't work with those old node.js versions
(minimal supported version of node.js now is v4.0.0)
v0.5.1
------
- Fix prototype pollution vulnerability (thanks to @mwakerman for the PR)
- Avoid using deprecated Buffer API (thanks to @ChALkeR for the PR)
v0.5.0
------
- Auto-testing provided by Travis CI;
- Support older Node.JS versions (`v0.11.x` and `v0.10.x`);
- Removed tests files from npm package.
v0.4.2
------
- Fix for `null` as an argument.
v0.4.1
------
- Removed test code from <b>npm</b> package
([see pull request #21](https://github.com/unclechu/node-deep-extend/pull/21));
- Increased minimal version of Node from `0.4.0` to `0.12.0`
(because can't run tests on lesser version anyway).
v0.4.0
------
- **WARNING!** Broken backward compatibility with `v0.3.x`;
- Fixed bug with extending arrays instead of cloning;
- Deep cloning for arrays;
- Check for own property;
- Fixed some documentation issues;
- Strict JS mode.

20
node_modules/deep-extend/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2013-2018, Viacheslav Lotsmanov
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

91
node_modules/deep-extend/README.md generated vendored Normal file
View File

@@ -0,0 +1,91 @@
Deep Extend
===========
Recursive object extending.
[![Build Status](https://api.travis-ci.org/unclechu/node-deep-extend.svg?branch=master)](https://travis-ci.org/unclechu/node-deep-extend)
[![NPM](https://nodei.co/npm/deep-extend.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/deep-extend/)
Install
-------
```bash
$ npm install deep-extend
```
Usage
-----
```javascript
var deepExtend = require('deep-extend');
var obj1 = {
a: 1,
b: 2,
d: {
a: 1,
b: [],
c: { test1: 123, test2: 321 }
},
f: 5,
g: 123,
i: 321,
j: [1, 2]
};
var obj2 = {
b: 3,
c: 5,
d: {
b: { first: 'one', second: 'two' },
c: { test2: 222 }
},
e: { one: 1, two: 2 },
f: [],
g: (void 0),
h: /abc/g,
i: null,
j: [3, 4]
};
deepExtend(obj1, obj2);
console.log(obj1);
/*
{ a: 1,
b: 3,
d:
{ a: 1,
b: { first: 'one', second: 'two' },
c: { test1: 123, test2: 222 } },
f: [],
g: undefined,
c: 5,
e: { one: 1, two: 2 },
h: /abc/g,
i: null,
j: [3, 4] }
*/
```
Unit testing
------------
```bash
$ npm test
```
Changelog
---------
[CHANGELOG.md](./CHANGELOG.md)
Any issues?
-----------
Please report any issues
[here](https://github.com/unclechu/node-deep-extend/issues).
License
-------
[MIT](./LICENSE)

1
node_modules/deep-extend/index.js generated vendored Normal file
View File

@@ -0,0 +1 @@
module.exports = require('./lib/deep-extend');

150
node_modules/deep-extend/lib/deep-extend.js generated vendored Normal file
View File

@@ -0,0 +1,150 @@
/*!
* @description Recursive object extending
* @author Viacheslav Lotsmanov <lotsmanov89@gmail.com>
* @license MIT
*
* The MIT License (MIT)
*
* Copyright (c) 2013-2018 Viacheslav Lotsmanov
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
'use strict';
function isSpecificValue(val) {
return (
val instanceof Buffer
|| val instanceof Date
|| val instanceof RegExp
) ? true : false;
}
function cloneSpecificValue(val) {
if (val instanceof Buffer) {
var x = Buffer.alloc
? Buffer.alloc(val.length)
: new Buffer(val.length);
val.copy(x);
return x;
} else if (val instanceof Date) {
return new Date(val.getTime());
} else if (val instanceof RegExp) {
return new RegExp(val);
} else {
throw new Error('Unexpected situation');
}
}
/**
* Recursively clones an array.
*/
function deepCloneArray(arr) {
var clone = [];
arr.forEach(function (item, index) {
if (typeof item === 'object' && item !== null) {
if (Array.isArray(item)) {
clone[index] = deepCloneArray(item);
} else if (isSpecificValue(item)) {
clone[index] = cloneSpecificValue(item);
} else {
clone[index] = deepExtend({}, item);
}
} else {
clone[index] = item;
}
});
return clone;
}
function safeGetProperty(object, property) {
return property === '__proto__' ? undefined : object[property];
}
/**
* Extends the object passed as the first argument.
*
* Returns the extended object, or false if there is no target object or it has an incorrect type.
*
* If you wish to clone the source object (without modifying it), just use an empty new
* object as first argument, like this:
* deepExtend({}, yourObj_1, [yourObj_N]);
*/
var deepExtend = module.exports = function (/*obj_1, [obj_2], [obj_N]*/) {
if (arguments.length < 1 || typeof arguments[0] !== 'object') {
return false;
}
if (arguments.length < 2) {
return arguments[0];
}
var target = arguments[0];
// convert arguments to array and cut off target object
var args = Array.prototype.slice.call(arguments, 1);
var val, src, clone;
args.forEach(function (obj) {
// skip argument if isn't an object, is null, or is an array
if (typeof obj !== 'object' || obj === null || Array.isArray(obj)) {
return;
}
Object.keys(obj).forEach(function (key) {
src = safeGetProperty(target, key); // source value
val = safeGetProperty(obj, key); // new value
// recursion prevention
if (val === target) {
return;
/**
* if new value isn't object then just overwrite by new value
* instead of extending.
*/
} else if (typeof val !== 'object' || val === null) {
target[key] = val;
return;
// just clone arrays (and recursive clone objects inside)
} else if (Array.isArray(val)) {
target[key] = deepCloneArray(val);
return;
// custom cloning and overwrite for specific objects
} else if (isSpecificValue(val)) {
target[key] = cloneSpecificValue(val);
return;
// overwrite by new value if source isn't object or array
} else if (typeof src !== 'object' || src === null || Array.isArray(src)) {
target[key] = deepExtend({}, val);
return;
// source value and new value is objects both, extending...
} else {
target[key] = deepExtend(src, val);
return;
}
});
});
return target;
};
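For illustration only, a small sketch of the behaviour implemented above, including the `__proto__` guard in `safeGetProperty` (assumes the module is resolvable as `deep-extend`):

```js
// Sketch: recursive merge, array cloning, and prototype-pollution guard.
var deepExtend = require('deep-extend');

var target = { a: 1, nested: { keep: true } };
deepExtend(target, { nested: { add: 2 }, list: [1, 2] });
console.log(target.nested);   // { keep: true, add: 2 }  – plain objects are merged recursively
console.log(target.list);     // [ 1, 2 ]                – arrays are deep-cloned, not shared

// '__proto__' keys are neutralised by safeGetProperty, so Object.prototype stays clean.
deepExtend({}, JSON.parse('{"__proto__": {"polluted": true}}'));
console.log(({}).polluted);   // undefined
```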

62
node_modules/deep-extend/package.json generated vendored Normal file
View File

@@ -0,0 +1,62 @@
{
"name": "deep-extend",
"description": "Recursive object extending",
"license": "MIT",
"version": "0.6.0",
"homepage": "https://github.com/unclechu/node-deep-extend",
"keywords": [
"deep-extend",
"extend",
"deep",
"recursive",
"xtend",
"clone",
"merge",
"json"
],
"licenses": [
{
"type": "MIT",
"url": "https://raw.githubusercontent.com/unclechu/node-deep-extend/master/LICENSE"
}
],
"repository": {
"type": "git",
"url": "git://github.com/unclechu/node-deep-extend.git"
},
"author": "Viacheslav Lotsmanov <lotsmanov89@gmail.com>",
"bugs": "https://github.com/unclechu/node-deep-extend/issues",
"contributors": [
{
"name": "Romain Prieto",
"url": "https://github.com/rprieto"
},
{
"name": "Max Maximov",
"url": "https://github.com/maxmaximov"
},
{
"name": "Marshall Bowers",
"url": "https://github.com/maxdeviant"
},
{
"name": "Misha Wakerman",
"url": "https://github.com/mwakerman"
}
],
"main": "lib/deep-extend.js",
"engines": {
"node": ">=4.0.0"
},
"scripts": {
"test": "./node_modules/.bin/mocha"
},
"devDependencies": {
"mocha": "5.2.0",
"should": "13.2.1"
},
"files": [
"index.js",
"lib/"
]
}

201
node_modules/detect-libc/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Some files were not shown because too many files have changed in this diff.