diff --git a/node_modules/.bin/prebuild-install b/node_modules/.bin/prebuild-install new file mode 100644 index 000000000..0aeffa505 --- /dev/null +++ b/node_modules/.bin/prebuild-install @@ -0,0 +1,12 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../prebuild-install/bin.js" "$@" +else + exec node "$basedir/../prebuild-install/bin.js" "$@" +fi diff --git a/node_modules/.bin/prebuild-install.cmd b/node_modules/.bin/prebuild-install.cmd new file mode 100644 index 000000000..21ff9042d --- /dev/null +++ b/node_modules/.bin/prebuild-install.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\prebuild-install\bin.js" %* diff --git a/node_modules/.bin/prebuild-install.ps1 b/node_modules/.bin/prebuild-install.ps1 new file mode 100644 index 000000000..6e657a3b4 --- /dev/null +++ b/node_modules/.bin/prebuild-install.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../prebuild-install/bin.js" $args + } else { + & "$basedir/node$exe" "$basedir/../prebuild-install/bin.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../prebuild-install/bin.js" $args + } else { + & "node$exe" "$basedir/../prebuild-install/bin.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.bin/rc b/node_modules/.bin/rc new file mode 100644 index 000000000..e31cd8350 --- /dev/null +++ b/node_modules/.bin/rc @@ -0,0 +1,12 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../rc/cli.js" "$@" +else + exec node "$basedir/../rc/cli.js" "$@" +fi diff --git a/node_modules/.bin/rc.cmd b/node_modules/.bin/rc.cmd new file mode 100644 index 000000000..be16b7333 --- /dev/null +++ b/node_modules/.bin/rc.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\rc\cli.js" %* diff --git a/node_modules/.bin/rc.ps1 b/node_modules/.bin/rc.ps1 new file mode 100644 index 000000000..9a9b6e376 --- /dev/null +++ b/node_modules/.bin/rc.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input 
+ if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../rc/cli.js" $args + } else { + & "$basedir/node$exe" "$basedir/../rc/cli.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../rc/cli.js" $args + } else { + & "node$exe" "$basedir/../rc/cli.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver new file mode 100644 index 000000000..77443e787 --- /dev/null +++ b/node_modules/.bin/semver @@ -0,0 +1,12 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@" +else + exec node "$basedir/../semver/bin/semver.js" "$@" +fi diff --git a/node_modules/.bin/semver.cmd b/node_modules/.bin/semver.cmd new file mode 100644 index 000000000..9913fa9d0 --- /dev/null +++ b/node_modules/.bin/semver.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver.js" %* diff --git a/node_modules/.bin/semver.ps1 b/node_modules/.bin/semver.ps1 new file mode 100644 index 000000000..314717ad4 --- /dev/null +++ b/node_modules/.bin/semver.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args + } else { + & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../semver/bin/semver.js" $args + } else { + & "node$exe" "$basedir/../semver/bin/semver.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index 7884b79fe..82c029507 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -30,6 +30,48 @@ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dependencies": { + "buffer": "^5.5.0", + 
"inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/bl/node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/body-parser": { "version": "1.20.1", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", @@ -75,6 +117,29 @@ "concat-map": "0.0.1" } }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, "node_modules/buffer-from": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", @@ -111,6 +176,48 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" + }, + "node_modules/color": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", + "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", + "dependencies": { + "color-convert": "^2.0.1", + "color-string": "^1.9.0" + }, + "engines": { + "node": ">=12.5.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/color-string": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", + "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", + "dependencies": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -167,6 +274,14 @@ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" }, + "node_modules/crc-32": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-0.3.0.tgz", + "integrity": "sha512-kucVIjOmMc1f0tv53BJ/5WIX+MGLcKuoBhnGqQrgKJNqLByb/sVMWfW/Aw6hw0jgcqjJ2pi9E5y32zOIpaUlsA==", + "engines": { + "node": ">=0.8" + } + }, 
"node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -183,6 +298,28 @@ } } }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "engines": { + "node": ">=4.0.0" + } + }, "node_modules/depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -200,6 +337,14 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/detect-libc": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz", + "integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==", + "engines": { + "node": ">=8" + } + }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -213,6 +358,14 @@ "node": ">= 0.8" } }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dependencies": { + "once": "^1.4.0" + } + }, "node_modules/escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", @@ -226,6 +379,14 @@ "node": ">= 0.6" } }, + "node_modules/expand-template": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", + "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", + "engines": { + "node": ">=6" + } + }, "node_modules/express": { "version": "4.18.2", "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", @@ -345,6 +506,11 @@ "node": ">= 0.6" } }, + "node_modules/fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -368,6 +534,11 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/github-from-package": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", + "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==" + }, "node_modules/glob": { "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", @@ -435,6 +606,25 @@ "node": ">=0.10.0" } }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + 
"type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, "node_modules/inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -449,6 +639,11 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" + }, "node_modules/ipaddr.js": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", @@ -457,11 +652,27 @@ "node": ">= 0.10" } }, + "node_modules/is-arrayish": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" + }, "node_modules/isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -513,6 +724,17 @@ "node": ">= 0.6" } }, + "node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -543,6 +765,11 @@ "mkdirp": "bin/cmd.js" } }, + "node_modules/mkdirp-classic": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" + }, "node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -565,6 +792,11 @@ "node": ">= 6.0.0" } }, + "node_modules/napi-build-utils": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.2.tgz", + "integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==" + }, "node_modules/negotiator": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", @@ -573,6 +805,22 @@ "node": ">= 0.6" } }, + "node_modules/node-abi": { + "version": "3.31.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.31.0.tgz", + "integrity": "sha512-eSKV6s+APenqVh8ubJyiu/YhZgxQpGP66ntzUb3lY1xB9ukSRaGnx0AIxI+IM+1+IVYC1oWobgG5L3Lt9ARykQ==", + 
"dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-addon-api": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz", + "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==" + }, "node_modules/node-rest-client": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/node-rest-client/-/node-rest-client-3.1.1.tgz", @@ -642,6 +890,53 @@ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, + "node_modules/png-chunk-text": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/png-chunk-text/-/png-chunk-text-1.0.0.tgz", + "integrity": "sha512-DEROKU3SkkLGWNMzru3xPVgxyd48UGuMSZvioErCure6yhOc/pRH2ZV+SEn7nmaf7WNf3NdIpH+UTrRdKyq9Lw==" + }, + "node_modules/png-chunks-encode": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/png-chunks-encode/-/png-chunks-encode-1.0.0.tgz", + "integrity": "sha512-J1jcHgbQRsIIgx5wxW9UmCymV3wwn4qCCJl6KYgEU/yHCh/L2Mwq/nMOkRPtmV79TLxRZj5w3tH69pvygFkDqA==", + "dependencies": { + "crc-32": "^0.3.0", + "sliced": "^1.0.1" + } + }, + "node_modules/png-chunks-extract": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/png-chunks-extract/-/png-chunks-extract-1.0.0.tgz", + "integrity": "sha512-ZiVwF5EJ0DNZyzAqld8BP1qyJBaGOFaq9zl579qfbkcmOwWLLO4I9L8i2O4j3HkI6/35i0nKG2n+dZplxiT89Q==", + "dependencies": { + "crc-32": "^0.3.0" + } + }, + "node_modules/prebuild-install": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.1.tgz", + "integrity": "sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==", + "dependencies": { + "detect-libc": "^2.0.0", + "expand-template": "^2.0.3", + "github-from-package": "0.0.0", + "minimist": "^1.2.3", + "mkdirp-classic": "^0.5.3", + "napi-build-utils": "^1.0.1", + "node-abi": "^3.3.0", + "pump": "^3.0.0", + "rc": "^1.2.7", + "simple-get": "^4.0.0", + "tar-fs": "^2.0.0", + "tunnel-agent": "^0.6.0" + }, + "bin": { + "prebuild-install": "bin.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", @@ -659,6 +954,15 @@ "node": ">= 0.10" } }, + "node_modules/pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, "node_modules/qs": { "version": "6.11.0", "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", @@ -695,6 +999,20 @@ "node": ">= 0.8" } }, + "node_modules/rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "bin": { + "rc": "cli.js" + } + }, "node_modules/readable-stream": { "version": "2.3.7", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", @@ -757,6 +1075,20 @@ "resolved": 
"https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" }, + "node_modules/semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/send": { "version": "0.18.0", "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", @@ -817,6 +1149,28 @@ "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" }, + "node_modules/sharp": { + "version": "0.31.3", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.31.3.tgz", + "integrity": "sha512-XcR4+FCLBFKw1bdB+GEhnUNXNXvnt0tDo4WsBsraKymuo/IAuPuCBVAL2wIkUw2r/dwFW5Q5+g66Kwl2dgDFVg==", + "hasInstallScript": true, + "dependencies": { + "color": "^4.2.3", + "detect-libc": "^2.0.1", + "node-addon-api": "^5.0.0", + "prebuild-install": "^7.1.1", + "semver": "^7.3.8", + "simple-get": "^4.0.1", + "tar-fs": "^2.1.1", + "tunnel-agent": "^0.6.0" + }, + "engines": { + "node": ">=14.15.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, "node_modules/side-channel": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", @@ -830,6 +1184,62 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/simple-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", + "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/simple-get": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", + "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "decompress-response": "^6.0.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "node_modules/simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==", + "dependencies": { + "is-arrayish": "^0.3.1" + } + }, + "node_modules/sliced": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz", + "integrity": "sha512-VZBmZP8WU3sMOZm1bdgTadsQbcscK0UM8oKxKVBs4XAhUo2Xxzm/OFMGBkPusxw9xL3Uy8LrzEqGqJhclsr0yA==" + }, "node_modules/statuses": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", @@ -859,6 +1269,53 @@ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": 
"sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, + "node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tar-fs": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz", + "integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==", + "dependencies": { + "chownr": "^1.1.1", + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^2.1.4" + } + }, + "node_modules/tar-stream": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "dependencies": { + "bl": "^4.0.3", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tar-stream/node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/toidentifier": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", @@ -867,6 +1324,17 @@ "node": ">=0.6" } }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, "node_modules/type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -945,6 +1413,11 @@ "engines": { "node": ">=0.4" } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" } } } diff --git a/node_modules/base64-js/LICENSE b/node_modules/base64-js/LICENSE new file mode 100644 index 000000000..6d52b8acf --- /dev/null +++ b/node_modules/base64-js/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Jameson Little + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/base64-js/README.md b/node_modules/base64-js/README.md new file mode 100644 index 000000000..b42a48f41 --- /dev/null +++ b/node_modules/base64-js/README.md @@ -0,0 +1,34 @@ +base64-js +========= + +`base64-js` does basic base64 encoding/decoding in pure JS. + +[![build status](https://secure.travis-ci.org/beatgammit/base64-js.png)](http://travis-ci.org/beatgammit/base64-js) + +Many browsers already have base64 encoding/decoding functionality, but it is for text data, not all-purpose binary data. + +Sometimes encoding/decoding binary data in the browser is useful, and that is what this module does. + +## install + +With [npm](https://npmjs.org) do: + +`npm install base64-js` and `var base64js = require('base64-js')` + +For use in web browsers do: + +`` + +[Get supported base64-js with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-base64-js?utm_source=npm-base64-js&utm_medium=referral&utm_campaign=readme) + +## methods + +`base64js` has three exposed functions, `byteLength`, `toByteArray` and `fromByteArray`, which both take a single argument. + +* `byteLength` - Takes a base64 string and returns length of byte array +* `toByteArray` - Takes a base64 string and returns a byte array +* `fromByteArray` - Takes a byte array and returns a base64 string + +## license + +MIT diff --git a/node_modules/base64-js/base64js.min.js b/node_modules/base64-js/base64js.min.js new file mode 100644 index 000000000..908ac83fd --- /dev/null +++ b/node_modules/base64-js/base64js.min.js @@ -0,0 +1 @@ +(function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"==typeof window?"undefined"==typeof global?"undefined"==typeof self?this:self:global:window,b.base64js=a()}})(function(){return function(){function b(d,e,g){function a(j,i){if(!e[j]){if(!d[j]){var f="function"==typeof require&&require;if(!i&&f)return f(j,!0);if(h)return h(j,!0);var c=new Error("Cannot find module '"+j+"'");throw c.code="MODULE_NOT_FOUND",c}var k=e[j]={exports:{}};d[j][0].call(k.exports,function(b){var c=d[j][1][b];return a(c||b)},k,k.exports,b,d,e,g)}return e[j].exports}for(var h="function"==typeof require&&require,c=0;c>16,j[k++]=255&b>>8,j[k++]=255&b;return 2===h&&(b=l[a.charCodeAt(c)]<<2|l[a.charCodeAt(c+1)]>>4,j[k++]=255&b),1===h&&(b=l[a.charCodeAt(c)]<<10|l[a.charCodeAt(c+1)]<<4|l[a.charCodeAt(c+2)]>>2,j[k++]=255&b>>8,j[k++]=255&b),j}function g(a){return k[63&a>>18]+k[63&a>>12]+k[63&a>>6]+k[63&a]}function h(a,b,c){for(var d,e=[],f=b;fj?j:g+f));return 1===d?(b=a[c-1],e.push(k[b>>2]+k[63&b<<4]+"==")):2===d&&(b=(a[c-2]<<8)+a[c-1],e.push(k[b>>10]+k[63&b>>4]+k[63&b<<2]+"=")),e.join("")}c.byteLength=function(a){var b=d(a),c=b[0],e=b[1];return 3*(c+e)/4-e},c.toByteArray=f,c.fromByteArray=j;for(var k=[],l=[],m="undefined"==typeof Uint8Array?Array:Uint8Array,n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",o=0,p=n.length;o 0) { + throw new Error('Invalid string. 
Length must be a multiple of 4') + } + + // Trim off extra bytes after placeholder bytes are found + // See: https://github.com/beatgammit/base64-js/issues/42 + var validLen = b64.indexOf('=') + if (validLen === -1) validLen = len + + var placeHoldersLen = validLen === len + ? 0 + : 4 - (validLen % 4) + + return [validLen, placeHoldersLen] +} + +// base64 is 4/3 + up to two characters of the original data +function byteLength (b64) { + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function _byteLength (b64, validLen, placeHoldersLen) { + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function toByteArray (b64) { + var tmp + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + + var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) + + var curByte = 0 + + // if there are placeholders, only get up to the last complete 4 chars + var len = placeHoldersLen > 0 + ? validLen - 4 + : validLen + + var i + for (i = 0; i < len; i += 4) { + tmp = + (revLookup[b64.charCodeAt(i)] << 18) | + (revLookup[b64.charCodeAt(i + 1)] << 12) | + (revLookup[b64.charCodeAt(i + 2)] << 6) | + revLookup[b64.charCodeAt(i + 3)] + arr[curByte++] = (tmp >> 16) & 0xFF + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 2) { + tmp = + (revLookup[b64.charCodeAt(i)] << 2) | + (revLookup[b64.charCodeAt(i + 1)] >> 4) + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 1) { + tmp = + (revLookup[b64.charCodeAt(i)] << 10) | + (revLookup[b64.charCodeAt(i + 1)] << 4) | + (revLookup[b64.charCodeAt(i + 2)] >> 2) + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + return arr +} + +function tripletToBase64 (num) { + return lookup[num >> 18 & 0x3F] + + lookup[num >> 12 & 0x3F] + + lookup[num >> 6 & 0x3F] + + lookup[num & 0x3F] +} + +function encodeChunk (uint8, start, end) { + var tmp + var output = [] + for (var i = start; i < end; i += 3) { + tmp = + ((uint8[i] << 16) & 0xFF0000) + + ((uint8[i + 1] << 8) & 0xFF00) + + (uint8[i + 2] & 0xFF) + output.push(tripletToBase64(tmp)) + } + return output.join('') +} + +function fromByteArray (uint8) { + var tmp + var len = uint8.length + var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes + var parts = [] + var maxChunkLength = 16383 // must be multiple of 3 + + // go through the array every three bytes, we'll deal with trailing stuff later + for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { + parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) + } + + // pad the end with zeros, but make sure to not forget the extra bytes + if (extraBytes === 1) { + tmp = uint8[len - 1] + parts.push( + lookup[tmp >> 2] + + lookup[(tmp << 4) & 0x3F] + + '==' + ) + } else if (extraBytes === 2) { + tmp = (uint8[len - 2] << 8) + uint8[len - 1] + parts.push( + lookup[tmp >> 10] + + lookup[(tmp >> 4) & 0x3F] + + lookup[(tmp << 2) & 0x3F] + + '=' + ) + } + + return parts.join('') +} diff --git a/node_modules/base64-js/package.json b/node_modules/base64-js/package.json new file mode 100644 index 000000000..c3972e39f --- /dev/null +++ b/node_modules/base64-js/package.json @@ -0,0 +1,47 @@ +{ + "name": "base64-js", + "description": "Base64 encoding/decoding in pure JS", + "version": "1.5.1", + "author": "T. 
Jameson Little ", + "typings": "index.d.ts", + "bugs": { + "url": "https://github.com/beatgammit/base64-js/issues" + }, + "devDependencies": { + "babel-minify": "^0.5.1", + "benchmark": "^2.1.4", + "browserify": "^16.3.0", + "standard": "*", + "tape": "4.x" + }, + "homepage": "https://github.com/beatgammit/base64-js", + "keywords": [ + "base64" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/beatgammit/base64-js.git" + }, + "scripts": { + "build": "browserify -s base64js -r ./ | minify > base64js.min.js", + "lint": "standard", + "test": "npm run lint && npm run unit", + "unit": "tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/node_modules/bl/.travis.yml b/node_modules/bl/.travis.yml new file mode 100644 index 000000000..016eaf556 --- /dev/null +++ b/node_modules/bl/.travis.yml @@ -0,0 +1,17 @@ +sudo: false +arch: + - amd64 + - ppc64le +language: node_js +node_js: + - '6' + - '8' + - '10' + - '12' + - '14' + - '15' + - lts/* +notifications: + email: + - rod@vagg.org + - matteo.collina@gmail.com diff --git a/node_modules/bl/BufferList.js b/node_modules/bl/BufferList.js new file mode 100644 index 000000000..471ee7788 --- /dev/null +++ b/node_modules/bl/BufferList.js @@ -0,0 +1,396 @@ +'use strict' + +const { Buffer } = require('buffer') +const symbol = Symbol.for('BufferList') + +function BufferList (buf) { + if (!(this instanceof BufferList)) { + return new BufferList(buf) + } + + BufferList._init.call(this, buf) +} + +BufferList._init = function _init (buf) { + Object.defineProperty(this, symbol, { value: true }) + + this._bufs = [] + this.length = 0 + + if (buf) { + this.append(buf) + } +} + +BufferList.prototype._new = function _new (buf) { + return new BufferList(buf) +} + +BufferList.prototype._offset = function _offset (offset) { + if (offset === 0) { + return [0, 0] + } + + let tot = 0 + + for (let i = 0; i < this._bufs.length; i++) { + const _t = tot + this._bufs[i].length + if (offset < _t || i === this._bufs.length - 1) { + return [i, offset - tot] + } + tot = _t + } +} + +BufferList.prototype._reverseOffset = function (blOffset) { + const bufferId = blOffset[0] + let offset = blOffset[1] + + for (let i = 0; i < bufferId; i++) { + offset += this._bufs[i].length + } + + return offset +} + +BufferList.prototype.get = function get (index) { + if (index > this.length || index < 0) { + return undefined + } + + const offset = this._offset(index) + + return this._bufs[offset[0]][offset[1]] +} + +BufferList.prototype.slice = function slice (start, end) { + if (typeof start === 'number' && start < 0) { + start += this.length + } + + if (typeof end === 'number' && end < 0) { + end += this.length + } + + return this.copy(null, 0, start, end) +} + +BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) { + if (typeof srcStart !== 'number' || srcStart < 0) { + srcStart = 0 + } + + if (typeof srcEnd !== 'number' || srcEnd > this.length) { + srcEnd = this.length + } + + if (srcStart >= this.length) { + return dst || Buffer.alloc(0) + } + + if (srcEnd <= 0) { + return dst || Buffer.alloc(0) + } + + const copy = !!dst + const off = this._offset(srcStart) + const len = srcEnd - srcStart + let bytes = len + let bufoff = (copy && dstStart) || 0 + let start = off[1] + + // copy/slice everything + if 
(srcStart === 0 && srcEnd === this.length) { + if (!copy) { + // slice, but full concat if multiple buffers + return this._bufs.length === 1 + ? this._bufs[0] + : Buffer.concat(this._bufs, this.length) + } + + // copy, need to copy individual buffers + for (let i = 0; i < this._bufs.length; i++) { + this._bufs[i].copy(dst, bufoff) + bufoff += this._bufs[i].length + } + + return dst + } + + // easy, cheap case where it's a subset of one of the buffers + if (bytes <= this._bufs[off[0]].length - start) { + return copy + ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes) + : this._bufs[off[0]].slice(start, start + bytes) + } + + if (!copy) { + // a slice, we need something to copy in to + dst = Buffer.allocUnsafe(len) + } + + for (let i = off[0]; i < this._bufs.length; i++) { + const l = this._bufs[i].length - start + + if (bytes > l) { + this._bufs[i].copy(dst, bufoff, start) + bufoff += l + } else { + this._bufs[i].copy(dst, bufoff, start, start + bytes) + bufoff += l + break + } + + bytes -= l + + if (start) { + start = 0 + } + } + + // safeguard so that we don't return uninitialized memory + if (dst.length > bufoff) return dst.slice(0, bufoff) + + return dst +} + +BufferList.prototype.shallowSlice = function shallowSlice (start, end) { + start = start || 0 + end = typeof end !== 'number' ? this.length : end + + if (start < 0) { + start += this.length + } + + if (end < 0) { + end += this.length + } + + if (start === end) { + return this._new() + } + + const startOffset = this._offset(start) + const endOffset = this._offset(end) + const buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1) + + if (endOffset[1] === 0) { + buffers.pop() + } else { + buffers[buffers.length - 1] = buffers[buffers.length - 1].slice(0, endOffset[1]) + } + + if (startOffset[1] !== 0) { + buffers[0] = buffers[0].slice(startOffset[1]) + } + + return this._new(buffers) +} + +BufferList.prototype.toString = function toString (encoding, start, end) { + return this.slice(start, end).toString(encoding) +} + +BufferList.prototype.consume = function consume (bytes) { + // first, normalize the argument, in accordance with how Buffer does it + bytes = Math.trunc(bytes) + // do nothing if not a positive number + if (Number.isNaN(bytes) || bytes <= 0) return this + + while (this._bufs.length) { + if (bytes >= this._bufs[0].length) { + bytes -= this._bufs[0].length + this.length -= this._bufs[0].length + this._bufs.shift() + } else { + this._bufs[0] = this._bufs[0].slice(bytes) + this.length -= bytes + break + } + } + + return this +} + +BufferList.prototype.duplicate = function duplicate () { + const copy = this._new() + + for (let i = 0; i < this._bufs.length; i++) { + copy.append(this._bufs[i]) + } + + return copy +} + +BufferList.prototype.append = function append (buf) { + if (buf == null) { + return this + } + + if (buf.buffer) { + // append a view of the underlying ArrayBuffer + this._appendBuffer(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength)) + } else if (Array.isArray(buf)) { + for (let i = 0; i < buf.length; i++) { + this.append(buf[i]) + } + } else if (this._isBufferList(buf)) { + // unwrap argument into individual BufferLists + for (let i = 0; i < buf._bufs.length; i++) { + this.append(buf._bufs[i]) + } + } else { + // coerce number arguments to strings, since Buffer(number) does + // uninitialized memory allocation + if (typeof buf === 'number') { + buf = buf.toString() + } + + this._appendBuffer(Buffer.from(buf)) + } + + return this +} + +BufferList.prototype._appendBuffer = function 
appendBuffer (buf) { + this._bufs.push(buf) + this.length += buf.length +} + +BufferList.prototype.indexOf = function (search, offset, encoding) { + if (encoding === undefined && typeof offset === 'string') { + encoding = offset + offset = undefined + } + + if (typeof search === 'function' || Array.isArray(search)) { + throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.') + } else if (typeof search === 'number') { + search = Buffer.from([search]) + } else if (typeof search === 'string') { + search = Buffer.from(search, encoding) + } else if (this._isBufferList(search)) { + search = search.slice() + } else if (Array.isArray(search.buffer)) { + search = Buffer.from(search.buffer, search.byteOffset, search.byteLength) + } else if (!Buffer.isBuffer(search)) { + search = Buffer.from(search) + } + + offset = Number(offset || 0) + + if (isNaN(offset)) { + offset = 0 + } + + if (offset < 0) { + offset = this.length + offset + } + + if (offset < 0) { + offset = 0 + } + + if (search.length === 0) { + return offset > this.length ? this.length : offset + } + + const blOffset = this._offset(offset) + let blIndex = blOffset[0] // index of which internal buffer we're working on + let buffOffset = blOffset[1] // offset of the internal buffer we're working on + + // scan over each buffer + for (; blIndex < this._bufs.length; blIndex++) { + const buff = this._bufs[blIndex] + + while (buffOffset < buff.length) { + const availableWindow = buff.length - buffOffset + + if (availableWindow >= search.length) { + const nativeSearchResult = buff.indexOf(search, buffOffset) + + if (nativeSearchResult !== -1) { + return this._reverseOffset([blIndex, nativeSearchResult]) + } + + buffOffset = buff.length - search.length + 1 // end of native search window + } else { + const revOffset = this._reverseOffset([blIndex, buffOffset]) + + if (this._match(revOffset, search)) { + return revOffset + } + + buffOffset++ + } + } + + buffOffset = 0 + } + + return -1 +} + +BufferList.prototype._match = function (offset, search) { + if (this.length - offset < search.length) { + return false + } + + for (let searchOffset = 0; searchOffset < search.length; searchOffset++) { + if (this.get(offset + searchOffset) !== search[searchOffset]) { + return false + } + } + return true +} + +;(function () { + const methods = { + readDoubleBE: 8, + readDoubleLE: 8, + readFloatBE: 4, + readFloatLE: 4, + readInt32BE: 4, + readInt32LE: 4, + readUInt32BE: 4, + readUInt32LE: 4, + readInt16BE: 2, + readInt16LE: 2, + readUInt16BE: 2, + readUInt16LE: 2, + readInt8: 1, + readUInt8: 1, + readIntBE: null, + readIntLE: null, + readUIntBE: null, + readUIntLE: null + } + + for (const m in methods) { + (function (m) { + if (methods[m] === null) { + BufferList.prototype[m] = function (offset, byteLength) { + return this.slice(offset, offset + byteLength)[m](0, byteLength) + } + } else { + BufferList.prototype[m] = function (offset = 0) { + return this.slice(offset, offset + methods[m])[m](0) + } + } + }(m)) + } +}()) + +// Used internally by the class and also as an indicator of this object being +// a `BufferList`. It's not possible to use `instanceof BufferList` in a browser +// environment because there could be multiple different copies of the +// BufferList class and some `BufferList`s might be `BufferList`s. 
+BufferList.prototype._isBufferList = function _isBufferList (b) { + return b instanceof BufferList || BufferList.isBufferList(b) +} + +BufferList.isBufferList = function isBufferList (b) { + return b != null && b[symbol] +} + +module.exports = BufferList diff --git a/node_modules/bl/LICENSE.md b/node_modules/bl/LICENSE.md new file mode 100644 index 000000000..ecbe51637 --- /dev/null +++ b/node_modules/bl/LICENSE.md @@ -0,0 +1,13 @@ +The MIT License (MIT) +===================== + +Copyright (c) 2013-2019 bl contributors +---------------------------------- + +*bl contributors listed at * + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/bl/README.md b/node_modules/bl/README.md new file mode 100644 index 000000000..9680b1dcb --- /dev/null +++ b/node_modules/bl/README.md @@ -0,0 +1,247 @@ +# bl *(BufferList)* + +[![Build Status](https://api.travis-ci.com/rvagg/bl.svg?branch=master)](https://travis-ci.com/rvagg/bl/) + +**A Node.js Buffer list collector, reader and streamer thingy.** + +[![NPM](https://nodei.co/npm/bl.svg)](https://nodei.co/npm/bl/) + +**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them! + +The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently. + +```js +const { BufferList } = require('bl') + +const bl = new BufferList() +bl.append(Buffer.from('abcd')) +bl.append(Buffer.from('efg')) +bl.append('hi') // bl will also accept & convert Strings +bl.append(Buffer.from('j')) +bl.append(Buffer.from([ 0x3, 0x4 ])) + +console.log(bl.length) // 12 + +console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij' +console.log(bl.slice(3, 10).toString('ascii')) // 'defghij' +console.log(bl.slice(3, 6).toString('ascii')) // 'def' +console.log(bl.slice(3, 8).toString('ascii')) // 'defgh' +console.log(bl.slice(5, 10).toString('ascii')) // 'fghij' + +console.log(bl.indexOf('def')) // 3 +console.log(bl.indexOf('asdf')) // -1 + +// or just use toString! 
+console.log(bl.toString()) // 'abcdefghij\u0003\u0004' +console.log(bl.toString('ascii', 3, 8)) // 'defgh' +console.log(bl.toString('ascii', 5, 10)) // 'fghij' + +// other standard Buffer readables +console.log(bl.readUInt16BE(10)) // 0x0304 +console.log(bl.readUInt16LE(10)) // 0x0403 +``` + +Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**: + +```js +const { BufferListStream } = require('bl') +const fs = require('fs') + +fs.createReadStream('README.md') + .pipe(BufferListStream((err, data) => { // note 'new' isn't strictly required + // `data` is a complete Buffer object containing the full data + console.log(data.toString()) + })) +``` + +Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream. + +Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!): + +```js +const hyperquest = require('hyperquest') +const { BufferListStream } = require('bl') + +const url = 'https://raw.github.com/rvagg/bl/master/README.md' + +hyperquest(url).pipe(BufferListStream((err, data) => { + console.log(data.toString()) +})) +``` + +Or, use it as a readable stream to recompose a list of Buffers to an output source: + +```js +const { BufferListStream } = require('bl') +const fs = require('fs') + +var bl = new BufferListStream() +bl.append(Buffer.from('abcd')) +bl.append(Buffer.from('efg')) +bl.append(Buffer.from('hi')) +bl.append(Buffer.from('j')) + +bl.pipe(fs.createWriteStream('gibberish.txt')) +``` + +## API + + * new BufferList([ buf ]) + * BufferList.isBufferList(obj) + * bl.length + * bl.append(buffer) + * bl.get(index) + * bl.indexOf(value[, byteOffset][, encoding]) + * bl.slice([ start[, end ] ]) + * bl.shallowSlice([ start[, end ] ]) + * bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ]) + * bl.duplicate() + * bl.consume(bytes) + * bl.toString([encoding, [ start, [ end ]]]) + * bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8() + * new BufferListStream([ callback ]) + +-------------------------------------------------------- + +### new BufferList([ Buffer | Buffer array | BufferList | BufferList array | String ]) +No arguments are _required_ for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects. + +`new` is not strictly required, if you don't instantiate a new object, it will be done automatically for you so you can create a new instance simply with: + +```js +const { BufferList } = require('bl') +const bl = BufferList() + +// equivalent to: + +const { BufferList } = require('bl') +const bl = new BufferList() +``` + +-------------------------------------------------------- + +### BufferList.isBufferList(obj) +Determines if the passed object is a `BufferList`. It will return `true` if the passed object is an instance of `BufferList` **or** `BufferListStream` and `false` otherwise. + +N.B. 
this won't return `true` for `BufferList` or `BufferListStream` instances created by versions of this library before this static method was added. + +-------------------------------------------------------- + +### bl.length +Get the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. Should accurately represent the total number of bytes that can be read from the list. + +-------------------------------------------------------- + +### bl.append(Buffer | Buffer array | BufferList | BufferList array | String) +`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained. + +-------------------------------------------------------- + +### bl.get(index) +`get()` will return the byte at the specified index. + +-------------------------------------------------------- + +### bl.indexOf(value[, byteOffset][, encoding]) +The `indexOf()` method returns the first index at which a given element can be found in the BufferList, or -1 if it is not present. + +-------------------------------------------------------- + +### bl.slice([ start, [ end ] ]) +`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively. + +If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer. + +-------------------------------------------------------- + +### bl.shallowSlice([ start, [ end ] ]) +`shallowSlice()` returns a new `BufferList` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively. + +No copies will be performed. All buffers in the result share memory with the original list. + +-------------------------------------------------------- + +### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ]) +`copy()` copies the content of the list into the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `srcStart` and `srcEnd` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively. + +-------------------------------------------------------- + +### bl.duplicate() +`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remain the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list. Example: + +```js +var bl = new BufferListStream() + +bl.append('hello') +bl.append(' world') +bl.append('\n') + +bl.duplicate().pipe(process.stdout, { end: false }) + +console.log(bl.toString()) +``` + +-------------------------------------------------------- + +### bl.consume(bytes) +`consume()` will shift bytes *off the start of the list*. The number of bytes consumed doesn't need to line up with the sizes of the internal Buffers; initial offsets will be calculated accordingly in order to give you a consistent view of the data.
+ +-------------------------------------------------------- + +### bl.toString([encoding, [ start, [ end ]]]) +`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information. + +-------------------------------------------------------- + +### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8() + +All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently. + +See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work. + +-------------------------------------------------------- + +### new BufferListStream([ callback | Buffer | Buffer array | BufferList | BufferList array | String ]) +**BufferListStream** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **BufferListStream** instance. + +The constructor takes an optional callback, if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance, when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream. + +Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` object. + +`new` is not strictly required, if you don't instantiate a new object, it will be done automatically for you so you can create a new instance simply with: + +```js +const { BufferListStream } = require('bl') +const bl = BufferListStream() + +// equivalent to: + +const { BufferListStream } = require('bl') +const bl = new BufferListStream() +``` + +N.B. For backwards compatibility reasons, `BufferListStream` is the **default** export when you `require('bl')`: + +```js +const { BufferListStream } = require('bl') +// equivalent to: +const BufferListStream = require('bl') +``` + +-------------------------------------------------------- + +## Contributors + +**bl** is brought to you by the following hackers: + + * [Rod Vagg](https://github.com/rvagg) + * [Matteo Collina](https://github.com/mcollina) + * [Jarett Cruger](https://github.com/jcrugzz) + + +## License & copyright + +Copyright (c) 2013-2019 bl contributors (listed above). + +bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details. 
diff --git a/node_modules/bl/bl.js b/node_modules/bl/bl.js new file mode 100644 index 000000000..40228f879 --- /dev/null +++ b/node_modules/bl/bl.js @@ -0,0 +1,84 @@ +'use strict' + +const DuplexStream = require('readable-stream').Duplex +const inherits = require('inherits') +const BufferList = require('./BufferList') + +function BufferListStream (callback) { + if (!(this instanceof BufferListStream)) { + return new BufferListStream(callback) + } + + if (typeof callback === 'function') { + this._callback = callback + + const piper = function piper (err) { + if (this._callback) { + this._callback(err) + this._callback = null + } + }.bind(this) + + this.on('pipe', function onPipe (src) { + src.on('error', piper) + }) + this.on('unpipe', function onUnpipe (src) { + src.removeListener('error', piper) + }) + + callback = null + } + + BufferList._init.call(this, callback) + DuplexStream.call(this) +} + +inherits(BufferListStream, DuplexStream) +Object.assign(BufferListStream.prototype, BufferList.prototype) + +BufferListStream.prototype._new = function _new (callback) { + return new BufferListStream(callback) +} + +BufferListStream.prototype._write = function _write (buf, encoding, callback) { + this._appendBuffer(buf) + + if (typeof callback === 'function') { + callback() + } +} + +BufferListStream.prototype._read = function _read (size) { + if (!this.length) { + return this.push(null) + } + + size = Math.min(size, this.length) + this.push(this.slice(0, size)) + this.consume(size) +} + +BufferListStream.prototype.end = function end (chunk) { + DuplexStream.prototype.end.call(this, chunk) + + if (this._callback) { + this._callback(null, this.slice()) + this._callback = null + } +} + +BufferListStream.prototype._destroy = function _destroy (err, cb) { + this._bufs.length = 0 + this.length = 0 + cb(err) +} + +BufferListStream.prototype._isBufferList = function _isBufferList (b) { + return b instanceof BufferListStream || b instanceof BufferList || BufferListStream.isBufferList(b) +} + +BufferListStream.isBufferList = BufferList.isBufferList + +module.exports = BufferListStream +module.exports.BufferListStream = BufferListStream +module.exports.BufferList = BufferList diff --git a/node_modules/bl/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/bl/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 000000000..f478d58dc --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. 
+ +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/bl/node_modules/readable-stream/GOVERNANCE.md b/node_modules/bl/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 000000000..16ffb93f2 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. 
+ +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/node_modules/bl/node_modules/readable-stream/LICENSE b/node_modules/bl/node_modules/readable-stream/LICENSE new file mode 100644 index 000000000..2873b3b2e --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/node_modules/bl/node_modules/readable-stream/README.md b/node_modules/bl/node_modules/readable-stream/README.md new file mode 100644 index 000000000..6f035ab16 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/README.md @@ -0,0 +1,106 @@ +# readable-stream + +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) + +```bash +npm install --save readable-stream +``` + +This package is a mirror of the streams implementations in Node.js. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.19.0/docs/api/stream.html). 
+ +If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries, are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core. For background, see [this blog post](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0, **readable-stream** uses semantic versioning. + +## Version 3.x.x + +v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and the latest Safari. The breaking changes introduced by v3 are composed of the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows: + +1. Error codes: https://github.com/nodejs/node/pull/13310, + https://github.com/nodejs/node/pull/13291, + https://github.com/nodejs/node/pull/16589, + https://github.com/nodejs/node/pull/15042, + https://github.com/nodejs/node/pull/15665, + https://github.com/nodejs/readable-stream/pull/344 +2. 'readable' has precedence over flowing + https://github.com/nodejs/node/pull/18994 +3. make virtual method errors consistent + https://github.com/nodejs/node/pull/18813 +4. updated streams error handling + https://github.com/nodejs/node/pull/18438 +5. writable.end should return this. + https://github.com/nodejs/node/pull/18780 +6. readable continues to read when push('') + https://github.com/nodejs/node/pull/18211 +7. add custom inspect to BufferList + https://github.com/nodejs/node/pull/17907 +8. always defer 'readable' with nextTick + https://github.com/nodejs/node/pull/17979 + +## Version 2.x.x +v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. + +### Big Thanks + +Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce] + +# Usage + +You can swap your `require('stream')` with `require('readable-stream')` +without any changes if you are just using one of the main classes and +functions. + +```js +const { + Readable, + Writable, + Transform, + Duplex, + pipeline, + finished +} = require('readable-stream') +``` + +Note that `require('stream')` will return `Stream`, while +`require('readable-stream')` will return `Readable`. We discourage using +the default export directly; instead, use one of the named properties as +shown in the example above. (A short end-to-end sketch appears below, after +the Streams Working Group notes.) + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes.
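As promised in the Usage section above, here is a minimal end-to-end sketch. It is not part of the upstream README and uses only names the README lists among the exports (`Transform` and `pipeline`):

```js
// Illustrative sketch: a Transform built from readable-stream, wired up
// with pipeline(); the same code works with Node's built-in 'stream' module.
const { Transform, pipeline } = require('readable-stream')

const upper = new Transform({
  transform (chunk, encoding, callback) {
    callback(null, chunk.toString().toUpperCase())
  }
})

pipeline(process.stdin, upper, process.stdout, (err) => {
  if (err) console.error('pipeline failed', err)
})
```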
+ + +## Team Members + +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> +* **Yoshua Wyuts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com> + +[sauce]: https://saucelabs.com diff --git a/node_modules/bl/node_modules/readable-stream/errors-browser.js b/node_modules/bl/node_modules/readable-stream/errors-browser.js new file mode 100644 index 000000000..fb8e73e18 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/errors-browser.js @@ -0,0 +1,127 @@ +'use strict'; + +function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } + +var codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error; + } + + function getMessage(arg1, arg2, arg3) { + if (typeof message === 'string') { + return message; + } else { + return message(arg1, arg2, arg3); + } + } + + var NodeError = + /*#__PURE__*/ + function (_Base) { + _inheritsLoose(NodeError, _Base); + + function NodeError(arg1, arg2, arg3) { + return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; + } + + return NodeError; + }(Base); + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + codes[code] = NodeError; +} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js + + +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + var len = expected.length; + expected = expected.map(function (i) { + return String(i); + }); + + if (len > 2) { + return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; + } else if (len === 2) { + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); + } else { + return "of ".concat(thing, " ").concat(expected[0]); + } + } else { + return "of ".concat(thing, " ").concat(String(expected)); + } +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith + + +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/node_modules/bl/node_modules/readable-stream/errors.js b/node_modules/bl/node_modules/readable-stream/errors.js new file mode 100644 index 000000000..8471526d6 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + 
return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/node_modules/bl/node_modules/readable-stream/experimentalWarning.js b/node_modules/bl/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 000000000..78e841495 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. 
This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? emitExperimentalWarning + : noop; diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 000000000..675251922 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,139 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. +'use strict'; +/**/ + +var objectKeys = Object.keys || function (obj) { + var keys = []; + + for (var key in obj) { + keys.push(key); + } + + return keys; +}; +/**/ + + +module.exports = Duplex; + +var Readable = require('./_stream_readable'); + +var Writable = require('./_stream_writable'); + +require('inherits')(Duplex, Readable); + +{ + // Allow the keys array to be GC'ed. 
+ var keys = objectKeys(Writable.prototype); + + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); // the no-half-open enforcer + +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; // no more data can be written. + // But allow more writes to happen in this tick. + + process.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 000000000..32e7414c5 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,39 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +require('inherits')(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 000000000..192d45148 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1124 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+'use strict'; + +module.exports = Readable; +/**/ + +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; +/**/ + +var EE = require('events').EventEmitter; + +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ + + +var Stream = require('./internal/streams/stream'); +/**/ + + +var Buffer = require('buffer').Buffer; + +var OurUint8Array = global.Uint8Array || function () {}; + +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} + +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +/**/ + + +var debugUtil = require('util'); + +var debug; + +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + + +var BufferList = require('./internal/streams/buffer_list'); + +var destroyImpl = require('./internal/streams/destroy'); + +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; + +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. + + +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; + +require('inherits')(Readable, Stream); + +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. 
Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + + this.sync = true; // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; // Should close be emitted on destroy. Defaults to true. + + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') + + this.autoDestroy = !!options.autoDestroy; // has it been destroyed + + this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ + this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s + + this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled + + this.readingMore = false; + this.decoder = null; + this.encoding = null; + + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); // legacy + + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; + +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; // Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+ + +Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; // Unshift should *always* be something directly out of read() + + +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + + + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + + return er; +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; // backwards compatibility. 
+ + +Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 + + this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: + + var p = this._readableState.buffer.head; + var content = ''; + + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + + this._readableState.buffer.clear(); + + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; // Don't raise the hwm > 1GB + + +var MAX_HWM = 0x40000000; + +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + + return n; +} // This function is designed to be inlinable, so please take care when making +// changes to the function body. + + +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } // If we're asking for more than the current hwm, then raise the hwm. + + + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; // Don't have enough + + if (!state.ended) { + state.needReadable = true; + return 0; + } + + return state.length; +} // you can override either this method, or the async _read(n) below. + + +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. + + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. 
Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + // if we need a readable event, then we need to do some reading. + + + var doRead = state.needReadable; + debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some + + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + + + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; // if the length is currently zero, then we *need* a readable event. + + if (state.length === 0) state.needReadable = true; // call internal read method + + this._read(state.highWaterMark); + + state.sync = false; // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. + + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + return ret; +}; + +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + + if (state.decoder) { + var chunk = state.decoder.end(); + + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + + state.ended = true; + + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} // Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. 
+ + +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} + +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + + + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} // at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. + + +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) // didn't get any data, stop spinning. + break; + } + + state.readingMore = false; +} // abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. 
+ + +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + + case 1: + state.pipes = [state.pipes, dest]; + break; + + default: + state.pipes.push(dest); + break; + } + + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + + + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + + function cleanup() { + debug('cleanup'); // cleanup event handlers once the pipe is broken + + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + src.on('data', ondata); + + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + + src.pause(); + } + } // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + + + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } // Make sure our error handler is attached before userland ones. + + + prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once. 
+ + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + + dest.once('close', onclose); + + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } // tell the dest that it's being piped to + + + dest.emit('pipe', src); // start the flow if it hasn't been started already. + + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; // if we're not piping anywhere, then do nothing. + + if (state.pipesCount === 0) return this; // just one destination. most common case. + + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; // got a match. + + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } // slow case. multiple pipe destinations. + + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + } + + return this; + } // try to find the right one. + + + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; // set up data events if they are asked for +// Ensure readable listeners eventually get something + + +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused + + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + + return res; +}; + +Readable.prototype.addListener = Readable.prototype.on; + +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. 
This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; + +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; + +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} // pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. + + +Readable.prototype.resume = function () { + var state = this._readableState; + + if (!state.flowing) { + debug('resume'); // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + + state.flowing = !state.readableListening; + resume(this, state); + } + + state.paused = false; + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + debug('resume', state.reading); + + if (!state.reading) { + stream.read(0); + } + + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + + this._readableState.paused = true; + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + + while (state.flowing && stream.read() !== null) { + ; + } +} // wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. + + +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode + + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + + if (!ret) { + paused = true; + stream.pause(); + } + }); // proxy all the other methods. 
+ // important when wrapping filters and duplexes. + + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } // proxy certain important events. + + + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } // when we try to consume some more bytes, simply unpause the + // underlying stream. + + + this._read = function (n) { + debug('wrapped _read', n); + + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); + } + + return createReadableStreamAsyncIterator(this); + }; +} + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); // exposed for testing purposes only. + +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); // Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. + +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift. 
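[Editor's note, not part of the vendored file: the Symbol.asyncIterator hook registered above is what backs for await...of consumption on recent Node versions. A small illustrative sketch:]

const { Readable } = require('stream');

async function collect() {
  const source = Readable.from(['one', 'two', 'three']);
  const seen = [];

  // Each iteration awaits the next chunk; the loop ends when the stream
  // emits 'end' and the promise rejects if the stream errors.
  for await (const chunk of source) {
    seen.push(chunk);
  }

  return seen;
}

collect().then((chunks) => console.log(chunks)); // ['one', 'two', 'three']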
+ + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} + +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + + return from(Readable, iterable, opts); + }; +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + + return -1; +} \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 000000000..41a738c4e --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,201 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. 
If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. +'use strict'; + +module.exports = Transform; + +var _require$codes = require('../errors').codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; + +var Duplex = require('./_stream_duplex'); + +require('inherits')(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + + ts.writechunk = null; + ts.writecb = null; + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; // start out asking for a readable event once data is transformed. + + this._readableState.needReadable = true; // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } // When the writable side finishes, then flush out anything remaining. 
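[Editor's note, not part of the vendored file: the Transform constructor above accepts transform and flush implementations through the options object. A hedged illustration of that API:]

const { Transform } = require('stream');

// A toy transform: upper-cases each incoming chunk.
const upcase = new Transform({
  transform(chunk, encoding, callback) {
    // Passing data to callback() pushes it to the readable side and
    // signals that this written chunk has been fully consumed.
    callback(null, chunk.toString().toUpperCase());
  }
});

process.stdin.pipe(upcase).pipe(process.stdout);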
+ + + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; // This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. + + +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; // Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. + + +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 000000000..a2634d7c2 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,697 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. +'use strict'; + +module.exports = Writable; +/* */ + +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} // It seems a linked list but it is not +// there will be only 2 of these for each stream + + +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ + + +var Duplex; +/**/ + +Writable.WritableState = WritableState; +/**/ + +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ + +var Stream = require('./internal/streams/stream'); +/**/ + + +var Buffer = require('buffer').Buffer; + +var OurUint8Array = global.Uint8Array || function () {}; + +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} + +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +var destroyImpl = require('./internal/streams/destroy'); + +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; + +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; + +var errorOrDestroy = destroyImpl.errorOrDestroy; + +require('inherits')(Writable, Stream); + +function nop() {} + +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. 
+ + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream + // contains buffers or objects. + + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called + + this.finalCalled = false; // drain event flag. + + this.needDrain = false; // at the start of calling end() + + this.ending = false; // when end() has been called, and returned + + this.ended = false; // when 'finish' is emitted + + this.finished = false; // has it been destroyed + + this.destroyed = false; // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + + this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + + this.length = 0; // a flag to see when we're in the middle of a write. + + this.writing = false; // when true all writes will be buffered until .uncork() call + + this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + + this.sync = true; // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + + this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) + + this.onwrite = function (er) { + onwrite(stream, er); + }; // the callback that the user supplies to write(chunk,encoding,cb) + + + this.writecb = null; // the amount that is being written when _write is called. + + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + + this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + + this.prefinished = false; // True if the error was already emitted and should not be thrown again + + this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true. 
+ + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end') + + this.autoDestroy = !!options.autoDestroy; // count buffered requests + + this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + + while (current) { + out.push(current); + current = current.next; + } + + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); // Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. + + +var realHasInstance; + +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); // legacy. + + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} // Otherwise people can pipe Writable streams, which is just wrong. + + +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; + +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb + + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} // Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
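[Editor's note, not part of the vendored file: the Writable constructor above wires options.write and options.final through to _write/_final. A minimal usage sketch; the in-memory sink is an illustrative assumption:]

const { Writable } = require('stream');

const received = [];

const sink = new Writable({
  write(chunk, encoding, callback) {
    // Store the chunk and signal completion; passing an Error to
    // callback() would instead be routed through errorOrDestroy().
    received.push(chunk.toString());
    callback();
  },
  final(callback) {
    console.log('received:', received.join(''));
    callback();
  }
});

sink.write('hello ');
sink.end('world');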
+ + +function validChunk(stream, state, chunk, cb) { + var er; + + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + + return true; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; + +Writable.prototype.cork = function () { + this._writableState.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); // if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. + +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false. 
+ + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); // this can emit finish, and it will always happen + // after error + + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); // this can emit finish, but finish must + // always follow error + + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} // Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
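[Editor's note, not part of the vendored file: the 'drain' scheduling described in the comment above is what a producer waits on after write() returns false. A hedged sketch of that producer-side pattern:]

const { Writable } = require('stream');

// A deliberately slow sink with a tiny highWaterMark so write()
// returns false almost immediately.
const slow = new Writable({
  highWaterMark: 4,
  write(chunk, encoding, callback) {
    setTimeout(callback, 10);
  }
});

let i = 0;
const total = 100;

function writeMany() {
  while (i < total) {
    const ok = slow.write(`line ${i}\n`);
    i++;
    if (!ok) {
      // Buffer is full: wait for 'drain' before writing more.
      slow.once('drain', writeMany);
      return;
    }
  }
  slow.end();
}

writeMany();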
+ + +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} // if there's something in the buffer waiting, then process it + + +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + + state.pendingcb++; + state.lastBufferedRequest = null; + + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks + + if (state.corked) { + state.corked = 1; + this.uncork(); + } // ignore unnecessary end() calls. 
+ + + if (!state.ending) endWritable(this, state, cb); + return this; +}; + +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} + +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + + if (err) { + errorOrDestroy(stream, err); + } + + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} + +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + + if (need) { + prefinish(stream, state); + + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } // reuse the free corkReq. 
+ + + state.corkedRequestsFree.next = corkReq; +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; + +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/async_iterator.js new file mode 100644 index 000000000..9fb615a2f --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -0,0 +1,207 @@ +'use strict'; + +var _Object$setPrototypeO; + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var finished = require('./end-of-stream'); + +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); + +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} + +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + + if (resolve !== null) { + var data = iter[kStream].read(); // we defer if data is null + // we can be expecting either 'end' or + // 'error' + + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} + +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} + +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} + +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + + next: function next() { + var _this = this; + + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + + if (error !== null) { + return Promise.reject(error); + } + + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + + + var lastPromise = this[kLastPromise]; + var promise; + + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + + promise = new Promise(this[kHandlePromise]); + } + + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); + +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise + // returned by next() and store the error + + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + + iterator[kError] = err; + return; + } + + var resolve = iterator[kLastResolve]; + + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; + +module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/buffer_list.js 
b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/buffer_list.js new file mode 100644 index 000000000..cdea425f1 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -0,0 +1,210 @@ +'use strict'; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } + +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } + +var _require = require('buffer'), + Buffer = _require.Buffer; + +var _require2 = require('util'), + inspect = _require2.inspect; + +var custom = inspect && inspect.custom || 'inspect'; + +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} + +module.exports = +/*#__PURE__*/ +function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + + while (p = p.next) { + ret += s + p.data; + } + + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if 
(this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + + return ret; + } // Consumes a specified amount of bytes or characters from the buffered data. + + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } // Consumes a specified amount of characters from the buffered data. + + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Consumes a specified amount of bytes from the buffered data. + + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Make sure the linked list only shows the minimal necessary information. + + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread({}, options, { + // Only inspect one level. + depth: 0, + // It should not recurse. 
+ customInspect: false + })); + } + }]); + + return BufferList; +}(); \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 000000000..3268a16f3 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,105 @@ +'use strict'; // undocumented cb() API, needed for core, not for public API + +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + + return this; + } // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + + if (this._readableState) { + this._readableState.destroyed = true; + } // if this is a duplex stream mark the writable part as destroyed as well + + + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + + return this; +} + +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} + +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. 
+ var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/end-of-stream.js new file mode 100644 index 000000000..831f286d9 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -0,0 +1,104 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). +'use strict'; + +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; + +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + callback.apply(this, args); + }; +} + +function noop() {} + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + + var writableEnded = stream._writableState && stream._writableState.finished; + + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + + var readableEnded = stream._readableState && stream._readableState.endEmitted; + + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + + var onerror = function onerror(err) { + callback.call(stream, err); + }; + + var onclose = function onclose() { + var err; + + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + 
stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} + +module.exports = eos; \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/from-browser.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/from-browser.js new file mode 100644 index 000000000..a4ce56f3c --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/from.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/from.js new file mode 100644 index 000000000..6c4128441 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/from.js @@ -0,0 +1,64 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } + +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; + +function from(Readable, iterable, opts) { + var iterator; + + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); // Reading boolean to protect against _read + // being called before last iteration completion. 
+ + var reading = false; + + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + + function next() { + return _next2.apply(this, arguments); + } + + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _ref = yield iterator.next(), + value = _ref.value, + done = _ref.done; + + if (done) { + readable.push(null); + } else if (readable.push((yield value))) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + + return readable; +} + +module.exports = from; \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/pipeline.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/pipeline.js new file mode 100644 index 000000000..658990988 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -0,0 +1,97 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). +'use strict'; + +var eos; + +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} + +var _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; + +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = require('./end-of-stream'); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; // request.destroy just do .end - .abort is what we want + + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} + +function call(fn) { + fn(); +} + +function pipe(from, to) { + return from.pipe(to); +} + +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} + +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} + +module.exports = pipeline; \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/state.js 
b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/state.js new file mode 100644 index 000000000..19887eb8a --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/state.js @@ -0,0 +1,27 @@ +'use strict'; + +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; + +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} + +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + + return Math.floor(hwm); + } // Default value + + + return state.objectMode ? 16 : 16 * 1024; +} + +module.exports = { + getHighWaterMark: getHighWaterMark +}; \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 000000000..9332a3fda --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 000000000..ce2ad5b6e --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/bl/node_modules/readable-stream/package.json b/node_modules/bl/node_modules/readable-stream/package.json new file mode 100644 index 000000000..0b0c4bd20 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/package.json @@ -0,0 +1,68 @@ +{ + "name": "readable-stream", + "version": "3.6.0", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "engines": { + "node": ">= 6" + }, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", + "assert": "^1.4.0", + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" + }, + "scripts": { + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "worker_threads": false, + "./errors": 
"./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/node_modules/bl/node_modules/readable-stream/readable-browser.js b/node_modules/bl/node_modules/readable-stream/readable-browser.js new file mode 100644 index 000000000..adbf60de8 --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,9 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/node_modules/bl/node_modules/readable-stream/readable.js b/node_modules/bl/node_modules/readable-stream/readable.js new file mode 100644 index 000000000..9e0ca120d --- /dev/null +++ b/node_modules/bl/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); +} diff --git a/node_modules/bl/package.json b/node_modules/bl/package.json new file mode 100644 index 000000000..3b2be3f48 --- /dev/null +++ b/node_modules/bl/package.json @@ -0,0 +1,37 @@ +{ + "name": "bl", + "version": "4.1.0", + "description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!", + "license": "MIT", + "main": "bl.js", + "scripts": { + "lint": "standard *.js test/*.js", + "test": "npm run lint && node test/test.js | faucet" + }, + "repository": { + "type": "git", + "url": "https://github.com/rvagg/bl.git" + }, + "homepage": "https://github.com/rvagg/bl", + "authors": [ + "Rod Vagg (https://github.com/rvagg)", + "Matteo Collina (https://github.com/mcollina)", + "Jarett Cruger (https://github.com/jcrugzz)" + ], + "keywords": [ + "buffer", + "buffers", + "stream", + "awesomesauce" + ], + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + }, + "devDependencies": { + "faucet": "~0.0.1", + "standard": "^14.3.0", + "tape": "^4.11.0" + } +} diff --git a/node_modules/bl/test/convert.js b/node_modules/bl/test/convert.js new file mode 100644 index 000000000..9f3e23599 --- /dev/null +++ b/node_modules/bl/test/convert.js @@ -0,0 +1,21 @@ +'use strict' + +const tape = require('tape') +const { BufferList, BufferListStream } = require('../') +const { Buffer } = require('buffer') + +tape('convert from BufferList to BufferListStream', (t) => { 
+ const data = Buffer.from(`TEST-${Date.now()}`) + const bl = new BufferList(data) + const bls = new BufferListStream(bl) + t.ok(bl.slice().equals(bls.slice())) + t.end() +}) + +tape('convert from BufferListStream to BufferList', (t) => { + const data = Buffer.from(`TEST-${Date.now()}`) + const bls = new BufferListStream(data) + const bl = new BufferList(bls) + t.ok(bl.slice().equals(bls.slice())) + t.end() +}) diff --git a/node_modules/bl/test/indexOf.js b/node_modules/bl/test/indexOf.js new file mode 100644 index 000000000..62dcb01f3 --- /dev/null +++ b/node_modules/bl/test/indexOf.js @@ -0,0 +1,492 @@ +'use strict' + +const tape = require('tape') +const BufferList = require('../') +const { Buffer } = require('buffer') + +tape('indexOf single byte needle', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg', '12345']) + + t.equal(bl.indexOf('e'), 4) + t.equal(bl.indexOf('e', 5), 11) + t.equal(bl.indexOf('e', 12), -1) + t.equal(bl.indexOf('5'), 18) + + t.end() +}) + +tape('indexOf multiple byte needle', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + + t.equal(bl.indexOf('ef'), 4) + t.equal(bl.indexOf('ef', 5), 11) + + t.end() +}) + +tape('indexOf multiple byte needles across buffer boundaries', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + + t.equal(bl.indexOf('fgabc'), 5) + + t.end() +}) + +tape('indexOf takes a Uint8Array search', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + const search = new Uint8Array([102, 103, 97, 98, 99]) // fgabc + + t.equal(bl.indexOf(search), 5) + + t.end() +}) + +tape('indexOf takes a buffer list search', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + const search = new BufferList('fgabc') + + t.equal(bl.indexOf(search), 5) + + t.end() +}) + +tape('indexOf a zero byte needle', (t) => { + const b = new BufferList('abcdef') + const bufEmpty = Buffer.from('') + + t.equal(b.indexOf(''), 0) + t.equal(b.indexOf('', 1), 1) + t.equal(b.indexOf('', b.length + 1), b.length) + t.equal(b.indexOf('', Infinity), b.length) + t.equal(b.indexOf(bufEmpty), 0) + t.equal(b.indexOf(bufEmpty, 1), 1) + t.equal(b.indexOf(bufEmpty, b.length + 1), b.length) + t.equal(b.indexOf(bufEmpty, Infinity), b.length) + + t.end() +}) + +tape('indexOf buffers smaller and larger than the needle', (t) => { + const bl = new BufferList(['abcdefg', 'a', 'bcdefg', 'a', 'bcfgab']) + + t.equal(bl.indexOf('fgabc'), 5) + t.equal(bl.indexOf('fgabc', 6), 12) + t.equal(bl.indexOf('fgabc', 13), -1) + + t.end() +}) + +// only present in node 6+ +;(process.version.substr(1).split('.')[0] >= 6) && tape('indexOf latin1 and binary encoding', (t) => { + const b = new BufferList('abcdef') + + // test latin1 encoding + t.equal( + new BufferList(Buffer.from(b.toString('latin1'), 'latin1')) + .indexOf('d', 0, 'latin1'), + 3 + ) + t.equal( + new BufferList(Buffer.from(b.toString('latin1'), 'latin1')) + .indexOf(Buffer.from('d', 'latin1'), 0, 'latin1'), + 3 + ) + t.equal( + new BufferList(Buffer.from('aa\u00e8aa', 'latin1')) + .indexOf('\u00e8', 'latin1'), + 2 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'latin1')) + .indexOf('\u00e8', 'latin1'), + 0 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'latin1')) + .indexOf(Buffer.from('\u00e8', 'latin1'), 'latin1'), + 0 + ) + + // test binary encoding + t.equal( + new BufferList(Buffer.from(b.toString('binary'), 'binary')) + .indexOf('d', 0, 'binary'), + 3 + ) + t.equal( + new BufferList(Buffer.from(b.toString('binary'), 'binary')) + .indexOf(Buffer.from('d', 'binary'), 0, 'binary'), + 3 + 
) + t.equal( + new BufferList(Buffer.from('aa\u00e8aa', 'binary')) + .indexOf('\u00e8', 'binary'), + 2 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'binary')) + .indexOf('\u00e8', 'binary'), + 0 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'binary')) + .indexOf(Buffer.from('\u00e8', 'binary'), 'binary'), + 0 + ) + + t.end() +}) + +tape('indexOf the entire nodejs10 buffer test suite', (t) => { + const b = new BufferList('abcdef') + const bufA = Buffer.from('a') + const bufBc = Buffer.from('bc') + const bufF = Buffer.from('f') + const bufZ = Buffer.from('z') + + const stringComparison = 'abcdef' + + t.equal(b.indexOf('a'), 0) + t.equal(b.indexOf('a', 1), -1) + t.equal(b.indexOf('a', -1), -1) + t.equal(b.indexOf('a', -4), -1) + t.equal(b.indexOf('a', -b.length), 0) + t.equal(b.indexOf('a', NaN), 0) + t.equal(b.indexOf('a', -Infinity), 0) + t.equal(b.indexOf('a', Infinity), -1) + t.equal(b.indexOf('bc'), 1) + t.equal(b.indexOf('bc', 2), -1) + t.equal(b.indexOf('bc', -1), -1) + t.equal(b.indexOf('bc', -3), -1) + t.equal(b.indexOf('bc', -5), 1) + t.equal(b.indexOf('bc', NaN), 1) + t.equal(b.indexOf('bc', -Infinity), 1) + t.equal(b.indexOf('bc', Infinity), -1) + t.equal(b.indexOf('f'), b.length - 1) + t.equal(b.indexOf('z'), -1) + + // empty search tests + t.equal(b.indexOf(bufA), 0) + t.equal(b.indexOf(bufA, 1), -1) + t.equal(b.indexOf(bufA, -1), -1) + t.equal(b.indexOf(bufA, -4), -1) + t.equal(b.indexOf(bufA, -b.length), 0) + t.equal(b.indexOf(bufA, NaN), 0) + t.equal(b.indexOf(bufA, -Infinity), 0) + t.equal(b.indexOf(bufA, Infinity), -1) + t.equal(b.indexOf(bufBc), 1) + t.equal(b.indexOf(bufBc, 2), -1) + t.equal(b.indexOf(bufBc, -1), -1) + t.equal(b.indexOf(bufBc, -3), -1) + t.equal(b.indexOf(bufBc, -5), 1) + t.equal(b.indexOf(bufBc, NaN), 1) + t.equal(b.indexOf(bufBc, -Infinity), 1) + t.equal(b.indexOf(bufBc, Infinity), -1) + t.equal(b.indexOf(bufF), b.length - 1) + t.equal(b.indexOf(bufZ), -1) + t.equal(b.indexOf(0x61), 0) + t.equal(b.indexOf(0x61, 1), -1) + t.equal(b.indexOf(0x61, -1), -1) + t.equal(b.indexOf(0x61, -4), -1) + t.equal(b.indexOf(0x61, -b.length), 0) + t.equal(b.indexOf(0x61, NaN), 0) + t.equal(b.indexOf(0x61, -Infinity), 0) + t.equal(b.indexOf(0x61, Infinity), -1) + t.equal(b.indexOf(0x0), -1) + + // test offsets + t.equal(b.indexOf('d', 2), 3) + t.equal(b.indexOf('f', 5), 5) + t.equal(b.indexOf('f', -1), 5) + t.equal(b.indexOf('f', 6), -1) + + t.equal(b.indexOf(Buffer.from('d'), 2), 3) + t.equal(b.indexOf(Buffer.from('f'), 5), 5) + t.equal(b.indexOf(Buffer.from('f'), -1), 5) + t.equal(b.indexOf(Buffer.from('f'), 6), -1) + + t.equal(Buffer.from('ff').indexOf(Buffer.from('f'), 1, 'ucs2'), -1) + + // test invalid and uppercase encoding + t.equal(b.indexOf('b', 'utf8'), 1) + t.equal(b.indexOf('b', 'UTF8'), 1) + t.equal(b.indexOf('62', 'HEX'), 1) + t.throws(() => b.indexOf('bad', 'enc'), TypeError) + + // test hex encoding + t.equal( + Buffer.from(b.toString('hex'), 'hex') + .indexOf('64', 0, 'hex'), + 3 + ) + t.equal( + Buffer.from(b.toString('hex'), 'hex') + .indexOf(Buffer.from('64', 'hex'), 0, 'hex'), + 3 + ) + + // test base64 encoding + t.equal( + Buffer.from(b.toString('base64'), 'base64') + .indexOf('ZA==', 0, 'base64'), + 3 + ) + t.equal( + Buffer.from(b.toString('base64'), 'base64') + .indexOf(Buffer.from('ZA==', 'base64'), 0, 'base64'), + 3 + ) + + // test ascii encoding + t.equal( + Buffer.from(b.toString('ascii'), 'ascii') + .indexOf('d', 0, 'ascii'), + 3 + ) + t.equal( + Buffer.from(b.toString('ascii'), 'ascii') + .indexOf(Buffer.from('d', 
'ascii'), 0, 'ascii'), + 3 + ) + + // test optional offset with passed encoding + t.equal(Buffer.from('aaaa0').indexOf('30', 'hex'), 4) + t.equal(Buffer.from('aaaa00a').indexOf('3030', 'hex'), 4) + + { + // test usc2 encoding + const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2') + + t.equal(8, twoByteString.indexOf('\u0395', 4, 'ucs2')) + t.equal(6, twoByteString.indexOf('\u03a3', -4, 'ucs2')) + t.equal(4, twoByteString.indexOf('\u03a3', -6, 'ucs2')) + t.equal(4, twoByteString.indexOf( + Buffer.from('\u03a3', 'ucs2'), -6, 'ucs2')) + t.equal(-1, twoByteString.indexOf('\u03a3', -2, 'ucs2')) + } + + const mixedByteStringUcs2 = + Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395', 'ucs2') + + t.equal(6, mixedByteStringUcs2.indexOf('bc', 0, 'ucs2')) + t.equal(10, mixedByteStringUcs2.indexOf('\u03a3', 0, 'ucs2')) + t.equal(-1, mixedByteStringUcs2.indexOf('\u0396', 0, 'ucs2')) + + t.equal( + 6, mixedByteStringUcs2.indexOf(Buffer.from('bc', 'ucs2'), 0, 'ucs2')) + t.equal( + 10, mixedByteStringUcs2.indexOf(Buffer.from('\u03a3', 'ucs2'), 0, 'ucs2')) + t.equal( + -1, mixedByteStringUcs2.indexOf(Buffer.from('\u0396', 'ucs2'), 0, 'ucs2')) + + { + const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2') + + // Test single char pattern + t.equal(0, twoByteString.indexOf('\u039a', 0, 'ucs2')) + let index = twoByteString.indexOf('\u0391', 0, 'ucs2') + t.equal(2, index, `Alpha - at index ${index}`) + index = twoByteString.indexOf('\u03a3', 0, 'ucs2') + t.equal(4, index, `First Sigma - at index ${index}`) + index = twoByteString.indexOf('\u03a3', 6, 'ucs2') + t.equal(6, index, `Second Sigma - at index ${index}`) + index = twoByteString.indexOf('\u0395', 0, 'ucs2') + t.equal(8, index, `Epsilon - at index ${index}`) + index = twoByteString.indexOf('\u0392', 0, 'ucs2') + t.equal(-1, index, `Not beta - at index ${index}`) + + // Test multi-char pattern + index = twoByteString.indexOf('\u039a\u0391', 0, 'ucs2') + t.equal(0, index, `Lambda Alpha - at index ${index}`) + index = twoByteString.indexOf('\u0391\u03a3', 0, 'ucs2') + t.equal(2, index, `Alpha Sigma - at index ${index}`) + index = twoByteString.indexOf('\u03a3\u03a3', 0, 'ucs2') + t.equal(4, index, `Sigma Sigma - at index ${index}`) + index = twoByteString.indexOf('\u03a3\u0395', 0, 'ucs2') + t.equal(6, index, `Sigma Epsilon - at index ${index}`) + } + + const mixedByteStringUtf8 = Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395') + + t.equal(5, mixedByteStringUtf8.indexOf('bc')) + t.equal(5, mixedByteStringUtf8.indexOf('bc', 5)) + t.equal(5, mixedByteStringUtf8.indexOf('bc', -8)) + t.equal(7, mixedByteStringUtf8.indexOf('\u03a3')) + t.equal(-1, mixedByteStringUtf8.indexOf('\u0396')) + + // Test complex string indexOf algorithms. Only trigger for long strings. + // Long string that isn't a simple repeat of a shorter string. 
+ let longString = 'A' + for (let i = 66; i < 76; i++) { // from 'B' to 'K' + longString = longString + String.fromCharCode(i) + longString + } + + const longBufferString = Buffer.from(longString) + + // pattern of 15 chars, repeated every 16 chars in long + let pattern = 'ABACABADABACABA' + for (let i = 0; i < longBufferString.length - pattern.length; i += 7) { + const index = longBufferString.indexOf(pattern, i) + t.equal((i + 15) & ~0xf, index, + `Long ABACABA...-string at index ${i}`) + } + + let index = longBufferString.indexOf('AJABACA') + t.equal(510, index, `Long AJABACA, First J - at index ${index}`) + index = longBufferString.indexOf('AJABACA', 511) + t.equal(1534, index, `Long AJABACA, Second J - at index ${index}`) + + pattern = 'JABACABADABACABA' + index = longBufferString.indexOf(pattern) + t.equal(511, index, `Long JABACABA..., First J - at index ${index}`) + index = longBufferString.indexOf(pattern, 512) + t.equal( + 1535, index, `Long JABACABA..., Second J - at index ${index}`) + + // Search for a non-ASCII string in a pure ASCII string. + const asciiString = Buffer.from( + 'somethingnotatallsinisterwhichalsoworks') + t.equal(-1, asciiString.indexOf('\x2061')) + t.equal(3, asciiString.indexOf('eth', 0)) + + // Search in string containing many non-ASCII chars. + const allCodePoints = [] + for (let i = 0; i < 65536; i++) { + allCodePoints[i] = i + } + + const allCharsString = String.fromCharCode.apply(String, allCodePoints) + const allCharsBufferUtf8 = Buffer.from(allCharsString) + const allCharsBufferUcs2 = Buffer.from(allCharsString, 'ucs2') + + // Search for string long enough to trigger complex search with ASCII pattern + // and UC16 subject. + t.equal(-1, allCharsBufferUtf8.indexOf('notfound')) + t.equal(-1, allCharsBufferUcs2.indexOf('notfound')) + + // Needle is longer than haystack, but only because it's encoded as UTF-16 + t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'ucs2'), -1) + + t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'utf8'), 0) + t.equal(Buffer.from('aaaa').indexOf('你好', 'ucs2'), -1) + + // Haystack has odd length, but the needle is UCS2. + t.equal(Buffer.from('aaaaa').indexOf('b', 'ucs2'), -1) + + { + // Find substrings in Utf8. + const lengths = [1, 3, 15] // Single char, simple and complex. + const indices = [0x5, 0x60, 0x400, 0x680, 0x7ee, 0xFF02, 0x16610, 0x2f77b] + for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) { + for (let i = 0; i < indices.length; i++) { + const index = indices[i] + let length = lengths[lengthIndex] + + if (index + length > 0x7F) { + length = 2 * length + } + + if (index + length > 0x7FF) { + length = 3 * length + } + + if (index + length > 0xFFFF) { + length = 4 * length + } + + const patternBufferUtf8 = allCharsBufferUtf8.slice(index, index + length) + t.equal(index, allCharsBufferUtf8.indexOf(patternBufferUtf8)) + + const patternStringUtf8 = patternBufferUtf8.toString() + t.equal(index, allCharsBufferUtf8.indexOf(patternStringUtf8)) + } + } + } + + { + // Find substrings in Usc2. + const lengths = [2, 4, 16] // Single char, simple and complex. 
+ const indices = [0x5, 0x65, 0x105, 0x205, 0x285, 0x2005, 0x2085, 0xfff0] + + for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) { + for (let i = 0; i < indices.length; i++) { + const index = indices[i] * 2 + const length = lengths[lengthIndex] + + const patternBufferUcs2 = + allCharsBufferUcs2.slice(index, index + length) + t.equal( + index, allCharsBufferUcs2.indexOf(patternBufferUcs2, 0, 'ucs2')) + + const patternStringUcs2 = patternBufferUcs2.toString('ucs2') + t.equal( + index, allCharsBufferUcs2.indexOf(patternStringUcs2, 0, 'ucs2')) + } + } + } + + [ + () => {}, + {}, + [] + ].forEach((val) => { + t.throws(() => b.indexOf(val), TypeError, `"${JSON.stringify(val)}" should throw`) + }) + + // Test weird offset arguments. + // The following offsets coerce to NaN or 0, searching the whole Buffer + t.equal(b.indexOf('b', undefined), 1) + t.equal(b.indexOf('b', {}), 1) + t.equal(b.indexOf('b', 0), 1) + t.equal(b.indexOf('b', null), 1) + t.equal(b.indexOf('b', []), 1) + + // The following offset coerces to 2, in other words +[2] === 2 + t.equal(b.indexOf('b', [2]), -1) + + // Behavior should match String.indexOf() + t.equal( + b.indexOf('b', undefined), + stringComparison.indexOf('b', undefined)) + t.equal( + b.indexOf('b', {}), + stringComparison.indexOf('b', {})) + t.equal( + b.indexOf('b', 0), + stringComparison.indexOf('b', 0)) + t.equal( + b.indexOf('b', null), + stringComparison.indexOf('b', null)) + t.equal( + b.indexOf('b', []), + stringComparison.indexOf('b', [])) + t.equal( + b.indexOf('b', [2]), + stringComparison.indexOf('b', [2])) + + // test truncation of Number arguments to uint8 + { + const buf = Buffer.from('this is a test') + + t.equal(buf.indexOf(0x6973), 3) + t.equal(buf.indexOf(0x697320), 4) + t.equal(buf.indexOf(0x69732069), 2) + t.equal(buf.indexOf(0x697374657374), 0) + t.equal(buf.indexOf(0x69737374), 0) + t.equal(buf.indexOf(0x69737465), 11) + t.equal(buf.indexOf(0x69737465), 11) + t.equal(buf.indexOf(-140), 0) + t.equal(buf.indexOf(-152), 1) + t.equal(buf.indexOf(0xff), -1) + t.equal(buf.indexOf(0xffff), -1) + } + + // Test that Uint8Array arguments are okay. 
+ { + const needle = new Uint8Array([0x66, 0x6f, 0x6f]) + const haystack = new BufferList(Buffer.from('a foo b foo')) + t.equal(haystack.indexOf(needle), 2) + } + + t.end() +}) diff --git a/node_modules/bl/test/isBufferList.js b/node_modules/bl/test/isBufferList.js new file mode 100644 index 000000000..9d895d59b --- /dev/null +++ b/node_modules/bl/test/isBufferList.js @@ -0,0 +1,32 @@ +'use strict' + +const tape = require('tape') +const { BufferList, BufferListStream } = require('../') +const { Buffer } = require('buffer') + +tape('isBufferList positives', (t) => { + t.ok(BufferList.isBufferList(new BufferList())) + t.ok(BufferList.isBufferList(new BufferListStream())) + + t.end() +}) + +tape('isBufferList negatives', (t) => { + const types = [ + null, + undefined, + NaN, + true, + false, + {}, + [], + Buffer.alloc(0), + [Buffer.alloc(0)] + ] + + for (const obj of types) { + t.notOk(BufferList.isBufferList(obj)) + } + + t.end() +}) diff --git a/node_modules/bl/test/test.js b/node_modules/bl/test/test.js new file mode 100644 index 000000000..e523d0c3f --- /dev/null +++ b/node_modules/bl/test/test.js @@ -0,0 +1,869 @@ +'use strict' + +const tape = require('tape') +const crypto = require('crypto') +const fs = require('fs') +const path = require('path') +const BufferList = require('../') +const { Buffer } = require('buffer') + +const encodings = + ('hex utf8 utf-8 ascii binary base64' + + (process.browser ? '' : ' ucs2 ucs-2 utf16le utf-16le')).split(' ') + +require('./indexOf') +require('./isBufferList') +require('./convert') + +tape('single bytes from single buffer', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + + t.equal(bl.length, 4) + t.equal(bl.get(-1), undefined) + t.equal(bl.get(0), 97) + t.equal(bl.get(1), 98) + t.equal(bl.get(2), 99) + t.equal(bl.get(3), 100) + t.equal(bl.get(4), undefined) + + t.end() +}) + +tape('single bytes from multiple buffers', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.get(0), 97) + t.equal(bl.get(1), 98) + t.equal(bl.get(2), 99) + t.equal(bl.get(3), 100) + t.equal(bl.get(4), 101) + t.equal(bl.get(5), 102) + t.equal(bl.get(6), 103) + t.equal(bl.get(7), 104) + t.equal(bl.get(8), 105) + t.equal(bl.get(9), 106) + + t.end() +}) + +tape('multi bytes from single buffer', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + + t.equal(bl.length, 4) + + t.equal(bl.slice(0, 4).toString('ascii'), 'abcd') + t.equal(bl.slice(0, 3).toString('ascii'), 'abc') + t.equal(bl.slice(1, 4).toString('ascii'), 'bcd') + t.equal(bl.slice(-4, -1).toString('ascii'), 'abc') + + t.end() +}) + +tape('multi bytes from single buffer (negative indexes)', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('buffer')) + + t.equal(bl.length, 6) + + t.equal(bl.slice(-6, -1).toString('ascii'), 'buffe') + t.equal(bl.slice(-6, -2).toString('ascii'), 'buff') + t.equal(bl.slice(-5, -2).toString('ascii'), 'uff') + + t.end() +}) + +tape('multiple bytes from multiple buffers', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + 
t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + t.equal(bl.slice(-7, -4).toString('ascii'), 'def') + + t.end() +}) + +tape('multiple bytes from multiple buffer lists', function (t) { + const bl = new BufferList() + + bl.append(new BufferList([Buffer.from('abcd'), Buffer.from('efg')])) + bl.append(new BufferList([Buffer.from('hi'), Buffer.from('j')])) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + + t.end() +}) + +// same data as previous test, just using nested constructors +tape('multiple bytes from crazy nested buffer lists', function (t) { + const bl = new BufferList() + + bl.append(new BufferList([ + new BufferList([ + new BufferList(Buffer.from('abc')), + Buffer.from('d'), + new BufferList(Buffer.from('efg')) + ]), + new BufferList([Buffer.from('hi')]), + new BufferList(Buffer.from('j')) + ])) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + + t.end() +}) + +tape('append accepts arrays of Buffers', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abc')) + bl.append([Buffer.from('def')]) + bl.append([Buffer.from('ghi'), Buffer.from('jkl')]) + bl.append([Buffer.from('mnop'), Buffer.from('qrstu'), Buffer.from('vwxyz')]) + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + + t.end() +}) + +tape('append accepts arrays of Uint8Arrays', function (t) { + const bl = new BufferList() + + bl.append(new Uint8Array([97, 98, 99])) + bl.append([Uint8Array.from([100, 101, 102])]) + bl.append([new Uint8Array([103, 104, 105]), new Uint8Array([106, 107, 108])]) + bl.append([new Uint8Array([109, 110, 111, 112]), new Uint8Array([113, 114, 115, 116, 117]), new Uint8Array([118, 119, 120, 121, 122])]) + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + + t.end() +}) + +tape('append accepts arrays of BufferLists', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abc')) + bl.append([new BufferList('def')]) + bl.append(new BufferList([Buffer.from('ghi'), new BufferList('jkl')])) + bl.append([Buffer.from('mnop'), new BufferList([Buffer.from('qrstu'), Buffer.from('vwxyz')])]) + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + + t.end() +}) + +tape('append chainable', function (t) { + const bl = new BufferList() + + t.ok(bl.append(Buffer.from('abcd')) === bl) + t.ok(bl.append([Buffer.from('abcd')]) === bl) + t.ok(bl.append(new BufferList(Buffer.from('abcd'))) === bl) + t.ok(bl.append([new BufferList(Buffer.from('abcd'))]) === bl) + + t.end() +}) + +tape('append chainable (test results)', function (t) { + const bl = new BufferList('abc') + .append([new BufferList('def')]) + .append(new BufferList([Buffer.from('ghi'), new BufferList('jkl')])) + .append([Buffer.from('mnop'), new BufferList([Buffer.from('qrstu'), Buffer.from('vwxyz')])]) + + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + + t.end() +}) + +tape('consuming from 
multiple buffers', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + bl.consume(3) + t.equal(bl.length, 7) + t.equal(bl.slice(0, 7).toString('ascii'), 'defghij') + + bl.consume(2) + t.equal(bl.length, 5) + t.equal(bl.slice(0, 5).toString('ascii'), 'fghij') + + bl.consume(1) + t.equal(bl.length, 4) + t.equal(bl.slice(0, 4).toString('ascii'), 'ghij') + + bl.consume(1) + t.equal(bl.length, 3) + t.equal(bl.slice(0, 3).toString('ascii'), 'hij') + + bl.consume(2) + t.equal(bl.length, 1) + t.equal(bl.slice(0, 1).toString('ascii'), 'j') + + t.end() +}) + +tape('complete consumption', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('a')) + bl.append(Buffer.from('b')) + + bl.consume(2) + + t.equal(bl.length, 0) + t.equal(bl._bufs.length, 0) + + t.end() +}) + +tape('test readUInt8 / readInt8', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf1[0] = 0x1 + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt8(), 0x1) + t.equal(bl.readUInt8(2), 0x3) + t.equal(bl.readInt8(2), 0x3) + t.equal(bl.readUInt8(3), 0x4) + t.equal(bl.readInt8(3), 0x4) + t.equal(bl.readUInt8(4), 0x23) + t.equal(bl.readInt8(4), 0x23) + t.equal(bl.readUInt8(5), 0x42) + t.equal(bl.readInt8(5), 0x42) + + t.end() +}) + +tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf1[0] = 0x1 + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt16BE(), 0x0100) + t.equal(bl.readUInt16LE(), 0x0001) + t.equal(bl.readUInt16BE(2), 0x0304) + t.equal(bl.readUInt16LE(2), 0x0403) + t.equal(bl.readInt16BE(2), 0x0304) + t.equal(bl.readInt16LE(2), 0x0403) + t.equal(bl.readUInt16BE(3), 0x0423) + t.equal(bl.readUInt16LE(3), 0x2304) + t.equal(bl.readInt16BE(3), 0x0423) + t.equal(bl.readInt16LE(3), 0x2304) + t.equal(bl.readUInt16BE(4), 0x2342) + t.equal(bl.readUInt16LE(4), 0x4223) + t.equal(bl.readInt16BE(4), 0x2342) + t.equal(bl.readInt16LE(4), 0x4223) + + t.end() +}) + +tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf1[0] = 0x1 + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt32BE(), 0x01000304) + t.equal(bl.readUInt32LE(), 0x04030001) + t.equal(bl.readUInt32BE(2), 0x03042342) + t.equal(bl.readUInt32LE(2), 0x42230403) + t.equal(bl.readInt32BE(2), 0x03042342) + t.equal(bl.readInt32LE(2), 0x42230403) + + t.end() +}) + +tape('test readUIntLE / readUIntBE / readIntLE / readIntBE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf2[0] = 0x2 + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + buf3[2] = 0x61 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUIntBE(1, 1), 0x02) + t.equal(bl.readUIntBE(1, 2), 
0x0203) + t.equal(bl.readUIntBE(1, 3), 0x020304) + t.equal(bl.readUIntBE(1, 4), 0x02030423) + t.equal(bl.readUIntBE(1, 5), 0x0203042342) + t.equal(bl.readUIntBE(1, 6), 0x020304234261) + t.equal(bl.readUIntLE(1, 1), 0x02) + t.equal(bl.readUIntLE(1, 2), 0x0302) + t.equal(bl.readUIntLE(1, 3), 0x040302) + t.equal(bl.readUIntLE(1, 4), 0x23040302) + t.equal(bl.readUIntLE(1, 5), 0x4223040302) + t.equal(bl.readUIntLE(1, 6), 0x614223040302) + t.equal(bl.readIntBE(1, 1), 0x02) + t.equal(bl.readIntBE(1, 2), 0x0203) + t.equal(bl.readIntBE(1, 3), 0x020304) + t.equal(bl.readIntBE(1, 4), 0x02030423) + t.equal(bl.readIntBE(1, 5), 0x0203042342) + t.equal(bl.readIntBE(1, 6), 0x020304234261) + t.equal(bl.readIntLE(1, 1), 0x02) + t.equal(bl.readIntLE(1, 2), 0x0302) + t.equal(bl.readIntLE(1, 3), 0x040302) + t.equal(bl.readIntLE(1, 4), 0x23040302) + t.equal(bl.readIntLE(1, 5), 0x4223040302) + t.equal(bl.readIntLE(1, 6), 0x614223040302) + + t.end() +}) + +tape('test readFloatLE / readFloatBE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf1[0] = 0x01 + buf2[1] = 0x00 + buf2[2] = 0x00 + buf3[0] = 0x80 + buf3[1] = 0x3f + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + const canonical = Buffer.concat([buf1, buf2, buf3]) + t.equal(bl.readFloatLE(), canonical.readFloatLE()) + t.equal(bl.readFloatBE(), canonical.readFloatBE()) + t.equal(bl.readFloatLE(2), canonical.readFloatLE(2)) + t.equal(bl.readFloatBE(2), canonical.readFloatBE(2)) + + t.end() +}) + +tape('test readDoubleLE / readDoubleBE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(10) + const bl = new BufferList() + + buf1[0] = 0x01 + buf2[1] = 0x55 + buf2[2] = 0x55 + buf3[0] = 0x55 + buf3[1] = 0x55 + buf3[2] = 0x55 + buf3[3] = 0x55 + buf3[4] = 0xd5 + buf3[5] = 0x3f + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + const canonical = Buffer.concat([buf1, buf2, buf3]) + t.equal(bl.readDoubleBE(), canonical.readDoubleBE()) + t.equal(bl.readDoubleLE(), canonical.readDoubleLE()) + t.equal(bl.readDoubleBE(2), canonical.readDoubleBE(2)) + t.equal(bl.readDoubleLE(2), canonical.readDoubleLE(2)) + + t.end() +}) + +tape('test toString', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.toString('ascii', 0, 10), 'abcdefghij') + t.equal(bl.toString('ascii', 3, 10), 'defghij') + t.equal(bl.toString('ascii', 3, 6), 'def') + t.equal(bl.toString('ascii', 3, 8), 'defgh') + t.equal(bl.toString('ascii', 5, 10), 'fghij') + + t.end() +}) + +tape('test toString encoding', function (t) { + const bl = new BufferList() + const b = Buffer.from('abcdefghij\xff\x00') + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + bl.append(Buffer.from('\xff\x00')) + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc), enc) + }) + + t.end() +}) + +tape('uninitialized memory', function (t) { + const secret = crypto.randomBytes(256) + for (let i = 0; i < 1e6; i++) { + const clone = Buffer.from(secret) + const bl = new BufferList() + bl.append(Buffer.from('a')) + bl.consume(-1024) + const buf = bl.slice(1) + if (buf.indexOf(clone) !== -1) { + t.fail(`Match (at ${i})`) + break + } + } + t.end() +}) + +!process.browser && tape('test stream', function (t) { + const random = 
crypto.randomBytes(65534) + + const bl = new BufferList((err, buf) => { + t.ok(Buffer.isBuffer(buf)) + t.ok(err === null) + t.ok(random.equals(bl.slice())) + t.ok(random.equals(buf.slice())) + + bl.pipe(fs.createWriteStream('/tmp/bl_test_rnd_out.dat')) + .on('close', function () { + const rndhash = crypto.createHash('md5').update(random).digest('hex') + const md5sum = crypto.createHash('md5') + const s = fs.createReadStream('/tmp/bl_test_rnd_out.dat') + + s.on('data', md5sum.update.bind(md5sum)) + s.on('end', function () { + t.equal(rndhash, md5sum.digest('hex'), 'woohoo! correct hash!') + t.end() + }) + }) + }) + + fs.writeFileSync('/tmp/bl_test_rnd.dat', random) + fs.createReadStream('/tmp/bl_test_rnd.dat').pipe(bl) +}) + +tape('instantiation with Buffer', function (t) { + const buf = crypto.randomBytes(1024) + const buf2 = crypto.randomBytes(1024) + let b = BufferList(buf) + + t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer') + b = BufferList([buf, buf2]) + t.equal(b.slice().toString('hex'), Buffer.concat([buf, buf2]).toString('hex'), 'same buffer') + + t.end() +}) + +tape('test String appendage', function (t) { + const bl = new BufferList() + const b = Buffer.from('abcdefghij\xff\x00') + + bl.append('abcd') + bl.append('efg') + bl.append('hi') + bl.append('j') + bl.append('\xff\x00') + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc)) + }) + + t.end() +}) + +tape('test Number appendage', function (t) { + const bl = new BufferList() + const b = Buffer.from('1234567890') + + bl.append(1234) + bl.append(567) + bl.append(89) + bl.append(0) + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc)) + }) + + t.end() +}) + +tape('write nothing, should get empty buffer', function (t) { + t.plan(3) + BufferList(function (err, data) { + t.notOk(err, 'no error') + t.ok(Buffer.isBuffer(data), 'got a buffer') + t.equal(0, data.length, 'got a zero-length buffer') + t.end() + }).end() +}) + +tape('unicode string', function (t) { + t.plan(2) + + const inp1 = '\u2600' + const inp2 = '\u2603' + const exp = inp1 + ' and ' + inp2 + const bl = BufferList() + + bl.write(inp1) + bl.write(' and ') + bl.write(inp2) + t.equal(exp, bl.toString()) + t.equal(Buffer.from(exp).toString('hex'), bl.toString('hex')) +}) + +tape('should emit finish', function (t) { + const source = BufferList() + const dest = BufferList() + + source.write('hello') + source.pipe(dest) + + dest.on('finish', function () { + t.equal(dest.toString('utf8'), 'hello') + t.end() + }) +}) + +tape('basic copy', function (t) { + const buf = crypto.randomBytes(1024) + const buf2 = Buffer.alloc(1024) + const b = BufferList(buf) + + b.copy(buf2) + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy after many appends', function (t) { + const buf = crypto.randomBytes(512) + const buf2 = Buffer.alloc(1024) + const b = BufferList(buf) + + b.append(buf) + b.copy(buf2) + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy at a precise position', function (t) { + const buf = crypto.randomBytes(1004) + const buf2 = Buffer.alloc(1024) + const b = BufferList(buf) + + b.copy(buf2, 20) + t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy starting from a precise location', function (t) { + const buf = crypto.randomBytes(10) + const buf2 = Buffer.alloc(5) + const b = BufferList(buf) + + b.copy(buf2, 0, 5) + 
t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy in an interval', function (t) { + const rnd = crypto.randomBytes(10) + const b = BufferList(rnd) // put the random bytes there + const actual = Buffer.alloc(3) + const expected = Buffer.alloc(3) + + rnd.copy(expected, 0, 5, 8) + b.copy(actual, 0, 5, 8) + + t.equal(actual.toString('hex'), expected.toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy an interval between two buffers', function (t) { + const buf = crypto.randomBytes(10) + const buf2 = Buffer.alloc(10) + const b = BufferList(buf) + + b.append(buf) + b.copy(buf2, 0, 5, 15) + + t.equal(b.slice(5, 15).toString('hex'), buf2.toString('hex'), 'same buffer') + + t.end() +}) + +tape('shallow slice across buffer boundaries', function (t) { + const bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(3, 13).toString(), 'stSecondTh') + + t.end() +}) + +tape('shallow slice within single buffer', function (t) { + t.plan(2) + + const bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(5, 10).toString(), 'Secon') + t.equal(bl.shallowSlice(7, 10).toString(), 'con') + + t.end() +}) + +tape('shallow slice single buffer', function (t) { + t.plan(3) + + const bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(0, 5).toString(), 'First') + t.equal(bl.shallowSlice(5, 11).toString(), 'Second') + t.equal(bl.shallowSlice(11, 16).toString(), 'Third') +}) + +tape('shallow slice with negative or omitted indices', function (t) { + t.plan(4) + + const bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice().toString(), 'FirstSecondThird') + t.equal(bl.shallowSlice(5).toString(), 'SecondThird') + t.equal(bl.shallowSlice(5, -3).toString(), 'SecondTh') + t.equal(bl.shallowSlice(-8).toString(), 'ondThird') +}) + +tape('shallow slice does not make a copy', function (t) { + t.plan(1) + + const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] + const bl = (new BufferList(buffers)).shallowSlice(5, -3) + + buffers[1].fill('h') + buffers[2].fill('h') + + t.equal(bl.toString(), 'hhhhhhhh') +}) + +tape('shallow slice with 0 length', function (t) { + t.plan(1) + + const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] + const bl = (new BufferList(buffers)).shallowSlice(0, 0) + + t.equal(bl.length, 0) +}) + +tape('shallow slice with 0 length from middle', function (t) { + t.plan(1) + + const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] + const bl = (new BufferList(buffers)).shallowSlice(10, 10) + + t.equal(bl.length, 0) +}) + +tape('duplicate', function (t) { + t.plan(2) + + const bl = new BufferList('abcdefghij\xff\x00') + const dup = bl.duplicate() + + t.equal(bl.prototype, dup.prototype) + t.equal(bl.toString('hex'), dup.toString('hex')) +}) + +tape('destroy no pipe', function (t) { + t.plan(2) + + const bl = new BufferList('alsdkfja;lsdkfja;lsdk') + + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) +}) + +tape('destroy with error', function (t) { + t.plan(3) + + const bl = new BufferList('alsdkfja;lsdkfja;lsdk') + const err = new Error('kaboom') + + bl.destroy(err) + bl.on('error', function (_err) { + t.equal(_err, err) + }) + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) +}) + +!process.browser && tape('destroy with pipe before read end', function (t) { + t.plan(2) + + const bl = new BufferList() + fs.createReadStream(path.join(__dirname, 
'/test.js')) + .pipe(bl) + + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) +}) + +!process.browser && tape('destroy with pipe before read end with race', function (t) { + t.plan(2) + + const bl = new BufferList() + + fs.createReadStream(path.join(__dirname, '/test.js')) + .pipe(bl) + + setTimeout(function () { + bl.destroy() + setTimeout(function () { + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) + }, 500) + }, 500) +}) + +!process.browser && tape('destroy with pipe after read end', function (t) { + t.plan(2) + + const bl = new BufferList() + + fs.createReadStream(path.join(__dirname, '/test.js')) + .on('end', onEnd) + .pipe(bl) + + function onEnd () { + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) + } +}) + +!process.browser && tape('destroy with pipe while writing to a destination', function (t) { + t.plan(4) + + const bl = new BufferList() + const ds = new BufferList() + + fs.createReadStream(path.join(__dirname, '/test.js')) + .on('end', onEnd) + .pipe(bl) + + function onEnd () { + bl.pipe(ds) + + setTimeout(function () { + bl.destroy() + + t.equals(bl._bufs.length, 0) + t.equals(bl.length, 0) + + ds.destroy() + + t.equals(bl._bufs.length, 0) + t.equals(bl.length, 0) + }, 100) + } +}) + +!process.browser && tape('handle error', function (t) { + t.plan(2) + + fs.createReadStream('/does/not/exist').pipe(BufferList(function (err, data) { + t.ok(err instanceof Error, 'has error') + t.notOk(data, 'no data') + })) +}) diff --git a/node_modules/buffer/AUTHORS.md b/node_modules/buffer/AUTHORS.md new file mode 100644 index 000000000..22eb17129 --- /dev/null +++ b/node_modules/buffer/AUTHORS.md @@ -0,0 +1,70 @@ +# Authors + +#### Ordered by first contribution. + +- Romain Beauxis (toots@rastageeks.org) +- Tobias Koppers (tobias.koppers@googlemail.com) +- Janus (ysangkok@gmail.com) +- Rainer Dreyer (rdrey1@gmail.com) +- Tõnis Tiigi (tonistiigi@gmail.com) +- James Halliday (mail@substack.net) +- Michael Williamson (mike@zwobble.org) +- elliottcable (github@elliottcable.name) +- rafael (rvalle@livelens.net) +- Andrew Kelley (superjoe30@gmail.com) +- Andreas Madsen (amwebdk@gmail.com) +- Mike Brevoort (mike.brevoort@pearson.com) +- Brian White (mscdex@mscdex.net) +- Feross Aboukhadijeh (feross@feross.org) +- Ruben Verborgh (ruben@verborgh.org) +- eliang (eliang.cs@gmail.com) +- Jesse Tane (jesse.tane@gmail.com) +- Alfonso Boza (alfonso@cloud.com) +- Mathias Buus (mathiasbuus@gmail.com) +- Devon Govett (devongovett@gmail.com) +- Daniel Cousens (github@dcousens.com) +- Joseph Dykstra (josephdykstra@gmail.com) +- Parsha Pourkhomami (parshap+git@gmail.com) +- Damjan Košir (damjan.kosir@gmail.com) +- daverayment (dave.rayment@gmail.com) +- kawanet (u-suke@kawa.net) +- Linus Unnebäck (linus@folkdatorn.se) +- Nolan Lawson (nolan.lawson@gmail.com) +- Calvin Metcalf (calvin.metcalf@gmail.com) +- Koki Takahashi (hakatasiloving@gmail.com) +- Guy Bedford (guybedford@gmail.com) +- Jan Schär (jscissr@gmail.com) +- RaulTsc (tomescu.raul@gmail.com) +- Matthieu Monsch (monsch@alum.mit.edu) +- Dan Ehrenberg (littledan@chromium.org) +- Kirill Fomichev (fanatid@ya.ru) +- Yusuke Kawasaki (u-suke@kawa.net) +- DC (dcposch@dcpos.ch) +- John-David Dalton (john.david.dalton@gmail.com) +- adventure-yunfei (adventure030@gmail.com) +- Emil Bay (github@tixz.dk) +- Sam Sudar (sudar.sam@gmail.com) +- Volker Mische (volker.mische@gmail.com) +- David Walton (support@geekstocks.com) +- Сковорода Никита Андреевич (chalkerx@gmail.com) +- greenkeeper[bot] 
(greenkeeper[bot]@users.noreply.github.com) +- ukstv (sergey.ukustov@machinomy.com) +- Renée Kooi (renee@kooi.me) +- ranbochen (ranbochen@qq.com) +- Vladimir Borovik (bobahbdb@gmail.com) +- greenkeeper[bot] (23040076+greenkeeper[bot]@users.noreply.github.com) +- kumavis (aaron@kumavis.me) +- Sergey Ukustov (sergey.ukustov@machinomy.com) +- Fei Liu (liu.feiwood@gmail.com) +- Blaine Bublitz (blaine.bublitz@gmail.com) +- clement (clement@seald.io) +- Koushik Dutta (koushd@gmail.com) +- Jordan Harband (ljharb@gmail.com) +- Niklas Mischkulnig (mischnic@users.noreply.github.com) +- Nikolai Vavilov (vvnicholas@gmail.com) +- Fedor Nezhivoi (gyzerok@users.noreply.github.com) +- Peter Newman (peternewman@users.noreply.github.com) +- mathmakgakpak (44949126+mathmakgakpak@users.noreply.github.com) +- jkkang (jkkang@smartauth.kr) + +#### Generated by bin/update-authors.sh. diff --git a/node_modules/buffer/LICENSE b/node_modules/buffer/LICENSE new file mode 100644 index 000000000..d6bf75dcf --- /dev/null +++ b/node_modules/buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh, and other contributors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/buffer/README.md b/node_modules/buffer/README.md new file mode 100644 index 000000000..9a23d7cfa --- /dev/null +++ b/node_modules/buffer/README.md @@ -0,0 +1,410 @@ +# buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/buffer/master.svg +[travis-url]: https://travis-ci.org/feross/buffer +[npm-image]: https://img.shields.io/npm/v/buffer.svg +[npm-url]: https://npmjs.org/package/buffer +[downloads-image]: https://img.shields.io/npm/dm/buffer.svg +[downloads-url]: https://npmjs.org/package/buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### The buffer module from [node.js](https://nodejs.org/), for the browser. + +[![saucelabs][saucelabs-image]][saucelabs-url] + +[saucelabs-image]: https://saucelabs.com/browser-matrix/buffer.svg +[saucelabs-url]: https://saucelabs.com/u/buffer + +With [browserify](http://browserify.org), simply `require('buffer')` or use the `Buffer` global and you will get this module. + +The goal is to provide an API that is 100% identical to +[node's Buffer API](https://nodejs.org/api/buffer.html). 
Read the +[official docs](https://nodejs.org/api/buffer.html) for the full list of properties, +instance methods, and class methods that are supported. + +## features + +- Manipulate binary data like a boss, in all browsers! +- Super fast. Backed by Typed Arrays (`Uint8Array`/`ArrayBuffer`, not `Object`) +- Extremely small bundle size (**6.75KB minified + gzipped**, 51.9KB with comments) +- Excellent browser support (Chrome, Firefox, Edge, Safari 9+, IE 11, iOS 9+, Android, etc.) +- Preserves Node API exactly, with one minor difference (see below) +- Square-bracket `buf[4]` notation works! +- Does not modify any browser prototypes or put anything on `window` +- Comprehensive test suite (including all buffer tests from node.js core) + +## install + +To use this module directly (without browserify), install it: + +```bash +npm install buffer +``` + +This module was previously called **native-buffer-browserify**, but please use **buffer** +from now on. + +If you do not use a bundler, you can use the [standalone script](https://bundle.run/buffer). + +## usage + +The module's API is identical to node's `Buffer` API. Read the +[official docs](https://nodejs.org/api/buffer.html) for the full list of properties, +instance methods, and class methods that are supported. + +As mentioned above, `require('buffer')` or use the `Buffer` global with +[browserify](http://browserify.org) and this module will automatically be included +in your bundle. Almost any npm module will work in the browser, even if it assumes that +the node `Buffer` API will be available. + +To depend on this module explicitly (without browserify), require it like this: + +```js +var Buffer = require('buffer/').Buffer // note: the trailing slash is important! +``` + +To require this module explicitly, use `require('buffer/')` which tells the node.js module +lookup algorithm (also used by browserify) to use the **npm module** named `buffer` +instead of the **node.js core** module named `buffer`! + + +## how does it work? + +The Buffer constructor returns instances of `Uint8Array` that have their prototype +changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of `Uint8Array`, +so the returned instances will have all the node `Buffer` methods and the +`Uint8Array` methods. Square bracket notation works as expected -- it returns a +single octet. + +The `Uint8Array` prototype remains unmodified. + + +## tracking the latest node api + +This module tracks the Buffer API in the latest (unstable) version of node.js. The Buffer +API is considered **stable** in the +[node stability index](https://nodejs.org/docs/latest/api/documentation.html#documentation_stability_index), +so it is unlikely that there will ever be breaking changes. +Nonetheless, when/if the Buffer API changes in node, this module's API will change +accordingly. + +## related packages + +- [`buffer-reverse`](https://www.npmjs.com/package/buffer-reverse) - Reverse a buffer +- [`buffer-xor`](https://www.npmjs.com/package/buffer-xor) - Bitwise xor a buffer +- [`is-buffer`](https://www.npmjs.com/package/is-buffer) - Determine if an object is a Buffer without including the whole `Buffer` package + +## conversion packages + +### convert typed array to buffer + +Use [`typedarray-to-buffer`](https://www.npmjs.com/package/typedarray-to-buffer) to convert any kind of typed array to a `Buffer`. Does not perform a copy, so it's super fast. + +### convert buffer to typed array + +`Buffer` is a subclass of `Uint8Array` (which is a typed array). 
So there is no need to explicitly convert to typed array. Just use the buffer as a `Uint8Array`. + +### convert blob to buffer + +Use [`blob-to-buffer`](https://www.npmjs.com/package/blob-to-buffer) to convert a `Blob` to a `Buffer`. + +### convert buffer to blob + +To convert a `Buffer` to a `Blob`, use the `Blob` constructor: + +```js +var blob = new Blob([ buffer ]) +``` + +Optionally, specify a mimetype: + +```js +var blob = new Blob([ buffer ], { type: 'text/html' }) +``` + +### convert arraybuffer to buffer + +To convert an `ArrayBuffer` to a `Buffer`, use the `Buffer.from` function. Does not perform a copy, so it's super fast. + +```js +var buffer = Buffer.from(arrayBuffer) +``` + +### convert buffer to arraybuffer + +To convert a `Buffer` to an `ArrayBuffer`, use the `.buffer` property (which is present on all `Uint8Array` objects): + +```js +var arrayBuffer = buffer.buffer.slice( + buffer.byteOffset, buffer.byteOffset + buffer.byteLength +) +``` + +Alternatively, use the [`to-arraybuffer`](https://www.npmjs.com/package/to-arraybuffer) module. + +## performance + +See perf tests in `/perf`. + +`BrowserBuffer` is the browser `buffer` module (this repo). `Uint8Array` is included as a +sanity check (since `BrowserBuffer` uses `Uint8Array` under the hood, `Uint8Array` will +always be at least a bit faster). Finally, `NodeBuffer` is the node.js buffer module, +which is included to compare against. + +NOTE: Performance has improved since these benchmarks were taken. PR welcome to update the README. + +### Chrome 38 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 11,457,464 ops/sec | ±0.86% | 66 | ✓ | +| Uint8Array#bracket-notation | 10,824,332 ops/sec | ±0.74% | 65 | | +| | | | | +| BrowserBuffer#concat | 450,532 ops/sec | ±0.76% | 68 | | +| Uint8Array#concat | 1,368,911 ops/sec | ±1.50% | 62 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 903,001 ops/sec | ±0.96% | 67 | | +| Uint8Array#copy(16000) | 1,422,441 ops/sec | ±1.04% | 66 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 11,431,358 ops/sec | ±0.46% | 69 | | +| Uint8Array#copy(16) | 13,944,163 ops/sec | ±1.12% | 68 | ✓ | +| | | | | +| BrowserBuffer#new(16000) | 106,329 ops/sec | ±6.70% | 44 | | +| Uint8Array#new(16000) | 131,001 ops/sec | ±2.85% | 31 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 1,554,491 ops/sec | ±1.60% | 65 | | +| Uint8Array#new(16) | 6,623,930 ops/sec | ±1.66% | 65 | ✓ | +| | | | | +| BrowserBuffer#readDoubleBE | 112,830 ops/sec | ±0.51% | 69 | ✓ | +| DataView#getFloat64 | 93,500 ops/sec | ±0.57% | 68 | | +| | | | | +| BrowserBuffer#readFloatBE | 146,678 ops/sec | ±0.95% | 68 | ✓ | +| DataView#getFloat32 | 99,311 ops/sec | ±0.41% | 67 | | +| | | | | +| BrowserBuffer#readUInt32LE | 843,214 ops/sec | ±0.70% | 69 | ✓ | +| DataView#getUint32 | 103,024 ops/sec | ±0.64% | 67 | | +| | | | | +| BrowserBuffer#slice | 1,013,941 ops/sec | ±0.75% | 67 | | +| Uint8Array#subarray | 1,903,928 ops/sec | ±0.53% | 67 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 61,387 ops/sec | ±0.90% | 67 | | +| DataView#setFloat32 | 141,249 ops/sec | ±0.40% | 66 | ✓ | + + +### Firefox 33 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 20,800,421 ops/sec | ±1.84% | 60 | | +| Uint8Array#bracket-notation | 20,826,235 ops/sec | ±2.02% | 61 | ✓ | +| | | | | +| BrowserBuffer#concat | 153,076 ops/sec | ±2.32% | 61 | | +| Uint8Array#concat | 1,255,674 
ops/sec | ±8.65% | 52 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 1,105,312 ops/sec | ±1.16% | 63 | | +| Uint8Array#copy(16000) | 1,615,911 ops/sec | ±0.55% | 66 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 16,357,599 ops/sec | ±0.73% | 68 | | +| Uint8Array#copy(16) | 31,436,281 ops/sec | ±1.05% | 68 | ✓ | +| | | | | +| BrowserBuffer#new(16000) | 52,995 ops/sec | ±6.01% | 35 | | +| Uint8Array#new(16000) | 87,686 ops/sec | ±5.68% | 45 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 252,031 ops/sec | ±1.61% | 66 | | +| Uint8Array#new(16) | 8,477,026 ops/sec | ±0.49% | 68 | ✓ | +| | | | | +| BrowserBuffer#readDoubleBE | 99,871 ops/sec | ±0.41% | 69 | | +| DataView#getFloat64 | 285,663 ops/sec | ±0.70% | 68 | ✓ | +| | | | | +| BrowserBuffer#readFloatBE | 115,540 ops/sec | ±0.42% | 69 | | +| DataView#getFloat32 | 288,722 ops/sec | ±0.82% | 68 | ✓ | +| | | | | +| BrowserBuffer#readUInt32LE | 633,926 ops/sec | ±1.08% | 67 | ✓ | +| DataView#getUint32 | 294,808 ops/sec | ±0.79% | 64 | | +| | | | | +| BrowserBuffer#slice | 349,425 ops/sec | ±0.46% | 69 | | +| Uint8Array#subarray | 5,965,819 ops/sec | ±0.60% | 65 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 59,980 ops/sec | ±0.41% | 67 | | +| DataView#setFloat32 | 317,634 ops/sec | ±0.63% | 68 | ✓ | + +### Safari 8 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 10,279,729 ops/sec | ±2.25% | 56 | ✓ | +| Uint8Array#bracket-notation | 10,030,767 ops/sec | ±2.23% | 59 | | +| | | | | +| BrowserBuffer#concat | 144,138 ops/sec | ±1.38% | 65 | | +| Uint8Array#concat | 4,950,764 ops/sec | ±1.70% | 63 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 1,058,548 ops/sec | ±1.51% | 64 | | +| Uint8Array#copy(16000) | 1,409,666 ops/sec | ±1.17% | 65 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 6,282,529 ops/sec | ±1.88% | 58 | | +| Uint8Array#copy(16) | 11,907,128 ops/sec | ±2.87% | 58 | ✓ | +| | | | | +| BrowserBuffer#new(16000) | 101,663 ops/sec | ±3.89% | 57 | | +| Uint8Array#new(16000) | 22,050,818 ops/sec | ±6.51% | 46 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 176,072 ops/sec | ±2.13% | 64 | | +| Uint8Array#new(16) | 24,385,731 ops/sec | ±5.01% | 51 | ✓ | +| | | | | +| BrowserBuffer#readDoubleBE | 41,341 ops/sec | ±1.06% | 67 | | +| DataView#getFloat64 | 322,280 ops/sec | ±0.84% | 68 | ✓ | +| | | | | +| BrowserBuffer#readFloatBE | 46,141 ops/sec | ±1.06% | 65 | | +| DataView#getFloat32 | 337,025 ops/sec | ±0.43% | 69 | ✓ | +| | | | | +| BrowserBuffer#readUInt32LE | 151,551 ops/sec | ±1.02% | 66 | | +| DataView#getUint32 | 308,278 ops/sec | ±0.94% | 67 | ✓ | +| | | | | +| BrowserBuffer#slice | 197,365 ops/sec | ±0.95% | 66 | | +| Uint8Array#subarray | 9,558,024 ops/sec | ±3.08% | 58 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 17,518 ops/sec | ±1.03% | 63 | | +| DataView#setFloat32 | 319,751 ops/sec | ±0.48% | 68 | ✓ | + + +### Node 0.11.14 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 10,489,828 ops/sec | ±3.25% | 90 | | +| Uint8Array#bracket-notation | 10,534,884 ops/sec | ±0.81% | 92 | ✓ | +| NodeBuffer#bracket-notation | 10,389,910 ops/sec | ±0.97% | 87 | | +| | | | | +| BrowserBuffer#concat | 487,830 ops/sec | ±2.58% | 88 | | +| Uint8Array#concat | 1,814,327 ops/sec | ±1.28% | 88 | ✓ | +| NodeBuffer#concat | 1,636,523 ops/sec | ±1.88% | 73 | | +| | | | | +| BrowserBuffer#copy(16000) | 1,073,665 ops/sec | ±0.77% | 90 | | +| Uint8Array#copy(16000) 
| 1,348,517 ops/sec | ±0.84% | 89 | ✓ | +| NodeBuffer#copy(16000) | 1,289,533 ops/sec | ±0.82% | 93 | | +| | | | | +| BrowserBuffer#copy(16) | 12,782,706 ops/sec | ±0.74% | 85 | | +| Uint8Array#copy(16) | 14,180,427 ops/sec | ±0.93% | 92 | ✓ | +| NodeBuffer#copy(16) | 11,083,134 ops/sec | ±1.06% | 89 | | +| | | | | +| BrowserBuffer#new(16000) | 141,678 ops/sec | ±3.30% | 67 | | +| Uint8Array#new(16000) | 161,491 ops/sec | ±2.96% | 60 | | +| NodeBuffer#new(16000) | 292,699 ops/sec | ±3.20% | 55 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 1,655,466 ops/sec | ±2.41% | 82 | | +| Uint8Array#new(16) | 14,399,926 ops/sec | ±0.91% | 94 | ✓ | +| NodeBuffer#new(16) | 3,894,696 ops/sec | ±0.88% | 92 | | +| | | | | +| BrowserBuffer#readDoubleBE | 109,582 ops/sec | ±0.75% | 93 | ✓ | +| DataView#getFloat64 | 91,235 ops/sec | ±0.81% | 90 | | +| NodeBuffer#readDoubleBE | 88,593 ops/sec | ±0.96% | 81 | | +| | | | | +| BrowserBuffer#readFloatBE | 139,854 ops/sec | ±1.03% | 85 | ✓ | +| DataView#getFloat32 | 98,744 ops/sec | ±0.80% | 89 | | +| NodeBuffer#readFloatBE | 92,769 ops/sec | ±0.94% | 93 | | +| | | | | +| BrowserBuffer#readUInt32LE | 710,861 ops/sec | ±0.82% | 92 | | +| DataView#getUint32 | 117,893 ops/sec | ±0.84% | 91 | | +| NodeBuffer#readUInt32LE | 851,412 ops/sec | ±0.72% | 93 | ✓ | +| | | | | +| BrowserBuffer#slice | 1,673,877 ops/sec | ±0.73% | 94 | | +| Uint8Array#subarray | 6,919,243 ops/sec | ±0.67% | 90 | ✓ | +| NodeBuffer#slice | 4,617,604 ops/sec | ±0.79% | 93 | | +| | | | | +| BrowserBuffer#writeFloatBE | 66,011 ops/sec | ±0.75% | 93 | | +| DataView#setFloat32 | 127,760 ops/sec | ±0.72% | 93 | ✓ | +| NodeBuffer#writeFloatBE | 103,352 ops/sec | ±0.83% | 93 | | + +### iojs 1.8.1 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 10,990,488 ops/sec | ±1.11% | 91 | | +| Uint8Array#bracket-notation | 11,268,757 ops/sec | ±0.65% | 97 | | +| NodeBuffer#bracket-notation | 11,353,260 ops/sec | ±0.83% | 94 | ✓ | +| | | | | +| BrowserBuffer#concat | 378,954 ops/sec | ±0.74% | 94 | | +| Uint8Array#concat | 1,358,288 ops/sec | ±0.97% | 87 | | +| NodeBuffer#concat | 1,934,050 ops/sec | ±1.11% | 78 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 894,538 ops/sec | ±0.56% | 84 | | +| Uint8Array#copy(16000) | 1,442,656 ops/sec | ±0.71% | 96 | | +| NodeBuffer#copy(16000) | 1,457,898 ops/sec | ±0.53% | 92 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 12,870,457 ops/sec | ±0.67% | 95 | | +| Uint8Array#copy(16) | 16,643,989 ops/sec | ±0.61% | 93 | ✓ | +| NodeBuffer#copy(16) | 14,885,848 ops/sec | ±0.74% | 94 | | +| | | | | +| BrowserBuffer#new(16000) | 109,264 ops/sec | ±4.21% | 63 | | +| Uint8Array#new(16000) | 138,916 ops/sec | ±1.87% | 61 | | +| NodeBuffer#new(16000) | 281,449 ops/sec | ±3.58% | 51 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 1,362,935 ops/sec | ±0.56% | 99 | | +| Uint8Array#new(16) | 6,193,090 ops/sec | ±0.64% | 95 | ✓ | +| NodeBuffer#new(16) | 4,745,425 ops/sec | ±1.56% | 90 | | +| | | | | +| BrowserBuffer#readDoubleBE | 118,127 ops/sec | ±0.59% | 93 | ✓ | +| DataView#getFloat64 | 107,332 ops/sec | ±0.65% | 91 | | +| NodeBuffer#readDoubleBE | 116,274 ops/sec | ±0.94% | 95 | | +| | | | | +| BrowserBuffer#readFloatBE | 150,326 ops/sec | ±0.58% | 95 | ✓ | +| DataView#getFloat32 | 110,541 ops/sec | ±0.57% | 98 | | +| NodeBuffer#readFloatBE | 121,599 ops/sec | ±0.60% | 87 | | +| | | | | +| BrowserBuffer#readUInt32LE | 814,147 ops/sec | ±0.62% | 93 | | +| DataView#getUint32 | 137,592 ops/sec | 
±0.64% | 90 | | +| NodeBuffer#readUInt32LE | 931,650 ops/sec | ±0.71% | 96 | ✓ | +| | | | | +| BrowserBuffer#slice | 878,590 ops/sec | ±0.68% | 93 | | +| Uint8Array#subarray | 2,843,308 ops/sec | ±1.02% | 90 | | +| NodeBuffer#slice | 4,998,316 ops/sec | ±0.68% | 90 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 65,927 ops/sec | ±0.74% | 93 | | +| DataView#setFloat32 | 139,823 ops/sec | ±0.97% | 89 | ✓ | +| NodeBuffer#writeFloatBE | 135,763 ops/sec | ±0.65% | 96 | | +| | | | | + +## Testing the project + +First, install the project: + + npm install + +Then, to run tests in Node.js, run: + + npm run test-node + +To test locally in a browser, you can run: + + npm run test-browser-es5-local # For ES5 browsers that don't support ES6 + npm run test-browser-es6-local # For ES6 compliant browsers + +This will print out a URL that you can then open in a browser to run the tests, using [airtap](https://www.npmjs.com/package/airtap). + +To run automated browser tests using Saucelabs, ensure that your `SAUCE_USERNAME` and `SAUCE_ACCESS_KEY` environment variables are set, then run: + + npm test + +This is what's run in Travis, to check against various browsers. The list of browsers is kept in the `bin/airtap-es5.yml` and `bin/airtap-es6.yml` files. + +## JavaScript Standard Style + +This module uses [JavaScript Standard Style](https://github.com/feross/standard). + +[![JavaScript Style Guide](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard) + +To test that the code conforms to the style, `npm install` and run: + + ./node_modules/.bin/standard + +## credit + +This was originally forked from [buffer-browserify](https://github.com/toots/buffer-browserify). + +## Security Policies and Procedures + +The `buffer` team and community take all security bugs in `buffer` seriously. Please see our [security policies and procedures](https://github.com/feross/security) document to learn how to report issues. + +## license + +MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org), and other contributors. Originally forked from an MIT-licensed module by Romain Beauxis. 
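
The "how does it work?" section of the README above describes the core mechanism: instances returned by the `Buffer` constructor are ordinary `Uint8Array`s whose prototype has been switched to `Buffer.prototype`, so both the node `Buffer` methods and the typed-array API (including square-bracket octet access) are available, and conversion to and from `ArrayBuffer` works as documented. A minimal illustrative sketch of that behaviour, assuming the package has been installed with `npm install buffer` and is required with the trailing slash as the README recommends (not part of the vendored files themselves):

```js
// Illustrative sketch only; assumes `npm install buffer` in a CommonJS setup.
var Buffer = require('buffer/').Buffer // trailing slash selects the npm package, not the core module

var buf = Buffer.from('abcd', 'utf8')

// Every Buffer is also a Uint8Array, so typed-array APIs and
// square-bracket indexing (a single octet) work directly.
console.log(buf instanceof Uint8Array) // true
console.log(buf[0]) // 97, the octet for 'a'

// Converting to a plain ArrayBuffer, as shown in the README's conversion section:
var arrayBuffer = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)
console.log(arrayBuffer.byteLength) // 4

// And back again: Buffer.from(arrayBuffer) shares the ArrayBuffer's memory, no copy.
var roundTripped = Buffer.from(arrayBuffer)
console.log(roundTripped.toString('utf8')) // 'abcd'
```

The sketch only restates what the README claims; it is not taken from the package's own test suite.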
diff --git a/node_modules/buffer/index.d.ts b/node_modules/buffer/index.d.ts new file mode 100644 index 000000000..5d1a804e5 --- /dev/null +++ b/node_modules/buffer/index.d.ts @@ -0,0 +1,186 @@ +export class Buffer extends Uint8Array { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + reverse(): this; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): 
number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer | Uint8Array): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. 
+ * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initializing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; +} diff --git a/node_modules/buffer/index.js b/node_modules/buffer/index.js new file mode 100644 index 000000000..609cf3113 --- /dev/null +++ b/node_modules/buffer/index.js @@ -0,0 +1,1817 @@ +/*! + * The buffer module from node.js, for the browser. + * + * @author Feross Aboukhadijeh + * @license MIT + */ +/* eslint-disable no-proto */ + +'use strict' + +var base64 = require('base64-js') +var ieee754 = require('ieee754') +var customInspectSymbol = + (typeof Symbol === 'function' && typeof Symbol['for'] === 'function') // eslint-disable-line dot-notation + ? Symbol['for']('nodejs.util.inspect.custom') // eslint-disable-line dot-notation + : null + +exports.Buffer = Buffer +exports.SlowBuffer = SlowBuffer +exports.INSPECT_MAX_BYTES = 50 + +var K_MAX_LENGTH = 0x7fffffff +exports.kMaxLength = K_MAX_LENGTH + +/** + * If `Buffer.TYPED_ARRAY_SUPPORT`: + * === true Use Uint8Array implementation (fastest) + * === false Print warning and recommend using `buffer` v4.x which has an Object + * implementation (most compatible, even IE6) + * + * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+, + * Opera 11.6+, iOS 4.2+. + * + * We report that the browser does not support typed arrays if the are not subclassable + * using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array` + * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support + * for __proto__ and has a buggy typed array implementation. + */ +Buffer.TYPED_ARRAY_SUPPORT = typedArraySupport() + +if (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' && + typeof console.error === 'function') { + console.error( + 'This browser lacks typed array (Uint8Array) support which is required by ' + + '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.' + ) +} + +function typedArraySupport () { + // Can typed array instances can be augmented? 
+ try { + var arr = new Uint8Array(1) + var proto = { foo: function () { return 42 } } + Object.setPrototypeOf(proto, Uint8Array.prototype) + Object.setPrototypeOf(arr, proto) + return arr.foo() === 42 + } catch (e) { + return false + } +} + +Object.defineProperty(Buffer.prototype, 'parent', { + enumerable: true, + get: function () { + if (!Buffer.isBuffer(this)) return undefined + return this.buffer + } +}) + +Object.defineProperty(Buffer.prototype, 'offset', { + enumerable: true, + get: function () { + if (!Buffer.isBuffer(this)) return undefined + return this.byteOffset + } +}) + +function createBuffer (length) { + if (length > K_MAX_LENGTH) { + throw new RangeError('The value "' + length + '" is invalid for option "size"') + } + // Return an augmented `Uint8Array` instance + var buf = new Uint8Array(length) + Object.setPrototypeOf(buf, Buffer.prototype) + return buf +} + +/** + * The Buffer constructor returns instances of `Uint8Array` that have their + * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of + * `Uint8Array`, so the returned instances will have all the node `Buffer` methods + * and the `Uint8Array` methods. Square bracket notation works as expected -- it + * returns a single octet. + * + * The `Uint8Array` prototype remains unmodified. + */ + +function Buffer (arg, encodingOrOffset, length) { + // Common case. + if (typeof arg === 'number') { + if (typeof encodingOrOffset === 'string') { + throw new TypeError( + 'The "string" argument must be of type string. Received type number' + ) + } + return allocUnsafe(arg) + } + return from(arg, encodingOrOffset, length) +} + +Buffer.poolSize = 8192 // not used by this implementation + +function from (value, encodingOrOffset, length) { + if (typeof value === 'string') { + return fromString(value, encodingOrOffset) + } + + if (ArrayBuffer.isView(value)) { + return fromArrayView(value) + } + + if (value == null) { + throw new TypeError( + 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + + 'or Array-like Object. Received type ' + (typeof value) + ) + } + + if (isInstance(value, ArrayBuffer) || + (value && isInstance(value.buffer, ArrayBuffer))) { + return fromArrayBuffer(value, encodingOrOffset, length) + } + + if (typeof SharedArrayBuffer !== 'undefined' && + (isInstance(value, SharedArrayBuffer) || + (value && isInstance(value.buffer, SharedArrayBuffer)))) { + return fromArrayBuffer(value, encodingOrOffset, length) + } + + if (typeof value === 'number') { + throw new TypeError( + 'The "value" argument must not be of type number. Received type number' + ) + } + + var valueOf = value.valueOf && value.valueOf() + if (valueOf != null && valueOf !== value) { + return Buffer.from(valueOf, encodingOrOffset, length) + } + + var b = fromObject(value) + if (b) return b + + if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null && + typeof value[Symbol.toPrimitive] === 'function') { + return Buffer.from( + value[Symbol.toPrimitive]('string'), encodingOrOffset, length + ) + } + + throw new TypeError( + 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + + 'or Array-like Object. Received type ' + (typeof value) + ) +} + +/** + * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError + * if value is a number. 
+ * Buffer.from(str[, encoding]) + * Buffer.from(array) + * Buffer.from(buffer) + * Buffer.from(arrayBuffer[, byteOffset[, length]]) + **/ +Buffer.from = function (value, encodingOrOffset, length) { + return from(value, encodingOrOffset, length) +} + +// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug: +// https://github.com/feross/buffer/pull/148 +Object.setPrototypeOf(Buffer.prototype, Uint8Array.prototype) +Object.setPrototypeOf(Buffer, Uint8Array) + +function assertSize (size) { + if (typeof size !== 'number') { + throw new TypeError('"size" argument must be of type number') + } else if (size < 0) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } +} + +function alloc (size, fill, encoding) { + assertSize(size) + if (size <= 0) { + return createBuffer(size) + } + if (fill !== undefined) { + // Only pay attention to encoding if it's a string. This + // prevents accidentally sending in a number that would + // be interpreted as a start offset. + return typeof encoding === 'string' + ? createBuffer(size).fill(fill, encoding) + : createBuffer(size).fill(fill) + } + return createBuffer(size) +} + +/** + * Creates a new filled Buffer instance. + * alloc(size[, fill[, encoding]]) + **/ +Buffer.alloc = function (size, fill, encoding) { + return alloc(size, fill, encoding) +} + +function allocUnsafe (size) { + assertSize(size) + return createBuffer(size < 0 ? 0 : checked(size) | 0) +} + +/** + * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance. + * */ +Buffer.allocUnsafe = function (size) { + return allocUnsafe(size) +} +/** + * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance. + */ +Buffer.allocUnsafeSlow = function (size) { + return allocUnsafe(size) +} + +function fromString (string, encoding) { + if (typeof encoding !== 'string' || encoding === '') { + encoding = 'utf8' + } + + if (!Buffer.isEncoding(encoding)) { + throw new TypeError('Unknown encoding: ' + encoding) + } + + var length = byteLength(string, encoding) | 0 + var buf = createBuffer(length) + + var actual = buf.write(string, encoding) + + if (actual !== length) { + // Writing a hex string, for example, that contains invalid characters will + // cause everything after the first invalid character to be ignored. (e.g. + // 'abxxcd' will be treated as 'ab') + buf = buf.slice(0, actual) + } + + return buf +} + +function fromArrayLike (array) { + var length = array.length < 0 ? 
0 : checked(array.length) | 0 + var buf = createBuffer(length) + for (var i = 0; i < length; i += 1) { + buf[i] = array[i] & 255 + } + return buf +} + +function fromArrayView (arrayView) { + if (isInstance(arrayView, Uint8Array)) { + var copy = new Uint8Array(arrayView) + return fromArrayBuffer(copy.buffer, copy.byteOffset, copy.byteLength) + } + return fromArrayLike(arrayView) +} + +function fromArrayBuffer (array, byteOffset, length) { + if (byteOffset < 0 || array.byteLength < byteOffset) { + throw new RangeError('"offset" is outside of buffer bounds') + } + + if (array.byteLength < byteOffset + (length || 0)) { + throw new RangeError('"length" is outside of buffer bounds') + } + + var buf + if (byteOffset === undefined && length === undefined) { + buf = new Uint8Array(array) + } else if (length === undefined) { + buf = new Uint8Array(array, byteOffset) + } else { + buf = new Uint8Array(array, byteOffset, length) + } + + // Return an augmented `Uint8Array` instance + Object.setPrototypeOf(buf, Buffer.prototype) + + return buf +} + +function fromObject (obj) { + if (Buffer.isBuffer(obj)) { + var len = checked(obj.length) | 0 + var buf = createBuffer(len) + + if (buf.length === 0) { + return buf + } + + obj.copy(buf, 0, 0, len) + return buf + } + + if (obj.length !== undefined) { + if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) { + return createBuffer(0) + } + return fromArrayLike(obj) + } + + if (obj.type === 'Buffer' && Array.isArray(obj.data)) { + return fromArrayLike(obj.data) + } +} + +function checked (length) { + // Note: cannot use `length < K_MAX_LENGTH` here because that fails when + // length is NaN (which is otherwise coerced to zero.) + if (length >= K_MAX_LENGTH) { + throw new RangeError('Attempt to allocate Buffer larger than maximum ' + + 'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes') + } + return length | 0 +} + +function SlowBuffer (length) { + if (+length != length) { // eslint-disable-line eqeqeq + length = 0 + } + return Buffer.alloc(+length) +} + +Buffer.isBuffer = function isBuffer (b) { + return b != null && b._isBuffer === true && + b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false +} + +Buffer.compare = function compare (a, b) { + if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength) + if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength) + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + throw new TypeError( + 'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array' + ) + } + + if (a === b) return 0 + + var x = a.length + var y = b.length + + for (var i = 0, len = Math.min(x, y); i < len; ++i) { + if (a[i] !== b[i]) { + x = a[i] + y = b[i] + break + } + } + + if (x < y) return -1 + if (y < x) return 1 + return 0 +} + +Buffer.isEncoding = function isEncoding (encoding) { + switch (String(encoding).toLowerCase()) { + case 'hex': + case 'utf8': + case 'utf-8': + case 'ascii': + case 'latin1': + case 'binary': + case 'base64': + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return true + default: + return false + } +} + +Buffer.concat = function concat (list, length) { + if (!Array.isArray(list)) { + throw new TypeError('"list" argument must be an Array of Buffers') + } + + if (list.length === 0) { + return Buffer.alloc(0) + } + + var i + if (length === undefined) { + length = 0 + for (i = 0; i < list.length; ++i) { + length += list[i].length + } + } + + var buffer = Buffer.allocUnsafe(length) + var pos = 0 + for (i = 0; i < list.length; 
++i) { + var buf = list[i] + if (isInstance(buf, Uint8Array)) { + if (pos + buf.length > buffer.length) { + Buffer.from(buf).copy(buffer, pos) + } else { + Uint8Array.prototype.set.call( + buffer, + buf, + pos + ) + } + } else if (!Buffer.isBuffer(buf)) { + throw new TypeError('"list" argument must be an Array of Buffers') + } else { + buf.copy(buffer, pos) + } + pos += buf.length + } + return buffer +} + +function byteLength (string, encoding) { + if (Buffer.isBuffer(string)) { + return string.length + } + if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) { + return string.byteLength + } + if (typeof string !== 'string') { + throw new TypeError( + 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. ' + + 'Received type ' + typeof string + ) + } + + var len = string.length + var mustMatch = (arguments.length > 2 && arguments[2] === true) + if (!mustMatch && len === 0) return 0 + + // Use a for loop to avoid recursion + var loweredCase = false + for (;;) { + switch (encoding) { + case 'ascii': + case 'latin1': + case 'binary': + return len + case 'utf8': + case 'utf-8': + return utf8ToBytes(string).length + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return len * 2 + case 'hex': + return len >>> 1 + case 'base64': + return base64ToBytes(string).length + default: + if (loweredCase) { + return mustMatch ? -1 : utf8ToBytes(string).length // assume utf8 + } + encoding = ('' + encoding).toLowerCase() + loweredCase = true + } + } +} +Buffer.byteLength = byteLength + +function slowToString (encoding, start, end) { + var loweredCase = false + + // No need to verify that "this.length <= MAX_UINT32" since it's a read-only + // property of a typed array. + + // This behaves neither like String nor Uint8Array in that we set start/end + // to their upper/lower bounds if the value passed is out of range. + // undefined is handled specially as per ECMA-262 6th Edition, + // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization. + if (start === undefined || start < 0) { + start = 0 + } + // Return early if start > this.length. Done here to prevent potential uint32 + // coercion fail below. + if (start > this.length) { + return '' + } + + if (end === undefined || end > this.length) { + end = this.length + } + + if (end <= 0) { + return '' + } + + // Force coercion to uint32. This will also coerce falsey/NaN values to 0. + end >>>= 0 + start >>>= 0 + + if (end <= start) { + return '' + } + + if (!encoding) encoding = 'utf8' + + while (true) { + switch (encoding) { + case 'hex': + return hexSlice(this, start, end) + + case 'utf8': + case 'utf-8': + return utf8Slice(this, start, end) + + case 'ascii': + return asciiSlice(this, start, end) + + case 'latin1': + case 'binary': + return latin1Slice(this, start, end) + + case 'base64': + return base64Slice(this, start, end) + + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return utf16leSlice(this, start, end) + + default: + if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) + encoding = (encoding + '').toLowerCase() + loweredCase = true + } + } +} + +// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package) +// to detect a Buffer instance. It's not possible to use `instanceof Buffer` +// reliably in a browserify context because there could be multiple different +// copies of the 'buffer' package in use. This method works even for Buffer +// instances that were created from another copy of the `buffer` package. 
+// See: https://github.com/feross/buffer/issues/154 +Buffer.prototype._isBuffer = true + +function swap (b, n, m) { + var i = b[n] + b[n] = b[m] + b[m] = i +} + +Buffer.prototype.swap16 = function swap16 () { + var len = this.length + if (len % 2 !== 0) { + throw new RangeError('Buffer size must be a multiple of 16-bits') + } + for (var i = 0; i < len; i += 2) { + swap(this, i, i + 1) + } + return this +} + +Buffer.prototype.swap32 = function swap32 () { + var len = this.length + if (len % 4 !== 0) { + throw new RangeError('Buffer size must be a multiple of 32-bits') + } + for (var i = 0; i < len; i += 4) { + swap(this, i, i + 3) + swap(this, i + 1, i + 2) + } + return this +} + +Buffer.prototype.swap64 = function swap64 () { + var len = this.length + if (len % 8 !== 0) { + throw new RangeError('Buffer size must be a multiple of 64-bits') + } + for (var i = 0; i < len; i += 8) { + swap(this, i, i + 7) + swap(this, i + 1, i + 6) + swap(this, i + 2, i + 5) + swap(this, i + 3, i + 4) + } + return this +} + +Buffer.prototype.toString = function toString () { + var length = this.length + if (length === 0) return '' + if (arguments.length === 0) return utf8Slice(this, 0, length) + return slowToString.apply(this, arguments) +} + +Buffer.prototype.toLocaleString = Buffer.prototype.toString + +Buffer.prototype.equals = function equals (b) { + if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer') + if (this === b) return true + return Buffer.compare(this, b) === 0 +} + +Buffer.prototype.inspect = function inspect () { + var str = '' + var max = exports.INSPECT_MAX_BYTES + str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim() + if (this.length > max) str += ' ... ' + return '' +} +if (customInspectSymbol) { + Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect +} + +Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) { + if (isInstance(target, Uint8Array)) { + target = Buffer.from(target, target.offset, target.byteLength) + } + if (!Buffer.isBuffer(target)) { + throw new TypeError( + 'The "target" argument must be one of type Buffer or Uint8Array. ' + + 'Received type ' + (typeof target) + ) + } + + if (start === undefined) { + start = 0 + } + if (end === undefined) { + end = target ? target.length : 0 + } + if (thisStart === undefined) { + thisStart = 0 + } + if (thisEnd === undefined) { + thisEnd = this.length + } + + if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) { + throw new RangeError('out of range index') + } + + if (thisStart >= thisEnd && start >= end) { + return 0 + } + if (thisStart >= thisEnd) { + return -1 + } + if (start >= end) { + return 1 + } + + start >>>= 0 + end >>>= 0 + thisStart >>>= 0 + thisEnd >>>= 0 + + if (this === target) return 0 + + var x = thisEnd - thisStart + var y = end - start + var len = Math.min(x, y) + + var thisCopy = this.slice(thisStart, thisEnd) + var targetCopy = target.slice(start, end) + + for (var i = 0; i < len; ++i) { + if (thisCopy[i] !== targetCopy[i]) { + x = thisCopy[i] + y = targetCopy[i] + break + } + } + + if (x < y) return -1 + if (y < x) return 1 + return 0 +} + +// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`, +// OR the last index of `val` in `buffer` at offset <= `byteOffset`. 
+// +// Arguments: +// - buffer - a Buffer to search +// - val - a string, Buffer, or number +// - byteOffset - an index into `buffer`; will be clamped to an int32 +// - encoding - an optional encoding, relevant is val is a string +// - dir - true for indexOf, false for lastIndexOf +function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) { + // Empty buffer means no match + if (buffer.length === 0) return -1 + + // Normalize byteOffset + if (typeof byteOffset === 'string') { + encoding = byteOffset + byteOffset = 0 + } else if (byteOffset > 0x7fffffff) { + byteOffset = 0x7fffffff + } else if (byteOffset < -0x80000000) { + byteOffset = -0x80000000 + } + byteOffset = +byteOffset // Coerce to Number. + if (numberIsNaN(byteOffset)) { + // byteOffset: it it's undefined, null, NaN, "foo", etc, search whole buffer + byteOffset = dir ? 0 : (buffer.length - 1) + } + + // Normalize byteOffset: negative offsets start from the end of the buffer + if (byteOffset < 0) byteOffset = buffer.length + byteOffset + if (byteOffset >= buffer.length) { + if (dir) return -1 + else byteOffset = buffer.length - 1 + } else if (byteOffset < 0) { + if (dir) byteOffset = 0 + else return -1 + } + + // Normalize val + if (typeof val === 'string') { + val = Buffer.from(val, encoding) + } + + // Finally, search either indexOf (if dir is true) or lastIndexOf + if (Buffer.isBuffer(val)) { + // Special case: looking for empty string/buffer always fails + if (val.length === 0) { + return -1 + } + return arrayIndexOf(buffer, val, byteOffset, encoding, dir) + } else if (typeof val === 'number') { + val = val & 0xFF // Search for a byte value [0-255] + if (typeof Uint8Array.prototype.indexOf === 'function') { + if (dir) { + return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset) + } else { + return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset) + } + } + return arrayIndexOf(buffer, [val], byteOffset, encoding, dir) + } + + throw new TypeError('val must be string, number or Buffer') +} + +function arrayIndexOf (arr, val, byteOffset, encoding, dir) { + var indexSize = 1 + var arrLength = arr.length + var valLength = val.length + + if (encoding !== undefined) { + encoding = String(encoding).toLowerCase() + if (encoding === 'ucs2' || encoding === 'ucs-2' || + encoding === 'utf16le' || encoding === 'utf-16le') { + if (arr.length < 2 || val.length < 2) { + return -1 + } + indexSize = 2 + arrLength /= 2 + valLength /= 2 + byteOffset /= 2 + } + } + + function read (buf, i) { + if (indexSize === 1) { + return buf[i] + } else { + return buf.readUInt16BE(i * indexSize) + } + } + + var i + if (dir) { + var foundIndex = -1 + for (i = byteOffset; i < arrLength; i++) { + if (read(arr, i) === read(val, foundIndex === -1 ? 
0 : i - foundIndex)) { + if (foundIndex === -1) foundIndex = i + if (i - foundIndex + 1 === valLength) return foundIndex * indexSize + } else { + if (foundIndex !== -1) i -= i - foundIndex + foundIndex = -1 + } + } + } else { + if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength + for (i = byteOffset; i >= 0; i--) { + var found = true + for (var j = 0; j < valLength; j++) { + if (read(arr, i + j) !== read(val, j)) { + found = false + break + } + } + if (found) return i + } + } + + return -1 +} + +Buffer.prototype.includes = function includes (val, byteOffset, encoding) { + return this.indexOf(val, byteOffset, encoding) !== -1 +} + +Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, true) +} + +Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, false) +} + +function hexWrite (buf, string, offset, length) { + offset = Number(offset) || 0 + var remaining = buf.length - offset + if (!length) { + length = remaining + } else { + length = Number(length) + if (length > remaining) { + length = remaining + } + } + + var strLen = string.length + + if (length > strLen / 2) { + length = strLen / 2 + } + for (var i = 0; i < length; ++i) { + var parsed = parseInt(string.substr(i * 2, 2), 16) + if (numberIsNaN(parsed)) return i + buf[offset + i] = parsed + } + return i +} + +function utf8Write (buf, string, offset, length) { + return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length) +} + +function asciiWrite (buf, string, offset, length) { + return blitBuffer(asciiToBytes(string), buf, offset, length) +} + +function base64Write (buf, string, offset, length) { + return blitBuffer(base64ToBytes(string), buf, offset, length) +} + +function ucs2Write (buf, string, offset, length) { + return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length) +} + +Buffer.prototype.write = function write (string, offset, length, encoding) { + // Buffer#write(string) + if (offset === undefined) { + encoding = 'utf8' + length = this.length + offset = 0 + // Buffer#write(string, encoding) + } else if (length === undefined && typeof offset === 'string') { + encoding = offset + length = this.length + offset = 0 + // Buffer#write(string, offset[, length][, encoding]) + } else if (isFinite(offset)) { + offset = offset >>> 0 + if (isFinite(length)) { + length = length >>> 0 + if (encoding === undefined) encoding = 'utf8' + } else { + encoding = length + length = undefined + } + } else { + throw new Error( + 'Buffer.write(string, encoding, offset[, length]) is no longer supported' + ) + } + + var remaining = this.length - offset + if (length === undefined || length > remaining) length = remaining + + if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) { + throw new RangeError('Attempt to write outside buffer bounds') + } + + if (!encoding) encoding = 'utf8' + + var loweredCase = false + for (;;) { + switch (encoding) { + case 'hex': + return hexWrite(this, string, offset, length) + + case 'utf8': + case 'utf-8': + return utf8Write(this, string, offset, length) + + case 'ascii': + case 'latin1': + case 'binary': + return asciiWrite(this, string, offset, length) + + case 'base64': + // Warning: maxLength not taken into account in base64Write + return base64Write(this, string, offset, length) + + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + 
return ucs2Write(this, string, offset, length) + + default: + if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) + encoding = ('' + encoding).toLowerCase() + loweredCase = true + } + } +} + +Buffer.prototype.toJSON = function toJSON () { + return { + type: 'Buffer', + data: Array.prototype.slice.call(this._arr || this, 0) + } +} + +function base64Slice (buf, start, end) { + if (start === 0 && end === buf.length) { + return base64.fromByteArray(buf) + } else { + return base64.fromByteArray(buf.slice(start, end)) + } +} + +function utf8Slice (buf, start, end) { + end = Math.min(buf.length, end) + var res = [] + + var i = start + while (i < end) { + var firstByte = buf[i] + var codePoint = null + var bytesPerSequence = (firstByte > 0xEF) + ? 4 + : (firstByte > 0xDF) + ? 3 + : (firstByte > 0xBF) + ? 2 + : 1 + + if (i + bytesPerSequence <= end) { + var secondByte, thirdByte, fourthByte, tempCodePoint + + switch (bytesPerSequence) { + case 1: + if (firstByte < 0x80) { + codePoint = firstByte + } + break + case 2: + secondByte = buf[i + 1] + if ((secondByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F) + if (tempCodePoint > 0x7F) { + codePoint = tempCodePoint + } + } + break + case 3: + secondByte = buf[i + 1] + thirdByte = buf[i + 2] + if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F) + if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) { + codePoint = tempCodePoint + } + } + break + case 4: + secondByte = buf[i + 1] + thirdByte = buf[i + 2] + fourthByte = buf[i + 3] + if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F) + if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) { + codePoint = tempCodePoint + } + } + } + } + + if (codePoint === null) { + // we did not generate a valid codePoint so insert a + // replacement char (U+FFFD) and advance only 1 byte + codePoint = 0xFFFD + bytesPerSequence = 1 + } else if (codePoint > 0xFFFF) { + // encode to utf16 (surrogate pair dance) + codePoint -= 0x10000 + res.push(codePoint >>> 10 & 0x3FF | 0xD800) + codePoint = 0xDC00 | codePoint & 0x3FF + } + + res.push(codePoint) + i += bytesPerSequence + } + + return decodeCodePointsArray(res) +} + +// Based on http://stackoverflow.com/a/22747272/680742, the browser with +// the lowest limit is Chrome, with 0x10000 args. +// We go 1 magnitude less, for safety +var MAX_ARGUMENTS_LENGTH = 0x1000 + +function decodeCodePointsArray (codePoints) { + var len = codePoints.length + if (len <= MAX_ARGUMENTS_LENGTH) { + return String.fromCharCode.apply(String, codePoints) // avoid extra slice() + } + + // Decode in chunks to avoid "call stack size exceeded". 
+ var res = '' + var i = 0 + while (i < len) { + res += String.fromCharCode.apply( + String, + codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH) + ) + } + return res +} + +function asciiSlice (buf, start, end) { + var ret = '' + end = Math.min(buf.length, end) + + for (var i = start; i < end; ++i) { + ret += String.fromCharCode(buf[i] & 0x7F) + } + return ret +} + +function latin1Slice (buf, start, end) { + var ret = '' + end = Math.min(buf.length, end) + + for (var i = start; i < end; ++i) { + ret += String.fromCharCode(buf[i]) + } + return ret +} + +function hexSlice (buf, start, end) { + var len = buf.length + + if (!start || start < 0) start = 0 + if (!end || end < 0 || end > len) end = len + + var out = '' + for (var i = start; i < end; ++i) { + out += hexSliceLookupTable[buf[i]] + } + return out +} + +function utf16leSlice (buf, start, end) { + var bytes = buf.slice(start, end) + var res = '' + // If bytes.length is odd, the last 8 bits must be ignored (same as node.js) + for (var i = 0; i < bytes.length - 1; i += 2) { + res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256)) + } + return res +} + +Buffer.prototype.slice = function slice (start, end) { + var len = this.length + start = ~~start + end = end === undefined ? len : ~~end + + if (start < 0) { + start += len + if (start < 0) start = 0 + } else if (start > len) { + start = len + } + + if (end < 0) { + end += len + if (end < 0) end = 0 + } else if (end > len) { + end = len + } + + if (end < start) end = start + + var newBuf = this.subarray(start, end) + // Return an augmented `Uint8Array` instance + Object.setPrototypeOf(newBuf, Buffer.prototype) + + return newBuf +} + +/* + * Need to make sure that buffer isn't trying to write out of bounds. + */ +function checkOffset (offset, ext, length) { + if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint') + if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length') +} + +Buffer.prototype.readUintLE = +Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var val = this[offset] + var mul = 1 + var i = 0 + while (++i < byteLength && (mul *= 0x100)) { + val += this[offset + i] * mul + } + + return val +} + +Buffer.prototype.readUintBE = +Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) { + checkOffset(offset, byteLength, this.length) + } + + var val = this[offset + --byteLength] + var mul = 1 + while (byteLength > 0 && (mul *= 0x100)) { + val += this[offset + --byteLength] * mul + } + + return val +} + +Buffer.prototype.readUint8 = +Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 1, this.length) + return this[offset] +} + +Buffer.prototype.readUint16LE = +Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + return this[offset] | (this[offset + 1] << 8) +} + +Buffer.prototype.readUint16BE = +Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + return (this[offset] << 8) | this[offset + 1] +} + +Buffer.prototype.readUint32LE = +Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) 
{ + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return ((this[offset]) | + (this[offset + 1] << 8) | + (this[offset + 2] << 16)) + + (this[offset + 3] * 0x1000000) +} + +Buffer.prototype.readUint32BE = +Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset] * 0x1000000) + + ((this[offset + 1] << 16) | + (this[offset + 2] << 8) | + this[offset + 3]) +} + +Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var val = this[offset] + var mul = 1 + var i = 0 + while (++i < byteLength && (mul *= 0x100)) { + val += this[offset + i] * mul + } + mul *= 0x80 + + if (val >= mul) val -= Math.pow(2, 8 * byteLength) + + return val +} + +Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var i = byteLength + var mul = 1 + var val = this[offset + --i] + while (i > 0 && (mul *= 0x100)) { + val += this[offset + --i] * mul + } + mul *= 0x80 + + if (val >= mul) val -= Math.pow(2, 8 * byteLength) + + return val +} + +Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 1, this.length) + if (!(this[offset] & 0x80)) return (this[offset]) + return ((0xff - this[offset] + 1) * -1) +} + +Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + var val = this[offset] | (this[offset + 1] << 8) + return (val & 0x8000) ? val | 0xFFFF0000 : val +} + +Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + var val = this[offset + 1] | (this[offset] << 8) + return (val & 0x8000) ? 
val | 0xFFFF0000 : val +} + +Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset]) | + (this[offset + 1] << 8) | + (this[offset + 2] << 16) | + (this[offset + 3] << 24) +} + +Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset] << 24) | + (this[offset + 1] << 16) | + (this[offset + 2] << 8) | + (this[offset + 3]) +} + +Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + return ieee754.read(this, offset, true, 23, 4) +} + +Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + return ieee754.read(this, offset, false, 23, 4) +} + +Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 8, this.length) + return ieee754.read(this, offset, true, 52, 8) +} + +Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 8, this.length) + return ieee754.read(this, offset, false, 52, 8) +} + +function checkInt (buf, value, offset, ext, max, min) { + if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance') + if (value > max || value < min) throw new RangeError('"value" argument is out of bounds') + if (offset + ext > buf.length) throw new RangeError('Index out of range') +} + +Buffer.prototype.writeUintLE = +Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) { + var maxBytes = Math.pow(2, 8 * byteLength) - 1 + checkInt(this, value, offset, byteLength, maxBytes, 0) + } + + var mul = 1 + var i = 0 + this[offset] = value & 0xFF + while (++i < byteLength && (mul *= 0x100)) { + this[offset + i] = (value / mul) & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeUintBE = +Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) { + var maxBytes = Math.pow(2, 8 * byteLength) - 1 + checkInt(this, value, offset, byteLength, maxBytes, 0) + } + + var i = byteLength - 1 + var mul = 1 + this[offset + i] = value & 0xFF + while (--i >= 0 && (mul *= 0x100)) { + this[offset + i] = (value / mul) & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeUint8 = +Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0) + this[offset] = (value & 0xff) + return offset + 1 +} + +Buffer.prototype.writeUint16LE = +Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + return offset + 2 +} + +Buffer.prototype.writeUint16BE = +Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) 
+ this[offset] = (value >>> 8) + this[offset + 1] = (value & 0xff) + return offset + 2 +} + +Buffer.prototype.writeUint32LE = +Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) + this[offset + 3] = (value >>> 24) + this[offset + 2] = (value >>> 16) + this[offset + 1] = (value >>> 8) + this[offset] = (value & 0xff) + return offset + 4 +} + +Buffer.prototype.writeUint32BE = +Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) + this[offset] = (value >>> 24) + this[offset + 1] = (value >>> 16) + this[offset + 2] = (value >>> 8) + this[offset + 3] = (value & 0xff) + return offset + 4 +} + +Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + var limit = Math.pow(2, (8 * byteLength) - 1) + + checkInt(this, value, offset, byteLength, limit - 1, -limit) + } + + var i = 0 + var mul = 1 + var sub = 0 + this[offset] = value & 0xFF + while (++i < byteLength && (mul *= 0x100)) { + if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) { + sub = 1 + } + this[offset + i] = ((value / mul) >> 0) - sub & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + var limit = Math.pow(2, (8 * byteLength) - 1) + + checkInt(this, value, offset, byteLength, limit - 1, -limit) + } + + var i = byteLength - 1 + var mul = 1 + var sub = 0 + this[offset + i] = value & 0xFF + while (--i >= 0 && (mul *= 0x100)) { + if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) { + sub = 1 + } + this[offset + i] = ((value / mul) >> 0) - sub & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80) + if (value < 0) value = 0xff + value + 1 + this[offset] = (value & 0xff) + return offset + 1 +} + +Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + return offset + 2 +} + +Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) + this[offset] = (value >>> 8) + this[offset + 1] = (value & 0xff) + return offset + 2 +} + +Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + this[offset + 2] = (value >>> 16) + this[offset + 3] = (value >>> 24) + return offset + 4 +} + +Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) + if (value < 0) value = 0xffffffff + value + 1 + this[offset] = (value >>> 24) + this[offset + 1] = (value >>> 16) + this[offset + 2] = (value >>> 8) 
+ this[offset + 3] = (value & 0xff) + return offset + 4 +} + +function checkIEEE754 (buf, value, offset, ext, max, min) { + if (offset + ext > buf.length) throw new RangeError('Index out of range') + if (offset < 0) throw new RangeError('Index out of range') +} + +function writeFloat (buf, value, offset, littleEndian, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38) + } + ieee754.write(buf, value, offset, littleEndian, 23, 4) + return offset + 4 +} + +Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) { + return writeFloat(this, value, offset, true, noAssert) +} + +Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) { + return writeFloat(this, value, offset, false, noAssert) +} + +function writeDouble (buf, value, offset, littleEndian, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308) + } + ieee754.write(buf, value, offset, littleEndian, 52, 8) + return offset + 8 +} + +Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) { + return writeDouble(this, value, offset, true, noAssert) +} + +Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) { + return writeDouble(this, value, offset, false, noAssert) +} + +// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length) +Buffer.prototype.copy = function copy (target, targetStart, start, end) { + if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer') + if (!start) start = 0 + if (!end && end !== 0) end = this.length + if (targetStart >= target.length) targetStart = target.length + if (!targetStart) targetStart = 0 + if (end > 0 && end < start) end = start + + // Copy 0 bytes; we're done + if (end === start) return 0 + if (target.length === 0 || this.length === 0) return 0 + + // Fatal error conditions + if (targetStart < 0) { + throw new RangeError('targetStart out of bounds') + } + if (start < 0 || start >= this.length) throw new RangeError('Index out of range') + if (end < 0) throw new RangeError('sourceEnd out of bounds') + + // Are we oob? 
+ if (end > this.length) end = this.length + if (target.length - targetStart < end - start) { + end = target.length - targetStart + start + } + + var len = end - start + + if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') { + // Use built-in when available, missing from IE11 + this.copyWithin(targetStart, start, end) + } else { + Uint8Array.prototype.set.call( + target, + this.subarray(start, end), + targetStart + ) + } + + return len +} + +// Usage: +// buffer.fill(number[, offset[, end]]) +// buffer.fill(buffer[, offset[, end]]) +// buffer.fill(string[, offset[, end]][, encoding]) +Buffer.prototype.fill = function fill (val, start, end, encoding) { + // Handle string cases: + if (typeof val === 'string') { + if (typeof start === 'string') { + encoding = start + start = 0 + end = this.length + } else if (typeof end === 'string') { + encoding = end + end = this.length + } + if (encoding !== undefined && typeof encoding !== 'string') { + throw new TypeError('encoding must be a string') + } + if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) { + throw new TypeError('Unknown encoding: ' + encoding) + } + if (val.length === 1) { + var code = val.charCodeAt(0) + if ((encoding === 'utf8' && code < 128) || + encoding === 'latin1') { + // Fast path: If `val` fits into a single byte, use that numeric value. + val = code + } + } + } else if (typeof val === 'number') { + val = val & 255 + } else if (typeof val === 'boolean') { + val = Number(val) + } + + // Invalid ranges are not set to a default, so can range check early. + if (start < 0 || this.length < start || this.length < end) { + throw new RangeError('Out of range index') + } + + if (end <= start) { + return this + } + + start = start >>> 0 + end = end === undefined ? this.length : end >>> 0 + + if (!val) val = 0 + + var i + if (typeof val === 'number') { + for (i = start; i < end; ++i) { + this[i] = val + } + } else { + var bytes = Buffer.isBuffer(val) + ? 
val + : Buffer.from(val, encoding) + var len = bytes.length + if (len === 0) { + throw new TypeError('The value "' + val + + '" is invalid for argument "value"') + } + for (i = 0; i < end - start; ++i) { + this[i + start] = bytes[i % len] + } + } + + return this +} + +// HELPER FUNCTIONS +// ================ + +var INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g + +function base64clean (str) { + // Node takes equal signs as end of the Base64 encoding + str = str.split('=')[0] + // Node strips out invalid characters like \n and \t from the string, base64-js does not + str = str.trim().replace(INVALID_BASE64_RE, '') + // Node converts strings with length < 2 to '' + if (str.length < 2) return '' + // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not + while (str.length % 4 !== 0) { + str = str + '=' + } + return str +} + +function utf8ToBytes (string, units) { + units = units || Infinity + var codePoint + var length = string.length + var leadSurrogate = null + var bytes = [] + + for (var i = 0; i < length; ++i) { + codePoint = string.charCodeAt(i) + + // is surrogate component + if (codePoint > 0xD7FF && codePoint < 0xE000) { + // last char was a lead + if (!leadSurrogate) { + // no lead yet + if (codePoint > 0xDBFF) { + // unexpected trail + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + continue + } else if (i + 1 === length) { + // unpaired lead + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + continue + } + + // valid lead + leadSurrogate = codePoint + + continue + } + + // 2 leads in a row + if (codePoint < 0xDC00) { + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + leadSurrogate = codePoint + continue + } + + // valid surrogate pair + codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000 + } else if (leadSurrogate) { + // valid bmp char, but last char was a lead + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + } + + leadSurrogate = null + + // encode utf8 + if (codePoint < 0x80) { + if ((units -= 1) < 0) break + bytes.push(codePoint) + } else if (codePoint < 0x800) { + if ((units -= 2) < 0) break + bytes.push( + codePoint >> 0x6 | 0xC0, + codePoint & 0x3F | 0x80 + ) + } else if (codePoint < 0x10000) { + if ((units -= 3) < 0) break + bytes.push( + codePoint >> 0xC | 0xE0, + codePoint >> 0x6 & 0x3F | 0x80, + codePoint & 0x3F | 0x80 + ) + } else if (codePoint < 0x110000) { + if ((units -= 4) < 0) break + bytes.push( + codePoint >> 0x12 | 0xF0, + codePoint >> 0xC & 0x3F | 0x80, + codePoint >> 0x6 & 0x3F | 0x80, + codePoint & 0x3F | 0x80 + ) + } else { + throw new Error('Invalid code point') + } + } + + return bytes +} + +function asciiToBytes (str) { + var byteArray = [] + for (var i = 0; i < str.length; ++i) { + // Node's code seems to be doing this and not & 0x7F.. + byteArray.push(str.charCodeAt(i) & 0xFF) + } + return byteArray +} + +function utf16leToBytes (str, units) { + var c, hi, lo + var byteArray = [] + for (var i = 0; i < str.length; ++i) { + if ((units -= 2) < 0) break + + c = str.charCodeAt(i) + hi = c >> 8 + lo = c % 256 + byteArray.push(lo) + byteArray.push(hi) + } + + return byteArray +} + +function base64ToBytes (str) { + return base64.toByteArray(base64clean(str)) +} + +function blitBuffer (src, dst, offset, length) { + for (var i = 0; i < length; ++i) { + if ((i + offset >= dst.length) || (i >= src.length)) break + dst[i + offset] = src[i] + } + return i +} + +// ArrayBuffer or Uint8Array objects from other contexts (i.e. 
iframes) do not pass +// the `instanceof` check but they should be treated as of that type. +// See: https://github.com/feross/buffer/issues/166 +function isInstance (obj, type) { + return obj instanceof type || + (obj != null && obj.constructor != null && obj.constructor.name != null && + obj.constructor.name === type.name) +} +function numberIsNaN (obj) { + // For IE11 support + return obj !== obj // eslint-disable-line no-self-compare +} + +// Create lookup table for `toString('hex')` +// See: https://github.com/feross/buffer/issues/219 +var hexSliceLookupTable = (function () { + var alphabet = '0123456789abcdef' + var table = new Array(256) + for (var i = 0; i < 16; ++i) { + var i16 = i * 16 + for (var j = 0; j < 16; ++j) { + table[i16 + j] = alphabet[i] + alphabet[j] + } + } + return table +})() diff --git a/node_modules/buffer/package.json b/node_modules/buffer/package.json new file mode 100644 index 000000000..3b1b4986f --- /dev/null +++ b/node_modules/buffer/package.json @@ -0,0 +1,96 @@ +{ + "name": "buffer", + "description": "Node.js Buffer API, for the browser", + "version": "5.7.1", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/buffer/issues" + }, + "contributors": [ + "Romain Beauxis ", + "James Halliday " + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + }, + "devDependencies": { + "airtap": "^3.0.0", + "benchmark": "^2.1.4", + "browserify": "^17.0.0", + "concat-stream": "^2.0.0", + "hyperquest": "^2.1.3", + "is-buffer": "^2.0.4", + "is-nan": "^1.3.0", + "split": "^1.0.1", + "standard": "*", + "tape": "^5.0.1", + "through2": "^4.0.2", + "uglify-js": "^3.11.3" + }, + "homepage": "https://github.com/feross/buffer", + "jspm": { + "map": { + "./index.js": { + "node": "@node/buffer" + } + } + }, + "keywords": [ + "arraybuffer", + "browser", + "browserify", + "buffer", + "compatible", + "dataview", + "uint8array" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/buffer.git" + }, + "scripts": { + "perf": "browserify --debug perf/bracket-notation.js > perf/bundle.js && open perf/index.html", + "perf-node": "node perf/bracket-notation.js && node perf/concat.js && node perf/copy-big.js && node perf/copy.js && node perf/new-big.js && node perf/new.js && node perf/readDoubleBE.js && node perf/readFloatBE.js && node perf/readUInt32LE.js && node perf/slice.js && node perf/writeFloatBE.js", + "size": "browserify -r ./ | uglifyjs -c -m | gzip | wc -c", + "test": "standard && node ./bin/test.js", + "test-browser-es5": "airtap -- test/*.js", + "test-browser-es5-local": "airtap --local -- test/*.js", + "test-browser-es6": "airtap -- test/*.js test/node/*.js", + "test-browser-es6-local": "airtap --local -- test/*.js test/node/*.js", + "test-node": "tape test/*.js test/node/*.js", + "update-authors": "./bin/update-authors.sh" + }, + "standard": { + "ignore": [ + "test/node/**/*.js", + "test/common.js", + "test/_polyfill.js", + "perf/**/*.js" + ], + "globals": [ + "SharedArrayBuffer" + ] + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/node_modules/chownr/LICENSE b/node_modules/chownr/LICENSE new file mode 100644 index 000000000..19129e315 --- /dev/null +++ 
b/node_modules/chownr/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/chownr/README.md b/node_modules/chownr/README.md new file mode 100644 index 000000000..70e9a54a3 --- /dev/null +++ b/node_modules/chownr/README.md @@ -0,0 +1,3 @@ +Like `chown -R`. + +Takes the same arguments as `fs.chown()` diff --git a/node_modules/chownr/chownr.js b/node_modules/chownr/chownr.js new file mode 100644 index 000000000..0d4093216 --- /dev/null +++ b/node_modules/chownr/chownr.js @@ -0,0 +1,167 @@ +'use strict' +const fs = require('fs') +const path = require('path') + +/* istanbul ignore next */ +const LCHOWN = fs.lchown ? 'lchown' : 'chown' +/* istanbul ignore next */ +const LCHOWNSYNC = fs.lchownSync ? 'lchownSync' : 'chownSync' + +/* istanbul ignore next */ +const needEISDIRHandled = fs.lchown && + !process.version.match(/v1[1-9]+\./) && + !process.version.match(/v10\.[6-9]/) + +const lchownSync = (path, uid, gid) => { + try { + return fs[LCHOWNSYNC](path, uid, gid) + } catch (er) { + if (er.code !== 'ENOENT') + throw er + } +} + +/* istanbul ignore next */ +const chownSync = (path, uid, gid) => { + try { + return fs.chownSync(path, uid, gid) + } catch (er) { + if (er.code !== 'ENOENT') + throw er + } +} + +/* istanbul ignore next */ +const handleEISDIR = + needEISDIRHandled ? (path, uid, gid, cb) => er => { + // Node prior to v10 had a very questionable implementation of + // fs.lchown, which would always try to call fs.open on a directory + // Fall back to fs.chown in those cases. + if (!er || er.code !== 'EISDIR') + cb(er) + else + fs.chown(path, uid, gid, cb) + } + : (_, __, ___, cb) => cb + +/* istanbul ignore next */ +const handleEISDirSync = + needEISDIRHandled ? (path, uid, gid) => { + try { + return lchownSync(path, uid, gid) + } catch (er) { + if (er.code !== 'EISDIR') + throw er + chownSync(path, uid, gid) + } + } + : (path, uid, gid) => lchownSync(path, uid, gid) + +// fs.readdir could only accept an options object as of node v6 +const nodeVersion = process.version +let readdir = (path, options, cb) => fs.readdir(path, options, cb) +let readdirSync = (path, options) => fs.readdirSync(path, options) +/* istanbul ignore next */ +if (/^v4\./.test(nodeVersion)) + readdir = (path, options, cb) => fs.readdir(path, cb) + +const chown = (cpath, uid, gid, cb) => { + fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => { + // Skip ENOENT error + cb(er && er.code !== 'ENOENT' ? er : null) + })) +} + +const chownrKid = (p, child, uid, gid, cb) => { + if (typeof child === 'string') + return fs.lstat(path.resolve(p, child), (er, stats) => { + // Skip ENOENT error + if (er) + return cb(er.code !== 'ENOENT' ? 
er : null) + stats.name = child + chownrKid(p, stats, uid, gid, cb) + }) + + if (child.isDirectory()) { + chownr(path.resolve(p, child.name), uid, gid, er => { + if (er) + return cb(er) + const cpath = path.resolve(p, child.name) + chown(cpath, uid, gid, cb) + }) + } else { + const cpath = path.resolve(p, child.name) + chown(cpath, uid, gid, cb) + } +} + + +const chownr = (p, uid, gid, cb) => { + readdir(p, { withFileTypes: true }, (er, children) => { + // any error other than ENOTDIR or ENOTSUP means it's not readable, + // or doesn't exist. give up. + if (er) { + if (er.code === 'ENOENT') + return cb() + else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP') + return cb(er) + } + if (er || !children.length) + return chown(p, uid, gid, cb) + + let len = children.length + let errState = null + const then = er => { + if (errState) + return + if (er) + return cb(errState = er) + if (-- len === 0) + return chown(p, uid, gid, cb) + } + + children.forEach(child => chownrKid(p, child, uid, gid, then)) + }) +} + +const chownrKidSync = (p, child, uid, gid) => { + if (typeof child === 'string') { + try { + const stats = fs.lstatSync(path.resolve(p, child)) + stats.name = child + child = stats + } catch (er) { + if (er.code === 'ENOENT') + return + else + throw er + } + } + + if (child.isDirectory()) + chownrSync(path.resolve(p, child.name), uid, gid) + + handleEISDirSync(path.resolve(p, child.name), uid, gid) +} + +const chownrSync = (p, uid, gid) => { + let children + try { + children = readdirSync(p, { withFileTypes: true }) + } catch (er) { + if (er.code === 'ENOENT') + return + else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP') + return handleEISDirSync(p, uid, gid) + else + throw er + } + + if (children && children.length) + children.forEach(child => chownrKidSync(p, child, uid, gid)) + + return handleEISDirSync(p, uid, gid) +} + +module.exports = chownr +chownr.sync = chownrSync diff --git a/node_modules/chownr/package.json b/node_modules/chownr/package.json new file mode 100644 index 000000000..c273a7d17 --- /dev/null +++ b/node_modules/chownr/package.json @@ -0,0 +1,29 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "chownr", + "description": "like `chown -R`", + "version": "1.1.4", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/chownr.git" + }, + "main": "chownr.js", + "files": [ + "chownr.js" + ], + "devDependencies": { + "mkdirp": "0.3", + "rimraf": "^2.7.1", + "tap": "^14.10.6" + }, + "tap": { + "check-coverage": true + }, + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "license": "ISC" +} diff --git a/node_modules/color-convert/CHANGELOG.md b/node_modules/color-convert/CHANGELOG.md new file mode 100644 index 000000000..0a7bce4fd --- /dev/null +++ b/node_modules/color-convert/CHANGELOG.md @@ -0,0 +1,54 @@ +# 1.0.0 - 2016-01-07 + +- Removed: unused speed test +- Added: Automatic routing between previously unsupported conversions +([#27](https://github.com/Qix-/color-convert/pull/27)) +- Removed: `xxx2xxx()` and `xxx2xxxRaw()` functions +([#27](https://github.com/Qix-/color-convert/pull/27)) +- Removed: `convert()` class +([#27](https://github.com/Qix-/color-convert/pull/27)) +- Changed: all functions to lookup dictionary +([#27](https://github.com/Qix-/color-convert/pull/27)) +- Changed: `ansi` to `ansi256` +([#27](https://github.com/Qix-/color-convert/pull/27)) +- Fixed: argument grouping for functions requiring only one argument +([#27](https://github.com/Qix-/color-convert/pull/27)) + +# 0.6.0 - 2015-07-23 + +- Added: methods to handle +[ANSI](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors) 16/256 colors: + - rgb2ansi16 + - rgb2ansi + - hsl2ansi16 + - hsl2ansi + - hsv2ansi16 + - hsv2ansi + - hwb2ansi16 + - hwb2ansi + - cmyk2ansi16 + - cmyk2ansi + - keyword2ansi16 + - keyword2ansi + - ansi162rgb + - ansi162hsl + - ansi162hsv + - ansi162hwb + - ansi162cmyk + - ansi162keyword + - ansi2rgb + - ansi2hsl + - ansi2hsv + - ansi2hwb + - ansi2cmyk + - ansi2keyword +([#18](https://github.com/harthur/color-convert/pull/18)) + +# 0.5.3 - 2015-06-02 + +- Fixed: hsl2hsv does not return `NaN` anymore when using `[0,0,0]` +([#15](https://github.com/harthur/color-convert/issues/15)) + +--- + +Check out commit logs for older releases diff --git a/node_modules/color-convert/LICENSE b/node_modules/color-convert/LICENSE new file mode 100644 index 000000000..5b4c386f9 --- /dev/null +++ b/node_modules/color-convert/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) 2011-2016 Heather Arthur + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ diff --git a/node_modules/color-convert/README.md b/node_modules/color-convert/README.md new file mode 100644 index 000000000..d4b08fc36 --- /dev/null +++ b/node_modules/color-convert/README.md @@ -0,0 +1,68 @@ +# color-convert + +[![Build Status](https://travis-ci.org/Qix-/color-convert.svg?branch=master)](https://travis-ci.org/Qix-/color-convert) + +Color-convert is a color conversion library for JavaScript and node. +It converts all ways between `rgb`, `hsl`, `hsv`, `hwb`, `cmyk`, `ansi`, `ansi16`, `hex` strings, and CSS `keyword`s (will round to closest): + +```js +var convert = require('color-convert'); + +convert.rgb.hsl(140, 200, 100); // [96, 48, 59] +convert.keyword.rgb('blue'); // [0, 0, 255] + +var rgbChannels = convert.rgb.channels; // 3 +var cmykChannels = convert.cmyk.channels; // 4 +var ansiChannels = convert.ansi16.channels; // 1 +``` + +# Install + +```console +$ npm install color-convert +``` + +# API + +Simply get the property of the _from_ and _to_ conversion that you're looking for. + +All functions have a rounded and unrounded variant. By default, return values are rounded. To get the unrounded (raw) results, simply tack on `.raw` to the function. + +All 'from' functions have a hidden property called `.channels` that indicates the number of channels the function expects (not including alpha). + +```js +var convert = require('color-convert'); + +// Hex to LAB +convert.hex.lab('DEADBF'); // [ 76, 21, -2 ] +convert.hex.lab.raw('DEADBF'); // [ 75.56213190997677, 20.653827952644754, -2.290532499330533 ] + +// RGB to CMYK +convert.rgb.cmyk(167, 255, 4); // [ 35, 0, 98, 0 ] +convert.rgb.cmyk.raw(167, 255, 4); // [ 34.509803921568626, 0, 98.43137254901961, 0 ] +``` + +### Arrays +All functions that accept multiple arguments also support passing an array. + +Note that this does **not** apply to functions that convert from a color that only requires one value (e.g. `keyword`, `ansi256`, `hex`, etc.) + +```js +var convert = require('color-convert'); + +convert.rgb.hex(123, 45, 67); // '7B2D43' +convert.rgb.hex([123, 45, 67]); // '7B2D43' +``` + +## Routing + +Conversions that don't have an _explicitly_ defined conversion (in [conversions.js](conversions.js)), but can be converted by means of sub-conversions (e.g. XYZ -> **RGB** -> CMYK), are automatically routed together. This allows just about any color model supported by `color-convert` to be converted to any other model, so long as a sub-conversion path exists. This is also true for conversions requiring more than one step in between (e.g. LCH -> **LAB** -> **XYZ** -> **RGB** -> Hex). + +Keep in mind that extensive conversions _may_ result in a loss of precision, and exist only to be complete. For a list of "direct" (single-step) conversions, see [conversions.js](conversions.js). + +# Contribute + +If there is a new model you would like to support, or want to add a direct conversion between two existing models, please send us a pull request. + +# License +Copyright © 2011-2016, Heather Arthur and Josh Junon. Licensed under the [MIT License](LICENSE). diff --git a/node_modules/color-convert/conversions.js b/node_modules/color-convert/conversions.js new file mode 100644 index 000000000..2657f265c --- /dev/null +++ b/node_modules/color-convert/conversions.js @@ -0,0 +1,839 @@ +/* MIT license */ +/* eslint-disable no-mixed-operators */ +const cssKeywords = require('color-name'); + +// NOTE: conversions should only return primitive values (i.e. arrays, or +// values that give correct `typeof` results). 
+// do not use box values types (i.e. Number(), String(), etc.) + +const reverseKeywords = {}; +for (const key of Object.keys(cssKeywords)) { + reverseKeywords[cssKeywords[key]] = key; +} + +const convert = { + rgb: {channels: 3, labels: 'rgb'}, + hsl: {channels: 3, labels: 'hsl'}, + hsv: {channels: 3, labels: 'hsv'}, + hwb: {channels: 3, labels: 'hwb'}, + cmyk: {channels: 4, labels: 'cmyk'}, + xyz: {channels: 3, labels: 'xyz'}, + lab: {channels: 3, labels: 'lab'}, + lch: {channels: 3, labels: 'lch'}, + hex: {channels: 1, labels: ['hex']}, + keyword: {channels: 1, labels: ['keyword']}, + ansi16: {channels: 1, labels: ['ansi16']}, + ansi256: {channels: 1, labels: ['ansi256']}, + hcg: {channels: 3, labels: ['h', 'c', 'g']}, + apple: {channels: 3, labels: ['r16', 'g16', 'b16']}, + gray: {channels: 1, labels: ['gray']} +}; + +module.exports = convert; + +// Hide .channels and .labels properties +for (const model of Object.keys(convert)) { + if (!('channels' in convert[model])) { + throw new Error('missing channels property: ' + model); + } + + if (!('labels' in convert[model])) { + throw new Error('missing channel labels property: ' + model); + } + + if (convert[model].labels.length !== convert[model].channels) { + throw new Error('channel and label counts mismatch: ' + model); + } + + const {channels, labels} = convert[model]; + delete convert[model].channels; + delete convert[model].labels; + Object.defineProperty(convert[model], 'channels', {value: channels}); + Object.defineProperty(convert[model], 'labels', {value: labels}); +} + +convert.rgb.hsl = function (rgb) { + const r = rgb[0] / 255; + const g = rgb[1] / 255; + const b = rgb[2] / 255; + const min = Math.min(r, g, b); + const max = Math.max(r, g, b); + const delta = max - min; + let h; + let s; + + if (max === min) { + h = 0; + } else if (r === max) { + h = (g - b) / delta; + } else if (g === max) { + h = 2 + (b - r) / delta; + } else if (b === max) { + h = 4 + (r - g) / delta; + } + + h = Math.min(h * 60, 360); + + if (h < 0) { + h += 360; + } + + const l = (min + max) / 2; + + if (max === min) { + s = 0; + } else if (l <= 0.5) { + s = delta / (max + min); + } else { + s = delta / (2 - max - min); + } + + return [h, s * 100, l * 100]; +}; + +convert.rgb.hsv = function (rgb) { + let rdif; + let gdif; + let bdif; + let h; + let s; + + const r = rgb[0] / 255; + const g = rgb[1] / 255; + const b = rgb[2] / 255; + const v = Math.max(r, g, b); + const diff = v - Math.min(r, g, b); + const diffc = function (c) { + return (v - c) / 6 / diff + 1 / 2; + }; + + if (diff === 0) { + h = 0; + s = 0; + } else { + s = diff / v; + rdif = diffc(r); + gdif = diffc(g); + bdif = diffc(b); + + if (r === v) { + h = bdif - gdif; + } else if (g === v) { + h = (1 / 3) + rdif - bdif; + } else if (b === v) { + h = (2 / 3) + gdif - rdif; + } + + if (h < 0) { + h += 1; + } else if (h > 1) { + h -= 1; + } + } + + return [ + h * 360, + s * 100, + v * 100 + ]; +}; + +convert.rgb.hwb = function (rgb) { + const r = rgb[0]; + const g = rgb[1]; + let b = rgb[2]; + const h = convert.rgb.hsl(rgb)[0]; + const w = 1 / 255 * Math.min(r, Math.min(g, b)); + + b = 1 - 1 / 255 * Math.max(r, Math.max(g, b)); + + return [h, w * 100, b * 100]; +}; + +convert.rgb.cmyk = function (rgb) { + const r = rgb[0] / 255; + const g = rgb[1] / 255; + const b = rgb[2] / 255; + + const k = Math.min(1 - r, 1 - g, 1 - b); + const c = (1 - r - k) / (1 - k) || 0; + const m = (1 - g - k) / (1 - k) || 0; + const y = (1 - b - k) / (1 - k) || 0; + + return [c * 100, m * 100, y * 100, k * 100]; +}; + 
+function comparativeDistance(x, y) { + /* + See https://en.m.wikipedia.org/wiki/Euclidean_distance#Squared_Euclidean_distance + */ + return ( + ((x[0] - y[0]) ** 2) + + ((x[1] - y[1]) ** 2) + + ((x[2] - y[2]) ** 2) + ); +} + +convert.rgb.keyword = function (rgb) { + const reversed = reverseKeywords[rgb]; + if (reversed) { + return reversed; + } + + let currentClosestDistance = Infinity; + let currentClosestKeyword; + + for (const keyword of Object.keys(cssKeywords)) { + const value = cssKeywords[keyword]; + + // Compute comparative distance + const distance = comparativeDistance(rgb, value); + + // Check if its less, if so set as closest + if (distance < currentClosestDistance) { + currentClosestDistance = distance; + currentClosestKeyword = keyword; + } + } + + return currentClosestKeyword; +}; + +convert.keyword.rgb = function (keyword) { + return cssKeywords[keyword]; +}; + +convert.rgb.xyz = function (rgb) { + let r = rgb[0] / 255; + let g = rgb[1] / 255; + let b = rgb[2] / 255; + + // Assume sRGB + r = r > 0.04045 ? (((r + 0.055) / 1.055) ** 2.4) : (r / 12.92); + g = g > 0.04045 ? (((g + 0.055) / 1.055) ** 2.4) : (g / 12.92); + b = b > 0.04045 ? (((b + 0.055) / 1.055) ** 2.4) : (b / 12.92); + + const x = (r * 0.4124) + (g * 0.3576) + (b * 0.1805); + const y = (r * 0.2126) + (g * 0.7152) + (b * 0.0722); + const z = (r * 0.0193) + (g * 0.1192) + (b * 0.9505); + + return [x * 100, y * 100, z * 100]; +}; + +convert.rgb.lab = function (rgb) { + const xyz = convert.rgb.xyz(rgb); + let x = xyz[0]; + let y = xyz[1]; + let z = xyz[2]; + + x /= 95.047; + y /= 100; + z /= 108.883; + + x = x > 0.008856 ? (x ** (1 / 3)) : (7.787 * x) + (16 / 116); + y = y > 0.008856 ? (y ** (1 / 3)) : (7.787 * y) + (16 / 116); + z = z > 0.008856 ? (z ** (1 / 3)) : (7.787 * z) + (16 / 116); + + const l = (116 * y) - 16; + const a = 500 * (x - y); + const b = 200 * (y - z); + + return [l, a, b]; +}; + +convert.hsl.rgb = function (hsl) { + const h = hsl[0] / 360; + const s = hsl[1] / 100; + const l = hsl[2] / 100; + let t2; + let t3; + let val; + + if (s === 0) { + val = l * 255; + return [val, val, val]; + } + + if (l < 0.5) { + t2 = l * (1 + s); + } else { + t2 = l + s - l * s; + } + + const t1 = 2 * l - t2; + + const rgb = [0, 0, 0]; + for (let i = 0; i < 3; i++) { + t3 = h + 1 / 3 * -(i - 1); + if (t3 < 0) { + t3++; + } + + if (t3 > 1) { + t3--; + } + + if (6 * t3 < 1) { + val = t1 + (t2 - t1) * 6 * t3; + } else if (2 * t3 < 1) { + val = t2; + } else if (3 * t3 < 2) { + val = t1 + (t2 - t1) * (2 / 3 - t3) * 6; + } else { + val = t1; + } + + rgb[i] = val * 255; + } + + return rgb; +}; + +convert.hsl.hsv = function (hsl) { + const h = hsl[0]; + let s = hsl[1] / 100; + let l = hsl[2] / 100; + let smin = s; + const lmin = Math.max(l, 0.01); + + l *= 2; + s *= (l <= 1) ? l : 2 - l; + smin *= lmin <= 1 ? lmin : 2 - lmin; + const v = (l + s) / 2; + const sv = l === 0 ? 
(2 * smin) / (lmin + smin) : (2 * s) / (l + s); + + return [h, sv * 100, v * 100]; +}; + +convert.hsv.rgb = function (hsv) { + const h = hsv[0] / 60; + const s = hsv[1] / 100; + let v = hsv[2] / 100; + const hi = Math.floor(h) % 6; + + const f = h - Math.floor(h); + const p = 255 * v * (1 - s); + const q = 255 * v * (1 - (s * f)); + const t = 255 * v * (1 - (s * (1 - f))); + v *= 255; + + switch (hi) { + case 0: + return [v, t, p]; + case 1: + return [q, v, p]; + case 2: + return [p, v, t]; + case 3: + return [p, q, v]; + case 4: + return [t, p, v]; + case 5: + return [v, p, q]; + } +}; + +convert.hsv.hsl = function (hsv) { + const h = hsv[0]; + const s = hsv[1] / 100; + const v = hsv[2] / 100; + const vmin = Math.max(v, 0.01); + let sl; + let l; + + l = (2 - s) * v; + const lmin = (2 - s) * vmin; + sl = s * vmin; + sl /= (lmin <= 1) ? lmin : 2 - lmin; + sl = sl || 0; + l /= 2; + + return [h, sl * 100, l * 100]; +}; + +// http://dev.w3.org/csswg/css-color/#hwb-to-rgb +convert.hwb.rgb = function (hwb) { + const h = hwb[0] / 360; + let wh = hwb[1] / 100; + let bl = hwb[2] / 100; + const ratio = wh + bl; + let f; + + // Wh + bl cant be > 1 + if (ratio > 1) { + wh /= ratio; + bl /= ratio; + } + + const i = Math.floor(6 * h); + const v = 1 - bl; + f = 6 * h - i; + + if ((i & 0x01) !== 0) { + f = 1 - f; + } + + const n = wh + f * (v - wh); // Linear interpolation + + let r; + let g; + let b; + /* eslint-disable max-statements-per-line,no-multi-spaces */ + switch (i) { + default: + case 6: + case 0: r = v; g = n; b = wh; break; + case 1: r = n; g = v; b = wh; break; + case 2: r = wh; g = v; b = n; break; + case 3: r = wh; g = n; b = v; break; + case 4: r = n; g = wh; b = v; break; + case 5: r = v; g = wh; b = n; break; + } + /* eslint-enable max-statements-per-line,no-multi-spaces */ + + return [r * 255, g * 255, b * 255]; +}; + +convert.cmyk.rgb = function (cmyk) { + const c = cmyk[0] / 100; + const m = cmyk[1] / 100; + const y = cmyk[2] / 100; + const k = cmyk[3] / 100; + + const r = 1 - Math.min(1, c * (1 - k) + k); + const g = 1 - Math.min(1, m * (1 - k) + k); + const b = 1 - Math.min(1, y * (1 - k) + k); + + return [r * 255, g * 255, b * 255]; +}; + +convert.xyz.rgb = function (xyz) { + const x = xyz[0] / 100; + const y = xyz[1] / 100; + const z = xyz[2] / 100; + let r; + let g; + let b; + + r = (x * 3.2406) + (y * -1.5372) + (z * -0.4986); + g = (x * -0.9689) + (y * 1.8758) + (z * 0.0415); + b = (x * 0.0557) + (y * -0.2040) + (z * 1.0570); + + // Assume sRGB + r = r > 0.0031308 + ? ((1.055 * (r ** (1.0 / 2.4))) - 0.055) + : r * 12.92; + + g = g > 0.0031308 + ? ((1.055 * (g ** (1.0 / 2.4))) - 0.055) + : g * 12.92; + + b = b > 0.0031308 + ? ((1.055 * (b ** (1.0 / 2.4))) - 0.055) + : b * 12.92; + + r = Math.min(Math.max(0, r), 1); + g = Math.min(Math.max(0, g), 1); + b = Math.min(Math.max(0, b), 1); + + return [r * 255, g * 255, b * 255]; +}; + +convert.xyz.lab = function (xyz) { + let x = xyz[0]; + let y = xyz[1]; + let z = xyz[2]; + + x /= 95.047; + y /= 100; + z /= 108.883; + + x = x > 0.008856 ? (x ** (1 / 3)) : (7.787 * x) + (16 / 116); + y = y > 0.008856 ? (y ** (1 / 3)) : (7.787 * y) + (16 / 116); + z = z > 0.008856 ? 
(z ** (1 / 3)) : (7.787 * z) + (16 / 116); + + const l = (116 * y) - 16; + const a = 500 * (x - y); + const b = 200 * (y - z); + + return [l, a, b]; +}; + +convert.lab.xyz = function (lab) { + const l = lab[0]; + const a = lab[1]; + const b = lab[2]; + let x; + let y; + let z; + + y = (l + 16) / 116; + x = a / 500 + y; + z = y - b / 200; + + const y2 = y ** 3; + const x2 = x ** 3; + const z2 = z ** 3; + y = y2 > 0.008856 ? y2 : (y - 16 / 116) / 7.787; + x = x2 > 0.008856 ? x2 : (x - 16 / 116) / 7.787; + z = z2 > 0.008856 ? z2 : (z - 16 / 116) / 7.787; + + x *= 95.047; + y *= 100; + z *= 108.883; + + return [x, y, z]; +}; + +convert.lab.lch = function (lab) { + const l = lab[0]; + const a = lab[1]; + const b = lab[2]; + let h; + + const hr = Math.atan2(b, a); + h = hr * 360 / 2 / Math.PI; + + if (h < 0) { + h += 360; + } + + const c = Math.sqrt(a * a + b * b); + + return [l, c, h]; +}; + +convert.lch.lab = function (lch) { + const l = lch[0]; + const c = lch[1]; + const h = lch[2]; + + const hr = h / 360 * 2 * Math.PI; + const a = c * Math.cos(hr); + const b = c * Math.sin(hr); + + return [l, a, b]; +}; + +convert.rgb.ansi16 = function (args, saturation = null) { + const [r, g, b] = args; + let value = saturation === null ? convert.rgb.hsv(args)[2] : saturation; // Hsv -> ansi16 optimization + + value = Math.round(value / 50); + + if (value === 0) { + return 30; + } + + let ansi = 30 + + ((Math.round(b / 255) << 2) + | (Math.round(g / 255) << 1) + | Math.round(r / 255)); + + if (value === 2) { + ansi += 60; + } + + return ansi; +}; + +convert.hsv.ansi16 = function (args) { + // Optimization here; we already know the value and don't need to get + // it converted for us. + return convert.rgb.ansi16(convert.hsv.rgb(args), args[2]); +}; + +convert.rgb.ansi256 = function (args) { + const r = args[0]; + const g = args[1]; + const b = args[2]; + + // We use the extended greyscale palette here, with the exception of + // black and white. normal palette only has 4 greyscale shades. 
+ if (r === g && g === b) { + if (r < 8) { + return 16; + } + + if (r > 248) { + return 231; + } + + return Math.round(((r - 8) / 247) * 24) + 232; + } + + const ansi = 16 + + (36 * Math.round(r / 255 * 5)) + + (6 * Math.round(g / 255 * 5)) + + Math.round(b / 255 * 5); + + return ansi; +}; + +convert.ansi16.rgb = function (args) { + let color = args % 10; + + // Handle greyscale + if (color === 0 || color === 7) { + if (args > 50) { + color += 3.5; + } + + color = color / 10.5 * 255; + + return [color, color, color]; + } + + const mult = (~~(args > 50) + 1) * 0.5; + const r = ((color & 1) * mult) * 255; + const g = (((color >> 1) & 1) * mult) * 255; + const b = (((color >> 2) & 1) * mult) * 255; + + return [r, g, b]; +}; + +convert.ansi256.rgb = function (args) { + // Handle greyscale + if (args >= 232) { + const c = (args - 232) * 10 + 8; + return [c, c, c]; + } + + args -= 16; + + let rem; + const r = Math.floor(args / 36) / 5 * 255; + const g = Math.floor((rem = args % 36) / 6) / 5 * 255; + const b = (rem % 6) / 5 * 255; + + return [r, g, b]; +}; + +convert.rgb.hex = function (args) { + const integer = ((Math.round(args[0]) & 0xFF) << 16) + + ((Math.round(args[1]) & 0xFF) << 8) + + (Math.round(args[2]) & 0xFF); + + const string = integer.toString(16).toUpperCase(); + return '000000'.substring(string.length) + string; +}; + +convert.hex.rgb = function (args) { + const match = args.toString(16).match(/[a-f0-9]{6}|[a-f0-9]{3}/i); + if (!match) { + return [0, 0, 0]; + } + + let colorString = match[0]; + + if (match[0].length === 3) { + colorString = colorString.split('').map(char => { + return char + char; + }).join(''); + } + + const integer = parseInt(colorString, 16); + const r = (integer >> 16) & 0xFF; + const g = (integer >> 8) & 0xFF; + const b = integer & 0xFF; + + return [r, g, b]; +}; + +convert.rgb.hcg = function (rgb) { + const r = rgb[0] / 255; + const g = rgb[1] / 255; + const b = rgb[2] / 255; + const max = Math.max(Math.max(r, g), b); + const min = Math.min(Math.min(r, g), b); + const chroma = (max - min); + let grayscale; + let hue; + + if (chroma < 1) { + grayscale = min / (1 - chroma); + } else { + grayscale = 0; + } + + if (chroma <= 0) { + hue = 0; + } else + if (max === r) { + hue = ((g - b) / chroma) % 6; + } else + if (max === g) { + hue = 2 + (b - r) / chroma; + } else { + hue = 4 + (r - g) / chroma; + } + + hue /= 6; + hue %= 1; + + return [hue * 360, chroma * 100, grayscale * 100]; +}; + +convert.hsl.hcg = function (hsl) { + const s = hsl[1] / 100; + const l = hsl[2] / 100; + + const c = l < 0.5 ? 
(2.0 * s * l) : (2.0 * s * (1.0 - l)); + + let f = 0; + if (c < 1.0) { + f = (l - 0.5 * c) / (1.0 - c); + } + + return [hsl[0], c * 100, f * 100]; +}; + +convert.hsv.hcg = function (hsv) { + const s = hsv[1] / 100; + const v = hsv[2] / 100; + + const c = s * v; + let f = 0; + + if (c < 1.0) { + f = (v - c) / (1 - c); + } + + return [hsv[0], c * 100, f * 100]; +}; + +convert.hcg.rgb = function (hcg) { + const h = hcg[0] / 360; + const c = hcg[1] / 100; + const g = hcg[2] / 100; + + if (c === 0.0) { + return [g * 255, g * 255, g * 255]; + } + + const pure = [0, 0, 0]; + const hi = (h % 1) * 6; + const v = hi % 1; + const w = 1 - v; + let mg = 0; + + /* eslint-disable max-statements-per-line */ + switch (Math.floor(hi)) { + case 0: + pure[0] = 1; pure[1] = v; pure[2] = 0; break; + case 1: + pure[0] = w; pure[1] = 1; pure[2] = 0; break; + case 2: + pure[0] = 0; pure[1] = 1; pure[2] = v; break; + case 3: + pure[0] = 0; pure[1] = w; pure[2] = 1; break; + case 4: + pure[0] = v; pure[1] = 0; pure[2] = 1; break; + default: + pure[0] = 1; pure[1] = 0; pure[2] = w; + } + /* eslint-enable max-statements-per-line */ + + mg = (1.0 - c) * g; + + return [ + (c * pure[0] + mg) * 255, + (c * pure[1] + mg) * 255, + (c * pure[2] + mg) * 255 + ]; +}; + +convert.hcg.hsv = function (hcg) { + const c = hcg[1] / 100; + const g = hcg[2] / 100; + + const v = c + g * (1.0 - c); + let f = 0; + + if (v > 0.0) { + f = c / v; + } + + return [hcg[0], f * 100, v * 100]; +}; + +convert.hcg.hsl = function (hcg) { + const c = hcg[1] / 100; + const g = hcg[2] / 100; + + const l = g * (1.0 - c) + 0.5 * c; + let s = 0; + + if (l > 0.0 && l < 0.5) { + s = c / (2 * l); + } else + if (l >= 0.5 && l < 1.0) { + s = c / (2 * (1 - l)); + } + + return [hcg[0], s * 100, l * 100]; +}; + +convert.hcg.hwb = function (hcg) { + const c = hcg[1] / 100; + const g = hcg[2] / 100; + const v = c + g * (1.0 - c); + return [hcg[0], (v - c) * 100, (1 - v) * 100]; +}; + +convert.hwb.hcg = function (hwb) { + const w = hwb[1] / 100; + const b = hwb[2] / 100; + const v = 1 - b; + const c = v - w; + let g = 0; + + if (c < 1) { + g = (v - c) / (1 - c); + } + + return [hwb[0], c * 100, g * 100]; +}; + +convert.apple.rgb = function (apple) { + return [(apple[0] / 65535) * 255, (apple[1] / 65535) * 255, (apple[2] / 65535) * 255]; +}; + +convert.rgb.apple = function (rgb) { + return [(rgb[0] / 255) * 65535, (rgb[1] / 255) * 65535, (rgb[2] / 255) * 65535]; +}; + +convert.gray.rgb = function (args) { + return [args[0] / 100 * 255, args[0] / 100 * 255, args[0] / 100 * 255]; +}; + +convert.gray.hsl = function (args) { + return [0, 0, args[0]]; +}; + +convert.gray.hsv = convert.gray.hsl; + +convert.gray.hwb = function (gray) { + return [0, 100, gray[0]]; +}; + +convert.gray.cmyk = function (gray) { + return [0, 0, 0, gray[0]]; +}; + +convert.gray.lab = function (gray) { + return [gray[0], 0, 0]; +}; + +convert.gray.hex = function (gray) { + const val = Math.round(gray[0] / 100 * 255) & 0xFF; + const integer = (val << 16) + (val << 8) + val; + + const string = integer.toString(16).toUpperCase(); + return '000000'.substring(string.length) + string; +}; + +convert.rgb.gray = function (rgb) { + const val = (rgb[0] + rgb[1] + rgb[2]) / 3; + return [val / 255 * 100]; +}; diff --git a/node_modules/color-convert/index.js b/node_modules/color-convert/index.js new file mode 100644 index 000000000..b648e5737 --- /dev/null +++ b/node_modules/color-convert/index.js @@ -0,0 +1,81 @@ +const conversions = require('./conversions'); +const route = require('./route'); + +const 
convert = {}; + +const models = Object.keys(conversions); + +function wrapRaw(fn) { + const wrappedFn = function (...args) { + const arg0 = args[0]; + if (arg0 === undefined || arg0 === null) { + return arg0; + } + + if (arg0.length > 1) { + args = arg0; + } + + return fn(args); + }; + + // Preserve .conversion property if there is one + if ('conversion' in fn) { + wrappedFn.conversion = fn.conversion; + } + + return wrappedFn; +} + +function wrapRounded(fn) { + const wrappedFn = function (...args) { + const arg0 = args[0]; + + if (arg0 === undefined || arg0 === null) { + return arg0; + } + + if (arg0.length > 1) { + args = arg0; + } + + const result = fn(args); + + // We're assuming the result is an array here. + // see notice in conversions.js; don't use box types + // in conversion functions. + if (typeof result === 'object') { + for (let len = result.length, i = 0; i < len; i++) { + result[i] = Math.round(result[i]); + } + } + + return result; + }; + + // Preserve .conversion property if there is one + if ('conversion' in fn) { + wrappedFn.conversion = fn.conversion; + } + + return wrappedFn; +} + +models.forEach(fromModel => { + convert[fromModel] = {}; + + Object.defineProperty(convert[fromModel], 'channels', {value: conversions[fromModel].channels}); + Object.defineProperty(convert[fromModel], 'labels', {value: conversions[fromModel].labels}); + + const routes = route(fromModel); + const routeModels = Object.keys(routes); + + routeModels.forEach(toModel => { + const fn = routes[toModel]; + + convert[fromModel][toModel] = wrapRounded(fn); + convert[fromModel][toModel].raw = wrapRaw(fn); + }); +}); + +module.exports = convert; diff --git a/node_modules/color-convert/package.json b/node_modules/color-convert/package.json new file mode 100644 index 000000000..6e48000c7 --- /dev/null +++ b/node_modules/color-convert/package.json @@ -0,0 +1,48 @@ +{ + "name": "color-convert", + "description": "Plain color conversion functions", + "version": "2.0.1", + "author": "Heather Arthur ", + "license": "MIT", + "repository": "Qix-/color-convert", + "scripts": { + "pretest": "xo", + "test": "node test/basic.js" + }, + "engines": { + "node": ">=7.0.0" + }, + "keywords": [ + "color", + "colour", + "convert", + "converter", + "conversion", + "rgb", + "hsl", + "hsv", + "hwb", + "cmyk", + "ansi", + "ansi16" + ], + "files": [ + "index.js", + "conversions.js", + "route.js" + ], + "xo": { + "rules": { + "default-case": 0, + "no-inline-comments": 0, + "operator-linebreak": 0 + } + }, + "devDependencies": { + "chalk": "^2.4.2", + "xo": "^0.24.0" + }, + "dependencies": { + "color-name": "~1.1.4" + } +} diff --git a/node_modules/color-convert/route.js b/node_modules/color-convert/route.js new file mode 100644 index 000000000..1a08521b5 --- /dev/null +++ b/node_modules/color-convert/route.js @@ -0,0 +1,97 @@ +const conversions = require('./conversions'); + +/* + This function routes a model to all other models. + + all functions that are routed have a property `.conversion` attached + to the returned synthetic function. This property is an array + of strings, each with the steps in between the 'from' and 'to' + color models (inclusive). + + conversions that are not possible simply are not included. +*/ + +function buildGraph() { + const graph = {}; + // https://jsperf.com/object-keys-vs-for-in-with-closure/3 + const models = Object.keys(conversions); + + for (let len = models.length, i = 0; i < len; i++) { + graph[models[i]] = { + // http://jsperf.com/1-vs-infinity + // micro-opt, but this is simple. 
+ distance: -1, + parent: null + }; + } + + return graph; +} + +// https://en.wikipedia.org/wiki/Breadth-first_search +function deriveBFS(fromModel) { + const graph = buildGraph(); + const queue = [fromModel]; // Unshift -> queue -> pop + + graph[fromModel].distance = 0; + + while (queue.length) { + const current = queue.pop(); + const adjacents = Object.keys(conversions[current]); + + for (let len = adjacents.length, i = 0; i < len; i++) { + const adjacent = adjacents[i]; + const node = graph[adjacent]; + + if (node.distance === -1) { + node.distance = graph[current].distance + 1; + node.parent = current; + queue.unshift(adjacent); + } + } + } + + return graph; +} + +function link(from, to) { + return function (args) { + return to(from(args)); + }; +} + +function wrapConversion(toModel, graph) { + const path = [graph[toModel].parent, toModel]; + let fn = conversions[graph[toModel].parent][toModel]; + + let cur = graph[toModel].parent; + while (graph[cur].parent) { + path.unshift(graph[cur].parent); + fn = link(conversions[graph[cur].parent][cur], fn); + cur = graph[cur].parent; + } + + fn.conversion = path; + return fn; +} + +module.exports = function (fromModel) { + const graph = deriveBFS(fromModel); + const conversion = {}; + + const models = Object.keys(graph); + for (let len = models.length, i = 0; i < len; i++) { + const toModel = models[i]; + const node = graph[toModel]; + + if (node.parent === null) { + // No possible conversion, or this node is the source model. + continue; + } + + conversion[toModel] = wrapConversion(toModel, graph); + } + + return conversion; +}; + diff --git a/node_modules/color-name/LICENSE b/node_modules/color-name/LICENSE new file mode 100644 index 000000000..4d9802a89 --- /dev/null +++ b/node_modules/color-name/LICENSE @@ -0,0 +1,8 @@ +The MIT License (MIT) +Copyright (c) 2015 Dmitry Ivanov + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/color-name/README.md b/node_modules/color-name/README.md new file mode 100644 index 000000000..3611a6b52 --- /dev/null +++ b/node_modules/color-name/README.md @@ -0,0 +1,11 @@ +A JSON with color names and its values. Based on http://dev.w3.org/csswg/css-color/#named-colors. 
+ +[![NPM](https://nodei.co/npm/color-name.png?mini=true)](https://nodei.co/npm/color-name/) + + +```js +var colors = require('color-name'); +colors.red //[255,0,0] +``` + + diff --git a/node_modules/color-name/index.js b/node_modules/color-name/index.js new file mode 100644 index 000000000..e42aa68a5 --- /dev/null +++ b/node_modules/color-name/index.js @@ -0,0 +1,152 @@ +'use strict' + +module.exports = { + "aliceblue": [240, 248, 255], + "antiquewhite": [250, 235, 215], + "aqua": [0, 255, 255], + "aquamarine": [127, 255, 212], + "azure": [240, 255, 255], + "beige": [245, 245, 220], + "bisque": [255, 228, 196], + "black": [0, 0, 0], + "blanchedalmond": [255, 235, 205], + "blue": [0, 0, 255], + "blueviolet": [138, 43, 226], + "brown": [165, 42, 42], + "burlywood": [222, 184, 135], + "cadetblue": [95, 158, 160], + "chartreuse": [127, 255, 0], + "chocolate": [210, 105, 30], + "coral": [255, 127, 80], + "cornflowerblue": [100, 149, 237], + "cornsilk": [255, 248, 220], + "crimson": [220, 20, 60], + "cyan": [0, 255, 255], + "darkblue": [0, 0, 139], + "darkcyan": [0, 139, 139], + "darkgoldenrod": [184, 134, 11], + "darkgray": [169, 169, 169], + "darkgreen": [0, 100, 0], + "darkgrey": [169, 169, 169], + "darkkhaki": [189, 183, 107], + "darkmagenta": [139, 0, 139], + "darkolivegreen": [85, 107, 47], + "darkorange": [255, 140, 0], + "darkorchid": [153, 50, 204], + "darkred": [139, 0, 0], + "darksalmon": [233, 150, 122], + "darkseagreen": [143, 188, 143], + "darkslateblue": [72, 61, 139], + "darkslategray": [47, 79, 79], + "darkslategrey": [47, 79, 79], + "darkturquoise": [0, 206, 209], + "darkviolet": [148, 0, 211], + "deeppink": [255, 20, 147], + "deepskyblue": [0, 191, 255], + "dimgray": [105, 105, 105], + "dimgrey": [105, 105, 105], + "dodgerblue": [30, 144, 255], + "firebrick": [178, 34, 34], + "floralwhite": [255, 250, 240], + "forestgreen": [34, 139, 34], + "fuchsia": [255, 0, 255], + "gainsboro": [220, 220, 220], + "ghostwhite": [248, 248, 255], + "gold": [255, 215, 0], + "goldenrod": [218, 165, 32], + "gray": [128, 128, 128], + "green": [0, 128, 0], + "greenyellow": [173, 255, 47], + "grey": [128, 128, 128], + "honeydew": [240, 255, 240], + "hotpink": [255, 105, 180], + "indianred": [205, 92, 92], + "indigo": [75, 0, 130], + "ivory": [255, 255, 240], + "khaki": [240, 230, 140], + "lavender": [230, 230, 250], + "lavenderblush": [255, 240, 245], + "lawngreen": [124, 252, 0], + "lemonchiffon": [255, 250, 205], + "lightblue": [173, 216, 230], + "lightcoral": [240, 128, 128], + "lightcyan": [224, 255, 255], + "lightgoldenrodyellow": [250, 250, 210], + "lightgray": [211, 211, 211], + "lightgreen": [144, 238, 144], + "lightgrey": [211, 211, 211], + "lightpink": [255, 182, 193], + "lightsalmon": [255, 160, 122], + "lightseagreen": [32, 178, 170], + "lightskyblue": [135, 206, 250], + "lightslategray": [119, 136, 153], + "lightslategrey": [119, 136, 153], + "lightsteelblue": [176, 196, 222], + "lightyellow": [255, 255, 224], + "lime": [0, 255, 0], + "limegreen": [50, 205, 50], + "linen": [250, 240, 230], + "magenta": [255, 0, 255], + "maroon": [128, 0, 0], + "mediumaquamarine": [102, 205, 170], + "mediumblue": [0, 0, 205], + "mediumorchid": [186, 85, 211], + "mediumpurple": [147, 112, 219], + "mediumseagreen": [60, 179, 113], + "mediumslateblue": [123, 104, 238], + "mediumspringgreen": [0, 250, 154], + "mediumturquoise": [72, 209, 204], + "mediumvioletred": [199, 21, 133], + "midnightblue": [25, 25, 112], + "mintcream": [245, 255, 250], + "mistyrose": [255, 228, 225], + "moccasin": [255, 228, 181], + 
"navajowhite": [255, 222, 173], + "navy": [0, 0, 128], + "oldlace": [253, 245, 230], + "olive": [128, 128, 0], + "olivedrab": [107, 142, 35], + "orange": [255, 165, 0], + "orangered": [255, 69, 0], + "orchid": [218, 112, 214], + "palegoldenrod": [238, 232, 170], + "palegreen": [152, 251, 152], + "paleturquoise": [175, 238, 238], + "palevioletred": [219, 112, 147], + "papayawhip": [255, 239, 213], + "peachpuff": [255, 218, 185], + "peru": [205, 133, 63], + "pink": [255, 192, 203], + "plum": [221, 160, 221], + "powderblue": [176, 224, 230], + "purple": [128, 0, 128], + "rebeccapurple": [102, 51, 153], + "red": [255, 0, 0], + "rosybrown": [188, 143, 143], + "royalblue": [65, 105, 225], + "saddlebrown": [139, 69, 19], + "salmon": [250, 128, 114], + "sandybrown": [244, 164, 96], + "seagreen": [46, 139, 87], + "seashell": [255, 245, 238], + "sienna": [160, 82, 45], + "silver": [192, 192, 192], + "skyblue": [135, 206, 235], + "slateblue": [106, 90, 205], + "slategray": [112, 128, 144], + "slategrey": [112, 128, 144], + "snow": [255, 250, 250], + "springgreen": [0, 255, 127], + "steelblue": [70, 130, 180], + "tan": [210, 180, 140], + "teal": [0, 128, 128], + "thistle": [216, 191, 216], + "tomato": [255, 99, 71], + "turquoise": [64, 224, 208], + "violet": [238, 130, 238], + "wheat": [245, 222, 179], + "white": [255, 255, 255], + "whitesmoke": [245, 245, 245], + "yellow": [255, 255, 0], + "yellowgreen": [154, 205, 50] +}; diff --git a/node_modules/color-name/package.json b/node_modules/color-name/package.json new file mode 100644 index 000000000..7acc90285 --- /dev/null +++ b/node_modules/color-name/package.json @@ -0,0 +1,28 @@ +{ + "name": "color-name", + "version": "1.1.4", + "description": "A list of color names and its values", + "main": "index.js", + "files": [ + "index.js" + ], + "scripts": { + "test": "node test.js" + }, + "repository": { + "type": "git", + "url": "git@github.com:colorjs/color-name.git" + }, + "keywords": [ + "color-name", + "color", + "color-keyword", + "keyword" + ], + "author": "DY ", + "license": "MIT", + "bugs": { + "url": "https://github.com/colorjs/color-name/issues" + }, + "homepage": "https://github.com/colorjs/color-name" +} diff --git a/node_modules/color-string/LICENSE b/node_modules/color-string/LICENSE new file mode 100644 index 000000000..a8b08d4f3 --- /dev/null +++ b/node_modules/color-string/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) 2011 Heather Arthur + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ diff --git a/node_modules/color-string/README.md b/node_modules/color-string/README.md new file mode 100644 index 000000000..e58670c64 --- /dev/null +++ b/node_modules/color-string/README.md @@ -0,0 +1,62 @@ +# color-string + +> library for parsing and generating CSS color strings. + +## Install + +With [npm](http://npmjs.org/): + +```console +$ npm install color-string +``` + +## Usage + +### Parsing + +```js +colorString.get('#FFF') // {model: 'rgb', value: [255, 255, 255, 1]} +colorString.get('#FFFA') // {model: 'rgb', value: [255, 255, 255, 0.67]} +colorString.get('#FFFFFFAA') // {model: 'rgb', value: [255, 255, 255, 0.67]} +colorString.get('hsl(360, 100%, 50%)') // {model: 'hsl', value: [0, 100, 50, 1]} +colorString.get('hsl(360 100% 50%)') // {model: 'hsl', value: [0, 100, 50, 1]} +colorString.get('hwb(60, 3%, 60%)') // {model: 'hwb', value: [60, 3, 60, 1]} + +colorString.get.rgb('#FFF') // [255, 255, 255, 1] +colorString.get.rgb('blue') // [0, 0, 255, 1] +colorString.get.rgb('rgba(200, 60, 60, 0.3)') // [200, 60, 60, 0.3] +colorString.get.rgb('rgba(200 60 60 / 0.3)') // [200, 60, 60, 0.3] +colorString.get.rgb('rgba(200 60 60 / 30%)') // [200, 60, 60, 0.3] +colorString.get.rgb('rgb(200, 200, 200)') // [200, 200, 200, 1] +colorString.get.rgb('rgb(200 200 200)') // [200, 200, 200, 1] + +colorString.get.hsl('hsl(360, 100%, 50%)') // [0, 100, 50, 1] +colorString.get.hsl('hsl(360 100% 50%)') // [0, 100, 50, 1] +colorString.get.hsl('hsla(360, 60%, 50%, 0.4)') // [0, 60, 50, 0.4] +colorString.get.hsl('hsl(360 60% 50% / 0.4)') // [0, 60, 50, 0.4] + +colorString.get.hwb('hwb(60, 3%, 60%)') // [60, 3, 60, 1] +colorString.get.hwb('hwb(60, 3%, 60%, 0.6)') // [60, 3, 60, 0.6] + +colorString.get.rgb('invalid color string') // null +``` + +### Generation + +```js +colorString.to.hex([255, 255, 255]) // "#FFFFFF" +colorString.to.hex([0, 0, 255, 0.4]) // "#0000FF66" +colorString.to.hex([0, 0, 255], 0.4) // "#0000FF66" +colorString.to.rgb([255, 255, 255]) // "rgb(255, 255, 255)" +colorString.to.rgb([0, 0, 255, 0.4]) // "rgba(0, 0, 255, 0.4)" +colorString.to.rgb([0, 0, 255], 0.4) // "rgba(0, 0, 255, 0.4)" +colorString.to.rgb.percent([0, 0, 255]) // "rgb(0%, 0%, 100%)" +colorString.to.keyword([255, 255, 0]) // "yellow" +colorString.to.hsl([360, 100, 100]) // "hsl(360, 100%, 100%)" +colorString.to.hwb([50, 3, 15]) // "hwb(50, 3%, 15%)" + +// all functions also support swizzling +colorString.to.rgb(0, [0, 255], 0.4) // "rgba(0, 0, 255, 0.4)" +colorString.to.rgb([0, 0], [255], 0.4) // "rgba(0, 0, 255, 0.4)" +colorString.to.rgb([0], 0, [255, 0.4]) // "rgba(0, 0, 255, 0.4)" +``` diff --git a/node_modules/color-string/index.js b/node_modules/color-string/index.js new file mode 100644 index 000000000..dd5d2b7b7 --- /dev/null +++ b/node_modules/color-string/index.js @@ -0,0 +1,242 @@ +/* MIT license */ +var colorNames = require('color-name'); +var swizzle = require('simple-swizzle'); +var hasOwnProperty = Object.hasOwnProperty; + +var reverseNames = Object.create(null); + +// create a list of reverse color names +for (var name in colorNames) { + if (hasOwnProperty.call(colorNames, name)) { + reverseNames[colorNames[name]] = name; + } +} + +var cs = module.exports = { + to: {}, + get: {} +}; + +cs.get = function (string) { + var prefix = string.substring(0, 3).toLowerCase(); + var val; + var model; + switch (prefix) { + case 'hsl': + val = cs.get.hsl(string); + model = 'hsl'; + break; + case 'hwb': + val = cs.get.hwb(string); + model = 'hwb'; + break; + default: + val = cs.get.rgb(string); + model = 'rgb'; + 
break; + } + + if (!val) { + return null; + } + + return {model: model, value: val}; +}; + +cs.get.rgb = function (string) { + if (!string) { + return null; + } + + var abbr = /^#([a-f0-9]{3,4})$/i; + var hex = /^#([a-f0-9]{6})([a-f0-9]{2})?$/i; + var rgba = /^rgba?\(\s*([+-]?\d+)(?=[\s,])\s*(?:,\s*)?([+-]?\d+)(?=[\s,])\s*(?:,\s*)?([+-]?\d+)\s*(?:[,|\/]\s*([+-]?[\d\.]+)(%?)\s*)?\)$/; + var per = /^rgba?\(\s*([+-]?[\d\.]+)\%\s*,?\s*([+-]?[\d\.]+)\%\s*,?\s*([+-]?[\d\.]+)\%\s*(?:[,|\/]\s*([+-]?[\d\.]+)(%?)\s*)?\)$/; + var keyword = /^(\w+)$/; + + var rgb = [0, 0, 0, 1]; + var match; + var i; + var hexAlpha; + + if (match = string.match(hex)) { + hexAlpha = match[2]; + match = match[1]; + + for (i = 0; i < 3; i++) { + // https://jsperf.com/slice-vs-substr-vs-substring-methods-long-string/19 + var i2 = i * 2; + rgb[i] = parseInt(match.slice(i2, i2 + 2), 16); + } + + if (hexAlpha) { + rgb[3] = parseInt(hexAlpha, 16) / 255; + } + } else if (match = string.match(abbr)) { + match = match[1]; + hexAlpha = match[3]; + + for (i = 0; i < 3; i++) { + rgb[i] = parseInt(match[i] + match[i], 16); + } + + if (hexAlpha) { + rgb[3] = parseInt(hexAlpha + hexAlpha, 16) / 255; + } + } else if (match = string.match(rgba)) { + for (i = 0; i < 3; i++) { + rgb[i] = parseInt(match[i + 1], 0); + } + + if (match[4]) { + if (match[5]) { + rgb[3] = parseFloat(match[4]) * 0.01; + } else { + rgb[3] = parseFloat(match[4]); + } + } + } else if (match = string.match(per)) { + for (i = 0; i < 3; i++) { + rgb[i] = Math.round(parseFloat(match[i + 1]) * 2.55); + } + + if (match[4]) { + if (match[5]) { + rgb[3] = parseFloat(match[4]) * 0.01; + } else { + rgb[3] = parseFloat(match[4]); + } + } + } else if (match = string.match(keyword)) { + if (match[1] === 'transparent') { + return [0, 0, 0, 0]; + } + + if (!hasOwnProperty.call(colorNames, match[1])) { + return null; + } + + rgb = colorNames[match[1]]; + rgb[3] = 1; + + return rgb; + } else { + return null; + } + + for (i = 0; i < 3; i++) { + rgb[i] = clamp(rgb[i], 0, 255); + } + rgb[3] = clamp(rgb[3], 0, 1); + + return rgb; +}; + +cs.get.hsl = function (string) { + if (!string) { + return null; + } + + var hsl = /^hsla?\(\s*([+-]?(?:\d{0,3}\.)?\d+)(?:deg)?\s*,?\s*([+-]?[\d\.]+)%\s*,?\s*([+-]?[\d\.]+)%\s*(?:[,|\/]\s*([+-]?(?=\.\d|\d)(?:0|[1-9]\d*)?(?:\.\d*)?(?:[eE][+-]?\d+)?)\s*)?\)$/; + var match = string.match(hsl); + + if (match) { + var alpha = parseFloat(match[4]); + var h = ((parseFloat(match[1]) % 360) + 360) % 360; + var s = clamp(parseFloat(match[2]), 0, 100); + var l = clamp(parseFloat(match[3]), 0, 100); + var a = clamp(isNaN(alpha) ? 1 : alpha, 0, 1); + + return [h, s, l, a]; + } + + return null; +}; + +cs.get.hwb = function (string) { + if (!string) { + return null; + } + + var hwb = /^hwb\(\s*([+-]?\d{0,3}(?:\.\d+)?)(?:deg)?\s*,\s*([+-]?[\d\.]+)%\s*,\s*([+-]?[\d\.]+)%\s*(?:,\s*([+-]?(?=\.\d|\d)(?:0|[1-9]\d*)?(?:\.\d*)?(?:[eE][+-]?\d+)?)\s*)?\)$/; + var match = string.match(hwb); + + if (match) { + var alpha = parseFloat(match[4]); + var h = ((parseFloat(match[1]) % 360) + 360) % 360; + var w = clamp(parseFloat(match[2]), 0, 100); + var b = clamp(parseFloat(match[3]), 0, 100); + var a = clamp(isNaN(alpha) ? 1 : alpha, 0, 1); + return [h, w, b, a]; + } + + return null; +}; + +cs.to.hex = function () { + var rgba = swizzle(arguments); + + return ( + '#' + + hexDouble(rgba[0]) + + hexDouble(rgba[1]) + + hexDouble(rgba[2]) + + (rgba[3] < 1 + ? 
(hexDouble(Math.round(rgba[3] * 255))) + : '') + ); +}; + +cs.to.rgb = function () { + var rgba = swizzle(arguments); + + return rgba.length < 4 || rgba[3] === 1 + ? 'rgb(' + Math.round(rgba[0]) + ', ' + Math.round(rgba[1]) + ', ' + Math.round(rgba[2]) + ')' + : 'rgba(' + Math.round(rgba[0]) + ', ' + Math.round(rgba[1]) + ', ' + Math.round(rgba[2]) + ', ' + rgba[3] + ')'; +}; + +cs.to.rgb.percent = function () { + var rgba = swizzle(arguments); + + var r = Math.round(rgba[0] / 255 * 100); + var g = Math.round(rgba[1] / 255 * 100); + var b = Math.round(rgba[2] / 255 * 100); + + return rgba.length < 4 || rgba[3] === 1 + ? 'rgb(' + r + '%, ' + g + '%, ' + b + '%)' + : 'rgba(' + r + '%, ' + g + '%, ' + b + '%, ' + rgba[3] + ')'; +}; + +cs.to.hsl = function () { + var hsla = swizzle(arguments); + return hsla.length < 4 || hsla[3] === 1 + ? 'hsl(' + hsla[0] + ', ' + hsla[1] + '%, ' + hsla[2] + '%)' + : 'hsla(' + hsla[0] + ', ' + hsla[1] + '%, ' + hsla[2] + '%, ' + hsla[3] + ')'; +}; + +// hwb is a bit different than rgb(a) & hsl(a) since there is no alpha specific syntax +// (hwb have alpha optional & 1 is default value) +cs.to.hwb = function () { + var hwba = swizzle(arguments); + + var a = ''; + if (hwba.length >= 4 && hwba[3] !== 1) { + a = ', ' + hwba[3]; + } + + return 'hwb(' + hwba[0] + ', ' + hwba[1] + '%, ' + hwba[2] + '%' + a + ')'; +}; + +cs.to.keyword = function (rgb) { + return reverseNames[rgb.slice(0, 3)]; +}; + +// helpers +function clamp(num, min, max) { + return Math.min(Math.max(min, num), max); +} + +function hexDouble(num) { + var str = Math.round(num).toString(16).toUpperCase(); + return (str.length < 2) ? '0' + str : str; +} diff --git a/node_modules/color-string/package.json b/node_modules/color-string/package.json new file mode 100644 index 000000000..f34ee9801 --- /dev/null +++ b/node_modules/color-string/package.json @@ -0,0 +1,39 @@ +{ + "name": "color-string", + "description": "Parser and generator for CSS color strings", + "version": "1.9.1", + "author": "Heather Arthur ", + "contributors": [ + "Maxime Thirouin", + "Dyma Ywanov ", + "Josh Junon" + ], + "repository": "Qix-/color-string", + "scripts": { + "pretest": "xo", + "test": "node test/basic.js" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "xo": { + "rules": { + "no-cond-assign": 0, + "operator-linebreak": 0 + } + }, + "dependencies": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + }, + "devDependencies": { + "xo": "^0.12.1" + }, + "keywords": [ + "color", + "colour", + "rgb", + "css" + ] +} diff --git a/node_modules/color/LICENSE b/node_modules/color/LICENSE new file mode 100644 index 000000000..68c864eee --- /dev/null +++ b/node_modules/color/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) 2012 Heather Arthur + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/color/README.md b/node_modules/color/README.md new file mode 100644 index 000000000..674a7318b --- /dev/null +++ b/node_modules/color/README.md @@ -0,0 +1,123 @@ +# color + +> JavaScript library for immutable color conversion and manipulation with support for CSS color strings. + +```js +const color = Color('#7743CE').alpha(0.5).lighten(0.5); +console.log(color.hsl().string()); // 'hsla(262, 59%, 81%, 0.5)' + +console.log(color.cmyk().round().array()); // [ 16, 25, 0, 8, 0.5 ] + +console.log(color.ansi256().object()); // { ansi256: 183, alpha: 0.5 } +``` + +## Install +```console +$ npm install color +``` + +## Usage +```js +const Color = require('color'); +``` + +### Constructors +```js +const color = Color('rgb(255, 255, 255)') +const color = Color({r: 255, g: 255, b: 255}) +const color = Color.rgb(255, 255, 255) +const color = Color.rgb([255, 255, 255]) +``` + +Set the values for individual channels with `alpha`, `red`, `green`, `blue`, `hue`, `saturationl` (hsl), `saturationv` (hsv), `lightness`, `whiteness`, `blackness`, `cyan`, `magenta`, `yellow`, `black` + +String constructors are handled by [color-string](https://www.npmjs.com/package/color-string) + +### Getters +```js +color.hsl(); +``` +Convert a color to a different space (`hsl()`, `cmyk()`, etc.). + +```js +color.object(); // {r: 255, g: 255, b: 255} +``` +Get a hash of the color value. Reflects the color's current model (see above). + +```js +color.rgb().array() // [255, 255, 255] +``` +Get an array of the values with `array()`. Reflects the color's current model (see above). + +```js +color.rgbNumber() // 16777215 (0xffffff) +``` +Get the rgb number value. + +```js +color.hex() // #ffffff +``` +Get the hex value. (**NOTE:** `.hex()` does not return alpha values; use `.hexa()` for an RGBA representation) + +```js +color.red() // 255 +``` +Get the value for an individual channel. + +### CSS Strings +```js +color.hsl().string() // 'hsl(320, 50%, 100%)' +``` + +Calling `.string()` with a number rounds the numbers to that decimal place. It defaults to 1. + +### Luminosity +```js +color.luminosity(); // 0.412 +``` +The [WCAG luminosity](http://www.w3.org/TR/WCAG20/#relativeluminancedef) of the color. 0 is black, 1 is white. + +```js +color.contrast(Color("blue")) // 12 +``` +The [WCAG contrast ratio](http://www.w3.org/TR/WCAG20/#contrast-ratiodef) to another color, from 1 (same color) to 21 (contrast b/w white and black). + +```js +color.isLight(); // true +color.isDark(); // false +``` +Get whether the color is "light" or "dark", useful for deciding text color. 
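The luminosity and contrast getters above combine naturally when choosing readable text for a coloured background. The following is an editor's illustrative sketch, not part of the vendored README: it only uses the `Color` constructor and the `contrast()` method shown earlier, and the helper name `textColorFor` is made up for the example.

```js
const Color = require('color');

// Pick black or white text for a background colour, whichever gives the
// higher WCAG contrast ratio (contrast() returns a value between 1 and 21).
function textColorFor(background) {
	const bg = Color(background);
	return bg.contrast(Color('white')) >= bg.contrast(Color('black'))
		? '#FFFFFF'
		: '#000000';
}

textColorFor('#7743CE'); // '#FFFFFF' (white has the higher contrast against this purple)
```

A check like this is close to what `isLight()`/`isDark()` do with their YIQ approximation, only using the WCAG contrast ratio instead.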
+ +### Manipulation +```js +color.negate() // rgb(0, 100, 255) -> rgb(255, 155, 0) + +color.lighten(0.5) // hsl(100, 50%, 50%) -> hsl(100, 50%, 75%) +color.lighten(0.5) // hsl(100, 50%, 0) -> hsl(100, 50%, 0) +color.darken(0.5) // hsl(100, 50%, 50%) -> hsl(100, 50%, 25%) +color.darken(0.5) // hsl(100, 50%, 0) -> hsl(100, 50%, 0) + +color.lightness(50) // hsl(100, 50%, 10%) -> hsl(100, 50%, 50%) + +color.saturate(0.5) // hsl(100, 50%, 50%) -> hsl(100, 75%, 50%) +color.desaturate(0.5) // hsl(100, 50%, 50%) -> hsl(100, 25%, 50%) +color.grayscale() // #5CBF54 -> #969696 + +color.whiten(0.5) // hwb(100, 50%, 50%) -> hwb(100, 75%, 50%) +color.blacken(0.5) // hwb(100, 50%, 50%) -> hwb(100, 50%, 75%) + +color.fade(0.5) // rgba(10, 10, 10, 0.8) -> rgba(10, 10, 10, 0.4) +color.opaquer(0.5) // rgba(10, 10, 10, 0.8) -> rgba(10, 10, 10, 1.0) + +color.rotate(180) // hsl(60, 20%, 20%) -> hsl(240, 20%, 20%) +color.rotate(-90) // hsl(60, 20%, 20%) -> hsl(330, 20%, 20%) + +color.mix(Color("yellow")) // cyan -> rgb(128, 255, 128) +color.mix(Color("yellow"), 0.3) // cyan -> rgb(77, 255, 179) + +// chaining +color.green(100).grayscale().lighten(0.6) +``` + +## Propers +The API was inspired by [color-js](https://github.com/brehaut/color-js). Manipulation functions by CSS tools like Sass, LESS, and Stylus. diff --git a/node_modules/color/index.js b/node_modules/color/index.js new file mode 100644 index 000000000..ddb0b5df6 --- /dev/null +++ b/node_modules/color/index.js @@ -0,0 +1,496 @@ +const colorString = require('color-string'); +const convert = require('color-convert'); + +const skippedModels = [ + // To be honest, I don't really feel like keyword belongs in color convert, but eh. + 'keyword', + + // Gray conflicts with some method names, and has its own method defined. + 'gray', + + // Shouldn't really be in color-convert either... + 'hex', +]; + +const hashedModelKeys = {}; +for (const model of Object.keys(convert)) { + hashedModelKeys[[...convert[model].labels].sort().join('')] = model; +} + +const limiters = {}; + +function Color(object, model) { + if (!(this instanceof Color)) { + return new Color(object, model); + } + + if (model && model in skippedModels) { + model = null; + } + + if (model && !(model in convert)) { + throw new Error('Unknown model: ' + model); + } + + let i; + let channels; + + if (object == null) { // eslint-disable-line no-eq-null,eqeqeq + this.model = 'rgb'; + this.color = [0, 0, 0]; + this.valpha = 1; + } else if (object instanceof Color) { + this.model = object.model; + this.color = [...object.color]; + this.valpha = object.valpha; + } else if (typeof object === 'string') { + const result = colorString.get(object); + if (result === null) { + throw new Error('Unable to parse color from string: ' + object); + } + + this.model = result.model; + channels = convert[this.model].channels; + this.color = result.value.slice(0, channels); + this.valpha = typeof result.value[channels] === 'number' ? result.value[channels] : 1; + } else if (object.length > 0) { + this.model = model || 'rgb'; + channels = convert[this.model].channels; + const newArray = Array.prototype.slice.call(object, 0, channels); + this.color = zeroArray(newArray, channels); + this.valpha = typeof object[channels] === 'number' ? object[channels] : 1; + } else if (typeof object === 'number') { + // This is always RGB - can be converted later on. 
+ this.model = 'rgb'; + this.color = [ + (object >> 16) & 0xFF, + (object >> 8) & 0xFF, + object & 0xFF, + ]; + this.valpha = 1; + } else { + this.valpha = 1; + + const keys = Object.keys(object); + if ('alpha' in object) { + keys.splice(keys.indexOf('alpha'), 1); + this.valpha = typeof object.alpha === 'number' ? object.alpha : 0; + } + + const hashedKeys = keys.sort().join(''); + if (!(hashedKeys in hashedModelKeys)) { + throw new Error('Unable to parse color from object: ' + JSON.stringify(object)); + } + + this.model = hashedModelKeys[hashedKeys]; + + const {labels} = convert[this.model]; + const color = []; + for (i = 0; i < labels.length; i++) { + color.push(object[labels[i]]); + } + + this.color = zeroArray(color); + } + + // Perform limitations (clamping, etc.) + if (limiters[this.model]) { + channels = convert[this.model].channels; + for (i = 0; i < channels; i++) { + const limit = limiters[this.model][i]; + if (limit) { + this.color[i] = limit(this.color[i]); + } + } + } + + this.valpha = Math.max(0, Math.min(1, this.valpha)); + + if (Object.freeze) { + Object.freeze(this); + } +} + +Color.prototype = { + toString() { + return this.string(); + }, + + toJSON() { + return this[this.model](); + }, + + string(places) { + let self = this.model in colorString.to ? this : this.rgb(); + self = self.round(typeof places === 'number' ? places : 1); + const args = self.valpha === 1 ? self.color : [...self.color, this.valpha]; + return colorString.to[self.model](args); + }, + + percentString(places) { + const self = this.rgb().round(typeof places === 'number' ? places : 1); + const args = self.valpha === 1 ? self.color : [...self.color, this.valpha]; + return colorString.to.rgb.percent(args); + }, + + array() { + return this.valpha === 1 ? [...this.color] : [...this.color, this.valpha]; + }, + + object() { + const result = {}; + const {channels} = convert[this.model]; + const {labels} = convert[this.model]; + + for (let i = 0; i < channels; i++) { + result[labels[i]] = this.color[i]; + } + + if (this.valpha !== 1) { + result.alpha = this.valpha; + } + + return result; + }, + + unitArray() { + const rgb = this.rgb().color; + rgb[0] /= 255; + rgb[1] /= 255; + rgb[2] /= 255; + + if (this.valpha !== 1) { + rgb.push(this.valpha); + } + + return rgb; + }, + + unitObject() { + const rgb = this.rgb().object(); + rgb.r /= 255; + rgb.g /= 255; + rgb.b /= 255; + + if (this.valpha !== 1) { + rgb.alpha = this.valpha; + } + + return rgb; + }, + + round(places) { + places = Math.max(places || 0, 0); + return new Color([...this.color.map(roundToPlace(places)), this.valpha], this.model); + }, + + alpha(value) { + if (value !== undefined) { + return new Color([...this.color, Math.max(0, Math.min(1, value))], this.model); + } + + return this.valpha; + }, + + // Rgb + red: getset('rgb', 0, maxfn(255)), + green: getset('rgb', 1, maxfn(255)), + blue: getset('rgb', 2, maxfn(255)), + + hue: getset(['hsl', 'hsv', 'hsl', 'hwb', 'hcg'], 0, value => ((value % 360) + 360) % 360), + + saturationl: getset('hsl', 1, maxfn(100)), + lightness: getset('hsl', 2, maxfn(100)), + + saturationv: getset('hsv', 1, maxfn(100)), + value: getset('hsv', 2, maxfn(100)), + + chroma: getset('hcg', 1, maxfn(100)), + gray: getset('hcg', 2, maxfn(100)), + + white: getset('hwb', 1, maxfn(100)), + wblack: getset('hwb', 2, maxfn(100)), + + cyan: getset('cmyk', 0, maxfn(100)), + magenta: getset('cmyk', 1, maxfn(100)), + yellow: getset('cmyk', 2, maxfn(100)), + black: getset('cmyk', 3, maxfn(100)), + + x: getset('xyz', 0, maxfn(95.047)), + y: 
getset('xyz', 1, maxfn(100)), + z: getset('xyz', 2, maxfn(108.833)), + + l: getset('lab', 0, maxfn(100)), + a: getset('lab', 1), + b: getset('lab', 2), + + keyword(value) { + if (value !== undefined) { + return new Color(value); + } + + return convert[this.model].keyword(this.color); + }, + + hex(value) { + if (value !== undefined) { + return new Color(value); + } + + return colorString.to.hex(this.rgb().round().color); + }, + + hexa(value) { + if (value !== undefined) { + return new Color(value); + } + + const rgbArray = this.rgb().round().color; + + let alphaHex = Math.round(this.valpha * 255).toString(16).toUpperCase(); + if (alphaHex.length === 1) { + alphaHex = '0' + alphaHex; + } + + return colorString.to.hex(rgbArray) + alphaHex; + }, + + rgbNumber() { + const rgb = this.rgb().color; + return ((rgb[0] & 0xFF) << 16) | ((rgb[1] & 0xFF) << 8) | (rgb[2] & 0xFF); + }, + + luminosity() { + // http://www.w3.org/TR/WCAG20/#relativeluminancedef + const rgb = this.rgb().color; + + const lum = []; + for (const [i, element] of rgb.entries()) { + const chan = element / 255; + lum[i] = (chan <= 0.04045) ? chan / 12.92 : ((chan + 0.055) / 1.055) ** 2.4; + } + + return 0.2126 * lum[0] + 0.7152 * lum[1] + 0.0722 * lum[2]; + }, + + contrast(color2) { + // http://www.w3.org/TR/WCAG20/#contrast-ratiodef + const lum1 = this.luminosity(); + const lum2 = color2.luminosity(); + + if (lum1 > lum2) { + return (lum1 + 0.05) / (lum2 + 0.05); + } + + return (lum2 + 0.05) / (lum1 + 0.05); + }, + + level(color2) { + // https://www.w3.org/TR/WCAG/#contrast-enhanced + const contrastRatio = this.contrast(color2); + if (contrastRatio >= 7) { + return 'AAA'; + } + + return (contrastRatio >= 4.5) ? 'AA' : ''; + }, + + isDark() { + // YIQ equation from http://24ways.org/2010/calculating-color-contrast + const rgb = this.rgb().color; + const yiq = (rgb[0] * 2126 + rgb[1] * 7152 + rgb[2] * 722) / 10000; + return yiq < 128; + }, + + isLight() { + return !this.isDark(); + }, + + negate() { + const rgb = this.rgb(); + for (let i = 0; i < 3; i++) { + rgb.color[i] = 255 - rgb.color[i]; + } + + return rgb; + }, + + lighten(ratio) { + const hsl = this.hsl(); + hsl.color[2] += hsl.color[2] * ratio; + return hsl; + }, + + darken(ratio) { + const hsl = this.hsl(); + hsl.color[2] -= hsl.color[2] * ratio; + return hsl; + }, + + saturate(ratio) { + const hsl = this.hsl(); + hsl.color[1] += hsl.color[1] * ratio; + return hsl; + }, + + desaturate(ratio) { + const hsl = this.hsl(); + hsl.color[1] -= hsl.color[1] * ratio; + return hsl; + }, + + whiten(ratio) { + const hwb = this.hwb(); + hwb.color[1] += hwb.color[1] * ratio; + return hwb; + }, + + blacken(ratio) { + const hwb = this.hwb(); + hwb.color[2] += hwb.color[2] * ratio; + return hwb; + }, + + grayscale() { + // http://en.wikipedia.org/wiki/Grayscale#Converting_color_to_grayscale + const rgb = this.rgb().color; + const value = rgb[0] * 0.3 + rgb[1] * 0.59 + rgb[2] * 0.11; + return Color.rgb(value, value, value); + }, + + fade(ratio) { + return this.alpha(this.valpha - (this.valpha * ratio)); + }, + + opaquer(ratio) { + return this.alpha(this.valpha + (this.valpha * ratio)); + }, + + rotate(degrees) { + const hsl = this.hsl(); + let hue = hsl.color[0]; + hue = (hue + degrees) % 360; + hue = hue < 0 ? 
360 + hue : hue; + hsl.color[0] = hue; + return hsl; + }, + + mix(mixinColor, weight) { + // Ported from sass implementation in C + // https://github.com/sass/libsass/blob/0e6b4a2850092356aa3ece07c6b249f0221caced/functions.cpp#L209 + if (!mixinColor || !mixinColor.rgb) { + throw new Error('Argument to "mix" was not a Color instance, but rather an instance of ' + typeof mixinColor); + } + + const color1 = mixinColor.rgb(); + const color2 = this.rgb(); + const p = weight === undefined ? 0.5 : weight; + + const w = 2 * p - 1; + const a = color1.alpha() - color2.alpha(); + + const w1 = (((w * a === -1) ? w : (w + a) / (1 + w * a)) + 1) / 2; + const w2 = 1 - w1; + + return Color.rgb( + w1 * color1.red() + w2 * color2.red(), + w1 * color1.green() + w2 * color2.green(), + w1 * color1.blue() + w2 * color2.blue(), + color1.alpha() * p + color2.alpha() * (1 - p)); + }, +}; + +// Model conversion methods and static constructors +for (const model of Object.keys(convert)) { + if (skippedModels.includes(model)) { + continue; + } + + const {channels} = convert[model]; + + // Conversion methods + Color.prototype[model] = function (...args) { + if (this.model === model) { + return new Color(this); + } + + if (args.length > 0) { + return new Color(args, model); + } + + return new Color([...assertArray(convert[this.model][model].raw(this.color)), this.valpha], model); + }; + + // 'static' construction methods + Color[model] = function (...args) { + let color = args[0]; + if (typeof color === 'number') { + color = zeroArray(args, channels); + } + + return new Color(color, model); + }; +} + +function roundTo(number, places) { + return Number(number.toFixed(places)); +} + +function roundToPlace(places) { + return function (number) { + return roundTo(number, places); + }; +} + +function getset(model, channel, modifier) { + model = Array.isArray(model) ? model : [model]; + + for (const m of model) { + (limiters[m] || (limiters[m] = []))[channel] = modifier; + } + + model = model[0]; + + return function (value) { + let result; + + if (value !== undefined) { + if (modifier) { + value = modifier(value); + } + + result = this[model](); + result.color[channel] = value; + return result; + } + + result = this[model]().color[channel]; + if (modifier) { + result = modifier(result); + } + + return result; + }; +} + +function maxfn(max) { + return function (v) { + return Math.max(0, Math.min(max, v)); + }; +} + +function assertArray(value) { + return Array.isArray(value) ? 
value : [value]; +} + +function zeroArray(array, length) { + for (let i = 0; i < length; i++) { + if (typeof array[i] !== 'number') { + array[i] = 0; + } + } + + return array; +} + +module.exports = Color; diff --git a/node_modules/color/package.json b/node_modules/color/package.json new file mode 100644 index 000000000..4cdb6e310 --- /dev/null +++ b/node_modules/color/package.json @@ -0,0 +1,47 @@ +{ + "name": "color", + "version": "4.2.3", + "description": "Color conversion and manipulation with CSS string support", + "sideEffects": false, + "keywords": [ + "color", + "colour", + "css" + ], + "authors": [ + "Josh Junon ", + "Heather Arthur ", + "Maxime Thirouin" + ], + "license": "MIT", + "repository": "Qix-/color", + "xo": { + "rules": { + "no-cond-assign": 0, + "new-cap": 0, + "unicorn/prefer-module": 0, + "no-mixed-operators": 0, + "complexity": 0, + "unicorn/numeric-separators-style": 0 + } + }, + "files": [ + "LICENSE", + "index.js" + ], + "scripts": { + "pretest": "xo", + "test": "mocha" + }, + "engines": { + "node": ">=12.5.0" + }, + "dependencies": { + "color-convert": "^2.0.1", + "color-string": "^1.9.0" + }, + "devDependencies": { + "mocha": "9.0.2", + "xo": "0.42.0" + } +} diff --git a/node_modules/crc-32/.flowconfig b/node_modules/crc-32/.flowconfig new file mode 100644 index 000000000..93384152d --- /dev/null +++ b/node_modules/crc-32/.flowconfig @@ -0,0 +1,17 @@ +[ignore] +.*/node_modules/.* +.*/dist/.* +.*/test.js +.*/crc32.js + +.*/bits/.* +.*/ctest/.* +.*/misc/.* +.*/perf/.* + +[include] +crc32.flow.js + +[libs] + +[options] diff --git a/node_modules/crc-32/.jshintrc b/node_modules/crc-32/.jshintrc new file mode 100644 index 000000000..5cd205be6 --- /dev/null +++ b/node_modules/crc-32/.jshintrc @@ -0,0 +1,4 @@ +{ + "bitwise": false, + "curly": false +} diff --git a/node_modules/crc-32/.npmignore b/node_modules/crc-32/.npmignore new file mode 100644 index 000000000..d5be5ab58 --- /dev/null +++ b/node_modules/crc-32/.npmignore @@ -0,0 +1,10 @@ +node_modules +misc/ +perf/ +bits/ +ctest/ +test.js +.travis.yml +.jscs.json +perf.txt +Makefile diff --git a/node_modules/crc-32/LICENSE b/node_modules/crc-32/LICENSE new file mode 100644 index 000000000..c004c7cbe --- /dev/null +++ b/node_modules/crc-32/LICENSE @@ -0,0 +1,14 @@ +Copyright (C) 2014 SheetJS + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/node_modules/crc-32/README.md b/node_modules/crc-32/README.md new file mode 100644 index 000000000..beb54768d --- /dev/null +++ b/node_modules/crc-32/README.md @@ -0,0 +1,65 @@ +# crc32 + +Standard CRC-32 algorithm implementation in JS (for the browser and nodejs). +Emphasis on correctness and performance. + +## Installation + +With [npm](https://www.npmjs.org/package/crc-32): + + $ npm install crc-32 + +In the browser: + + + +The script will manipulate `module.exports` if available (e.g. in a CommonJS +`require` context). This is not always desirable. 
To prevent the behavior, +define `DO_NOT_EXPORT_CRC` + +## Usage + +In all cases, the relevant function takes a single argument representing data. + +The return value is a signed 32-bit integer. + +- `CRC32.buf(byte array or buffer)` assumes the argument is a set of 8-bit + unsigned integers (e.g. nodejs `Buffer` or simple array of ints). + +- `CRC32.bstr(binary string)` interprets the argument as a binary string where + the `i`-th byte is the low byte of the UCS-2 char: `str.charCodeAt(i) & 0xFF` + +- `CRC32.str(string)` interprets the argument as a standard JS string + +For example: + +```js +> // var CRC32 = require('crc-32'); // uncomment this line if in node +> CRC32.str("SheetJS") // -1647298270 +> CRC32.bstr("SheetJS") // -1647298270 +> CRC32.buf([ 83, 104, 101, 101, 116, 74, 83 ]) // -1647298270 + +> [CRC32.str("\u2603"), CRC32.str("\u0003")] // [ -1743909036, 1259060791 ] +> [CRC32.bstr("\u2603"), CRC32.bstr("\u0003")] // [ 1259060791, 1259060791 ] +> [CRC32.buf([0x2603]), CRC32.buf([0x0003])] // [ 1259060791, 1259060791 ] +``` + +## Testing + +`make test` will run the node-based tests. + +To run the in-browser tests, run a local server and go to the `ctest` directory. +To update the browser artifacts, run `make ctest`. + +## License + +Please consult the attached LICENSE file for details. All rights not explicitly +granted by the Apache 2.0 license are reserved by the Original Author. + +## Badges + +[![Build Status](https://travis-ci.org/SheetJS/js-crc32.svg?branch=master)](https://travis-ci.org/SheetJS/js-crc32) + +[![Coverage Status](http://img.shields.io/coveralls/SheetJS/js-crc32/master.svg)](https://coveralls.io/r/SheetJS/js-crc32?branch=master) + +[![Analytics](https://ga-beacon.appspot.com/UA-36810333-1/SheetJS/js-crc32?pixel)](https://github.com/SheetJS/js-crc32) diff --git a/node_modules/crc-32/crc32.flow.js b/node_modules/crc-32/crc32.flow.js new file mode 100644 index 000000000..cd5418adf --- /dev/null +++ b/node_modules/crc-32/crc32.flow.js @@ -0,0 +1,117 @@ +/* crc32.js (C) 2014-2015 SheetJS -- http://sheetjs.com */ +/* vim: set ts=2: */ +var CRC32; +/*:: declare var DO_NOT_EXPORT_CRC: any; */ +/*:: declare var define: any; */ +(function (factory) { + if(typeof DO_NOT_EXPORT_CRC === 'undefined') { + if('object' === typeof exports) { + factory(exports); + } else if ('function' === typeof define && define.amd) { + define(function () { + var module = {}; + factory(module); + return module; + }); + } else { + factory(CRC32 = {}); + } + } else { + factory(CRC32 = {}); + } +}(function(CRC32) { +CRC32.version = '0.3.0'; +/*:: +type CRC32Type = number; +type ABuf = Array | Buffer; +type CRC32TableType = Array | Int32Array; +*/ +/* see perf/crc32table.js */ +function signed_crc_table()/*:CRC32TableType*/ { + var c = 0, table/*:Array*/ = new Array(256); + + for(var n =0; n != 256; ++n){ + c = n; + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + table[n] = c; + } + + return typeof Int32Array !== 'undefined' ? 
new Int32Array(table) : table; +} + +var table = signed_crc_table(); +/* charCodeAt is the best approach for binary strings */ +var use_buffer = typeof Buffer !== 'undefined'; +function crc32_bstr(bstr/*:string*/)/*:CRC32Type*/ { + if(bstr.length > 32768) if(use_buffer) return crc32_buf_8(new Buffer(bstr)); + var crc = -1, L = bstr.length - 1; + for(var i = 0; i < L;) { + crc = table[(crc ^ bstr.charCodeAt(i++)) & 0xFF] ^ (crc >>> 8); + crc = table[(crc ^ bstr.charCodeAt(i++)) & 0xFF] ^ (crc >>> 8); + } + if(i === L) crc = (crc >>> 8) ^ table[(crc ^ bstr.charCodeAt(i)) & 0xFF]; + return crc ^ -1; +} + +function crc32_buf(buf/*:ABuf*/)/*:CRC32Type*/ { + if(buf.length > 10000) return crc32_buf_8(buf); + for(var crc = -1, i = 0, L=buf.length-3; i < L;) { + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + } + while(i < L+3) crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + return crc ^ -1; +} + +function crc32_buf_8(buf/*:ABuf*/)/*:CRC32Type*/ { + for(var crc = -1, i = 0, L=buf.length-7; i < L;) { + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + } + while(i < L+7) crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + return crc ^ -1; +} + +/* much much faster to intertwine utf8 and crc */ +function crc32_str(str/*:string*/)/*:CRC32Type*/ { + for(var crc = -1, i = 0, L=str.length, c, d; i < L;) { + c = str.charCodeAt(i++); + if(c < 0x80) { + crc = (crc >>> 8) ^ table[(crc ^ c) & 0xFF]; + } else if(c < 0x800) { + crc = (crc >>> 8) ^ table[(crc ^ (192|((c>>6)&31))) & 0xFF]; + crc = (crc >>> 8) ^ table[(crc ^ (128|(c&63))) & 0xFF]; + } else if(c >= 0xD800 && c < 0xE000) { + c = (c&1023)+64; d = str.charCodeAt(i++) & 1023; + crc = (crc >>> 8) ^ table[(crc ^ (240|((c>>8)&7))) & 0xFF]; + crc = (crc >>> 8) ^ table[(crc ^ (128|((c>>2)&63))) & 0xFF]; + crc = (crc >>> 8) ^ table[(crc ^ (128|((d>>6)&15)|(c&3))) & 0xFF]; + crc = (crc >>> 8) ^ table[(crc ^ (128|(d&63))) & 0xFF]; + } else { + crc = (crc >>> 8) ^ table[(crc ^ (224|((c>>12)&15))) & 0xFF]; + crc = (crc >>> 8) ^ table[(crc ^ (128|((c>>6)&63))) & 0xFF]; + crc = (crc >>> 8) ^ table[(crc ^ (128|(c&63))) & 0xFF]; + } + } + return crc ^ -1; +} +CRC32.table = table; +CRC32.bstr = crc32_bstr; +CRC32.buf = crc32_buf; +CRC32.str = crc32_str; +})); diff --git a/node_modules/crc-32/crc32.js b/node_modules/crc-32/crc32.js new file mode 100644 index 000000000..a1ca8984c --- /dev/null +++ b/node_modules/crc-32/crc32.js @@ -0,0 +1,110 @@ +/* crc32.js (C) 2014-2015 SheetJS -- http://sheetjs.com */ +/* vim: set ts=2: */ +var CRC32; +(function (factory) { + if(typeof DO_NOT_EXPORT_CRC === 'undefined') { + if('object' === typeof exports) { + factory(exports); + } else if ('function' === typeof define && define.amd) { + define(function () { + var module = {}; + factory(module); + return module; + }); + } else { + factory(CRC32 = {}); + } + } else { + factory(CRC32 = {}); + } +}(function(CRC32) { +CRC32.version = '0.3.0'; +/* see perf/crc32table.js */ +function signed_crc_table() { + var c = 0, table = new Array(256); + + for(var n =0; n != 256; ++n){ + c = n; + c = ((c&1) ? 
(-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + table[n] = c; + } + + return typeof Int32Array !== 'undefined' ? new Int32Array(table) : table; +} + +var table = signed_crc_table(); +/* charCodeAt is the best approach for binary strings */ +var use_buffer = typeof Buffer !== 'undefined'; +function crc32_bstr(bstr) { + if(bstr.length > 32768) if(use_buffer) return crc32_buf_8(new Buffer(bstr)); + var crc = -1, L = bstr.length - 1; + for(var i = 0; i < L;) { + crc = table[(crc ^ bstr.charCodeAt(i++)) & 0xFF] ^ (crc >>> 8); + crc = table[(crc ^ bstr.charCodeAt(i++)) & 0xFF] ^ (crc >>> 8); + } + if(i === L) crc = (crc >>> 8) ^ table[(crc ^ bstr.charCodeAt(i)) & 0xFF]; + return crc ^ -1; +} + +function crc32_buf(buf) { + if(buf.length > 10000) return crc32_buf_8(buf); + for(var crc = -1, i = 0, L=buf.length-3; i < L;) { + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + } + while(i < L+3) crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + return crc ^ -1; +} + +function crc32_buf_8(buf) { + for(var crc = -1, i = 0, L=buf.length-7; i < L;) { + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + } + while(i < L+7) crc = (crc >>> 8) ^ table[(crc^buf[i++])&0xFF]; + return crc ^ -1; +} + +/* much much faster to intertwine utf8 and crc */ +function crc32_str(str) { + for(var crc = -1, i = 0, L=str.length, c, d; i < L;) { + c = str.charCodeAt(i++); + if(c < 0x80) { + crc = (crc >>> 8) ^ table[(crc ^ c) & 0xFF]; + } else if(c < 0x800) { + crc = (crc >>> 8) ^ table[(crc ^ (192|((c>>6)&31))) & 0xFF]; + crc = (crc >>> 8) ^ table[(crc ^ (128|(c&63))) & 0xFF]; + } else if(c >= 0xD800 && c < 0xE000) { + c = (c&1023)+64; d = str.charCodeAt(i++) & 1023; + crc = (crc >>> 8) ^ table[(crc ^ (240|((c>>8)&7))) & 0xFF]; + crc = (crc >>> 8) ^ table[(crc ^ (128|((c>>2)&63))) & 0xFF]; + crc = (crc >>> 8) ^ table[(crc ^ (128|((d>>6)&15)|(c&3))) & 0xFF]; + crc = (crc >>> 8) ^ table[(crc ^ (128|(d&63))) & 0xFF]; + } else { + crc = (crc >>> 8) ^ table[(crc ^ (224|((c>>12)&15))) & 0xFF]; + crc = (crc >>> 8) ^ table[(crc ^ (128|((c>>6)&63))) & 0xFF]; + crc = (crc >>> 8) ^ table[(crc ^ (128|(c&63))) & 0xFF]; + } + } + return crc ^ -1; +} +CRC32.table = table; +CRC32.bstr = crc32_bstr; +CRC32.buf = crc32_buf; +CRC32.str = crc32_str; +})); diff --git a/node_modules/crc-32/package.json b/node_modules/crc-32/package.json new file mode 100644 index 000000000..384b0313e --- /dev/null +++ b/node_modules/crc-32/package.json @@ -0,0 +1,25 @@ +{ + "name": "crc-32", + "version": "0.3.0", + "author": "sheetjs", + "description": "Pure-JS CRC-32", + "keywords": [ "crc32", "checksum", "crc" ], + "main": "./crc32", + "devDependencies": { + "mocha":"", + "uglify-js":"" + }, + "repository": 
{ "type":"git", "url":"git://github.com/SheetJS/js-crc32.git" }, + "scripts": { + "pretest": "git submodule init && git submodule update", + "test": "make test" + }, + "config": { + "blanket": { + "pattern": "crc32.js" + } + }, + "bugs": { "url": "https://github.com/SheetJS/js-crc32/issues" }, + "license": "Apache-2.0", + "engines": { "node": ">=0.8" } +} diff --git a/node_modules/decompress-response/index.d.ts b/node_modules/decompress-response/index.d.ts new file mode 100644 index 000000000..c0be175f9 --- /dev/null +++ b/node_modules/decompress-response/index.d.ts @@ -0,0 +1,22 @@ +/// +import {IncomingMessage} from 'http'; + +/** +Decompress a HTTP response if needed. + +@param response - The HTTP incoming stream with compressed data. +@returns The decompressed HTTP response stream. + +@example +``` +import {http} from 'http'; +import decompressResponse = require('decompress-response'); + +http.get('https://sindresorhus.com', response => { + response = decompressResponse(response); +}); +``` +*/ +declare function decompressResponse(response: IncomingMessage): IncomingMessage; + +export = decompressResponse; diff --git a/node_modules/decompress-response/index.js b/node_modules/decompress-response/index.js new file mode 100644 index 000000000..c86103625 --- /dev/null +++ b/node_modules/decompress-response/index.js @@ -0,0 +1,58 @@ +'use strict'; +const {Transform, PassThrough} = require('stream'); +const zlib = require('zlib'); +const mimicResponse = require('mimic-response'); + +module.exports = response => { + const contentEncoding = (response.headers['content-encoding'] || '').toLowerCase(); + + if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) { + return response; + } + + // TODO: Remove this when targeting Node.js 12. + const isBrotli = contentEncoding === 'br'; + if (isBrotli && typeof zlib.createBrotliDecompress !== 'function') { + response.destroy(new Error('Brotli is not supported on Node.js < 12')); + return response; + } + + let isEmpty = true; + + const checker = new Transform({ + transform(data, _encoding, callback) { + isEmpty = false; + + callback(null, data); + }, + + flush(callback) { + callback(); + } + }); + + const finalStream = new PassThrough({ + autoDestroy: false, + destroy(error, callback) { + response.destroy(); + + callback(error); + } + }); + + const decompressStream = isBrotli ? 
zlib.createBrotliDecompress() : zlib.createUnzip(); + + decompressStream.once('error', error => { + if (isEmpty && !response.readable) { + finalStream.end(); + return; + } + + finalStream.destroy(error); + }); + + mimicResponse(response, finalStream); + response.pipe(checker).pipe(decompressStream).pipe(finalStream); + + return finalStream; +}; diff --git a/node_modules/decompress-response/license b/node_modules/decompress-response/license new file mode 100644 index 000000000..fa7ceba3e --- /dev/null +++ b/node_modules/decompress-response/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/decompress-response/package.json b/node_modules/decompress-response/package.json new file mode 100644 index 000000000..57df8896d --- /dev/null +++ b/node_modules/decompress-response/package.json @@ -0,0 +1,56 @@ +{ + "name": "decompress-response", + "version": "6.0.0", + "description": "Decompress a HTTP response if needed", + "license": "MIT", + "repository": "sindresorhus/decompress-response", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "decompress", + "response", + "http", + "https", + "zlib", + "gzip", + "zip", + "deflate", + "unzip", + "ungzip", + "incoming", + "message", + "stream", + "compressed", + "brotli" + ], + "dependencies": { + "mimic-response": "^3.1.0" + }, + "devDependencies": { + "@types/node": "^14.0.1", + "ava": "^2.2.0", + "get-stream": "^5.0.0", + "pify": "^5.0.0", + "tsd": "^0.11.0", + "xo": "^0.30.0" + }, + "xo": { + "rules": { + "@typescript-eslint/prefer-readonly-parameter-types": "off" + } + } +} diff --git a/node_modules/decompress-response/readme.md b/node_modules/decompress-response/readme.md new file mode 100644 index 000000000..58523ef3a --- /dev/null +++ b/node_modules/decompress-response/readme.md @@ -0,0 +1,48 @@ +# decompress-response [![Build Status](https://travis-ci.com/sindresorhus/decompress-response.svg?branch=master)](https://travis-ci.com/sindresorhus/decompress-response) + +> Decompress a HTTP response if needed + +Decompresses the [response](https://nodejs.org/api/http.html#http_class_http_incomingmessage) from [`http.request`](https://nodejs.org/api/http.html#http_http_request_options_callback) if it's 
gzipped, deflated or compressed with Brotli, otherwise just passes it through. + +Used by [`got`](https://github.com/sindresorhus/got). + +## Install + +``` +$ npm install decompress-response +``` + +## Usage + +```js +const http = require('http'); +const decompressResponse = require('decompress-response'); + +http.get('https://sindresorhus.com', response => { + response = decompressResponse(response); +}); +``` + +## API + +### decompressResponse(response) + +Returns the decompressed HTTP response stream. + +#### response + +Type: [`http.IncomingMessage`](https://nodejs.org/api/http.html#http_class_http_incomingmessage) + +The HTTP incoming stream with compressed data. + +--- + +
+	Get professional support for this package with a Tidelift subscription
+
+	Tidelift helps make open source sustainable for maintainers while giving companies
+	assurances about security, maintenance, and licensing for their dependencies.
+
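To round out the decompress-response readme above, here is a small self-contained sketch (an editor's illustration, not part of the vendored file) of the documented `decompressResponse(response)` call being used to read a response body; the URL is just a placeholder.

```js
const https = require('https');
const decompressResponse = require('decompress-response');

// decompressResponse() returns the response unchanged when no supported
// content-encoding is present, or a decompressed stream that mimics it
// otherwise, so it can be consumed like any readable stream.
https.get('https://example.com', response => {
	const chunks = [];
	const decompressed = decompressResponse(response);

	decompressed.on('data', chunk => chunks.push(chunk));
	decompressed.on('end', () => {
		console.log(Buffer.concat(chunks).toString('utf8'));
	});
});
```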
diff --git a/node_modules/deep-extend/CHANGELOG.md b/node_modules/deep-extend/CHANGELOG.md new file mode 100644 index 000000000..dd13ec131 --- /dev/null +++ b/node_modules/deep-extend/CHANGELOG.md @@ -0,0 +1,46 @@ +Changelog +========= + +v0.6.0 +------ + +- Updated "devDependencies" versions to fix vulnerability alerts +- Dropped support of io.js and node.js v0.12.x and lower since new versions of + "devDependencies" couldn't work with those old node.js versions + (minimal supported version of node.js now is v4.0.0) + +v0.5.1 +------ + +- Fix prototype pollution vulnerability (thanks to @mwakerman for the PR) +- Avoid using deprecated Buffer API (thanks to @ChALkeR for the PR) + +v0.5.0 +------ + +- Auto-testing provided by Travis CI; +- Support older Node.JS versions (`v0.11.x` and `v0.10.x`); +- Removed tests files from npm package. + +v0.4.2 +------ + +- Fix for `null` as an argument. + +v0.4.1 +------ + +- Removed test code from npm package + ([see pull request #21](https://github.com/unclechu/node-deep-extend/pull/21)); +- Increased minimal version of Node from `0.4.0` to `0.12.0` + (because can't run tests on lesser version anyway). + +v0.4.0 +------ + +- **WARNING!** Broken backward compatibility with `v0.3.x`; +- Fixed bug with extending arrays instead of cloning; +- Deep cloning for arrays; +- Check for own property; +- Fixed some documentation issues; +- Strict JS mode. diff --git a/node_modules/deep-extend/LICENSE b/node_modules/deep-extend/LICENSE new file mode 100644 index 000000000..5c58916f2 --- /dev/null +++ b/node_modules/deep-extend/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2013-2018, Viacheslav Lotsmanov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/deep-extend/README.md b/node_modules/deep-extend/README.md new file mode 100644 index 000000000..67c7fc085 --- /dev/null +++ b/node_modules/deep-extend/README.md @@ -0,0 +1,91 @@ +Deep Extend +=========== + +Recursive object extending. 
+ +[![Build Status](https://api.travis-ci.org/unclechu/node-deep-extend.svg?branch=master)](https://travis-ci.org/unclechu/node-deep-extend) + +[![NPM](https://nodei.co/npm/deep-extend.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/deep-extend/) + +Install +------- + +```bash +$ npm install deep-extend +``` + +Usage +----- + +```javascript +var deepExtend = require('deep-extend'); +var obj1 = { + a: 1, + b: 2, + d: { + a: 1, + b: [], + c: { test1: 123, test2: 321 } + }, + f: 5, + g: 123, + i: 321, + j: [1, 2] +}; +var obj2 = { + b: 3, + c: 5, + d: { + b: { first: 'one', second: 'two' }, + c: { test2: 222 } + }, + e: { one: 1, two: 2 }, + f: [], + g: (void 0), + h: /abc/g, + i: null, + j: [3, 4] +}; + +deepExtend(obj1, obj2); + +console.log(obj1); +/* +{ a: 1, + b: 3, + d: + { a: 1, + b: { first: 'one', second: 'two' }, + c: { test1: 123, test2: 222 } }, + f: [], + g: undefined, + c: 5, + e: { one: 1, two: 2 }, + h: /abc/g, + i: null, + j: [3, 4] } +*/ +``` + +Unit testing +------------ + +```bash +$ npm test +``` + +Changelog +--------- + +[CHANGELOG.md](./CHANGELOG.md) + +Any issues? +----------- + +Please, report about issues +[here](https://github.com/unclechu/node-deep-extend/issues). + +License +------- + +[MIT](./LICENSE) diff --git a/node_modules/deep-extend/index.js b/node_modules/deep-extend/index.js new file mode 100644 index 000000000..762d81e95 --- /dev/null +++ b/node_modules/deep-extend/index.js @@ -0,0 +1 @@ +module.exports = require('./lib/deep-extend'); diff --git a/node_modules/deep-extend/lib/deep-extend.js b/node_modules/deep-extend/lib/deep-extend.js new file mode 100644 index 000000000..651fd8d3e --- /dev/null +++ b/node_modules/deep-extend/lib/deep-extend.js @@ -0,0 +1,150 @@ +/*! + * @description Recursive object extending + * @author Viacheslav Lotsmanov + * @license MIT + * + * The MIT License (MIT) + * + * Copyright (c) 2013-2018 Viacheslav Lotsmanov + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of + * this software and associated documentation files (the "Software"), to deal in + * the Software without restriction, including without limitation the rights to + * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of + * the Software, and to permit persons to whom the Software is furnished to do so, + * subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS + * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR + * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +'use strict'; + +function isSpecificValue(val) { + return ( + val instanceof Buffer + || val instanceof Date + || val instanceof RegExp + ) ? true : false; +} + +function cloneSpecificValue(val) { + if (val instanceof Buffer) { + var x = Buffer.alloc + ? 
Buffer.alloc(val.length) + : new Buffer(val.length); + val.copy(x); + return x; + } else if (val instanceof Date) { + return new Date(val.getTime()); + } else if (val instanceof RegExp) { + return new RegExp(val); + } else { + throw new Error('Unexpected situation'); + } +} + +/** + * Recursive cloning array. + */ +function deepCloneArray(arr) { + var clone = []; + arr.forEach(function (item, index) { + if (typeof item === 'object' && item !== null) { + if (Array.isArray(item)) { + clone[index] = deepCloneArray(item); + } else if (isSpecificValue(item)) { + clone[index] = cloneSpecificValue(item); + } else { + clone[index] = deepExtend({}, item); + } + } else { + clone[index] = item; + } + }); + return clone; +} + +function safeGetProperty(object, property) { + return property === '__proto__' ? undefined : object[property]; +} + +/** + * Extening object that entered in first argument. + * + * Returns extended object or false if have no target object or incorrect type. + * + * If you wish to clone source object (without modify it), just use empty new + * object as first argument, like this: + * deepExtend({}, yourObj_1, [yourObj_N]); + */ +var deepExtend = module.exports = function (/*obj_1, [obj_2], [obj_N]*/) { + if (arguments.length < 1 || typeof arguments[0] !== 'object') { + return false; + } + + if (arguments.length < 2) { + return arguments[0]; + } + + var target = arguments[0]; + + // convert arguments to array and cut off target object + var args = Array.prototype.slice.call(arguments, 1); + + var val, src, clone; + + args.forEach(function (obj) { + // skip argument if isn't an object, is null, or is an array + if (typeof obj !== 'object' || obj === null || Array.isArray(obj)) { + return; + } + + Object.keys(obj).forEach(function (key) { + src = safeGetProperty(target, key); // source value + val = safeGetProperty(obj, key); // new value + + // recursion prevention + if (val === target) { + return; + + /** + * if new value isn't object then just overwrite by new value + * instead of extending. + */ + } else if (typeof val !== 'object' || val === null) { + target[key] = val; + return; + + // just clone arrays (and recursive clone objects inside) + } else if (Array.isArray(val)) { + target[key] = deepCloneArray(val); + return; + + // custom cloning and overwrite for specific objects + } else if (isSpecificValue(val)) { + target[key] = cloneSpecificValue(val); + return; + + // overwrite by new value if source isn't object or array + } else if (typeof src !== 'object' || src === null || Array.isArray(src)) { + target[key] = deepExtend({}, val); + return; + + // source value and new value is objects both, extending... 
+ } else { + target[key] = deepExtend(src, val); + return; + } + }); + }); + + return target; +}; diff --git a/node_modules/deep-extend/package.json b/node_modules/deep-extend/package.json new file mode 100644 index 000000000..5f2195ff9 --- /dev/null +++ b/node_modules/deep-extend/package.json @@ -0,0 +1,62 @@ +{ + "name": "deep-extend", + "description": "Recursive object extending", + "license": "MIT", + "version": "0.6.0", + "homepage": "https://github.com/unclechu/node-deep-extend", + "keywords": [ + "deep-extend", + "extend", + "deep", + "recursive", + "xtend", + "clone", + "merge", + "json" + ], + "licenses": [ + { + "type": "MIT", + "url": "https://raw.githubusercontent.com/unclechu/node-deep-extend/master/LICENSE" + } + ], + "repository": { + "type": "git", + "url": "git://github.com/unclechu/node-deep-extend.git" + }, + "author": "Viacheslav Lotsmanov ", + "bugs": "https://github.com/unclechu/node-deep-extend/issues", + "contributors": [ + { + "name": "Romain Prieto", + "url": "https://github.com/rprieto" + }, + { + "name": "Max Maximov", + "url": "https://github.com/maxmaximov" + }, + { + "name": "Marshall Bowers", + "url": "https://github.com/maxdeviant" + }, + { + "name": "Misha Wakerman", + "url": "https://github.com/mwakerman" + } + ], + "main": "lib/deep-extend.js", + "engines": { + "node": ">=4.0.0" + }, + "scripts": { + "test": "./node_modules/.bin/mocha" + }, + "devDependencies": { + "mocha": "5.2.0", + "should": "13.2.1" + }, + "files": [ + "index.js", + "lib/" + ] +} diff --git a/node_modules/detect-libc/LICENSE b/node_modules/detect-libc/LICENSE new file mode 100644 index 000000000..8dada3eda --- /dev/null +++ b/node_modules/detect-libc/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/detect-libc/README.md b/node_modules/detect-libc/README.md new file mode 100644 index 000000000..cd5972fd1 --- /dev/null +++ b/node_modules/detect-libc/README.md @@ -0,0 +1,160 @@ +# detect-libc + +Node.js module to detect details of the C standard library (libc) +implementation provided by a given Linux system. + +Currently supports detection of GNU glibc and MUSL libc. + +Provides asychronous and synchronous functions for the +family (e.g. `glibc`, `musl`) and version (e.g. `1.23`, `1.2.3`). + +For previous v1.x releases, please see the +[v1](https://github.com/lovell/detect-libc/tree/v1) branch. + +## Install + +```sh +npm install detect-libc +``` + +## API + +### GLIBC + +```ts +const GLIBC: string = 'glibc'; +``` + +A String constant containing the value `glibc`. + +### MUSL + +```ts +const MUSL: string = 'musl'; +``` + +A String constant containing the value `musl`. 
+ +### family + +```ts +function family(): Promise; +``` + +Resolves asychronously with: + +* `glibc` or `musl` when the libc family can be determined +* `null` when the libc family cannot be determined +* `null` when run on a non-Linux platform + +```js +const { family, GLIBC, MUSL } = require('detect-libc'); + +switch (await family()) { + case GLIBC: ... + case MUSL: ... + case null: ... +} +``` + +### familySync + +```ts +function familySync(): string | null; +``` + +Synchronous version of `family()`. + +```js +const { familySync, GLIBC, MUSL } = require('detect-libc'); + +switch (familySync()) { + case GLIBC: ... + case MUSL: ... + case null: ... +} +``` + +### version + +```ts +function version(): Promise; +``` + +Resolves asychronously with: + +* The version when it can be determined +* `null` when the libc family cannot be determined +* `null` when run on a non-Linux platform + +```js +const { version } = require('detect-libc'); + +const v = await version(); +if (v) { + const [major, minor, patch] = v.split('.'); +} +``` + +### versionSync + +```ts +function versionSync(): string | null; +``` + +Synchronous version of `version()`. + +```js +const { versionSync } = require('detect-libc'); + +const v = versionSync(); +if (v) { + const [major, minor, patch] = v.split('.'); +} +``` + +### isNonGlibcLinux + +```ts +function isNonGlibcLinux(): Promise; +``` + +Resolves asychronously with: + +* `false` when the libc family is `glibc` +* `true` when the libc family is not `glibc` +* `false` when run on a non-Linux platform + +```js +const { isNonGlibcLinux } = require('detect-libc'); + +if (await isNonGlibcLinux()) { ... } +``` + +### isNonGlibcLinuxSync + +```ts +function isNonGlibcLinuxSync(): boolean; +``` + +Synchronous version of `isNonGlibcLinux()`. + +```js +const { isNonGlibcLinuxSync } = require('detect-libc'); + +if (isNonGlibcLinuxSync()) { ... } +``` + +## Licensing + +Copyright 2017, 2022 Lovell Fuller + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0.html) + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/node_modules/detect-libc/index.d.ts b/node_modules/detect-libc/index.d.ts new file mode 100644 index 000000000..aabe80817 --- /dev/null +++ b/node_modules/detect-libc/index.d.ts @@ -0,0 +1,11 @@ +export const GLIBC: 'glibc'; +export const MUSL: 'musl'; + +export function family(): Promise; +export function familySync(): string | null; + +export function isNonGlibcLinux(): Promise; +export function isNonGlibcLinuxSync(): boolean; + +export function version(): Promise; +export function versionSync(): string | null; diff --git a/node_modules/detect-libc/lib/detect-libc.js b/node_modules/detect-libc/lib/detect-libc.js new file mode 100644 index 000000000..5b66e612e --- /dev/null +++ b/node_modules/detect-libc/lib/detect-libc.js @@ -0,0 +1,178 @@ +'use strict'; + +const childProcess = require('child_process'); +const { isLinux, getReport } = require('./process'); + +const command = 'getconf GNU_LIBC_VERSION 2>&1 || true; ldd --version 2>&1 || true'; +let commandOut = ''; + +const safeCommand = () => { + if (!commandOut) { + return new Promise((resolve) => { + childProcess.exec(command, (err, out) => { + commandOut = err ? ' ' : out; + resolve(commandOut); + }); + }); + } + return commandOut; +}; + +const safeCommandSync = () => { + if (!commandOut) { + try { + commandOut = childProcess.execSync(command, { encoding: 'utf8' }); + } catch (_err) { + commandOut = ' '; + } + } + return commandOut; +}; + +/** + * A String constant containing the value `glibc`. + * @type {string} + * @public + */ +const GLIBC = 'glibc'; + +/** + * A String constant containing the value `musl`. + * @type {string} + * @public + */ +const MUSL = 'musl'; + +const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-'); + +const familyFromReport = () => { + const report = getReport(); + if (report.header && report.header.glibcVersionRuntime) { + return GLIBC; + } + if (Array.isArray(report.sharedObjects)) { + if (report.sharedObjects.some(isFileMusl)) { + return MUSL; + } + } + return null; +}; + +const familyFromCommand = (out) => { + const [getconf, ldd1] = out.split(/[\r\n]+/); + if (getconf && getconf.includes(GLIBC)) { + return GLIBC; + } + if (ldd1 && ldd1.includes(MUSL)) { + return MUSL; + } + return null; +}; + +/** + * Resolves with the libc family when it can be determined, `null` otherwise. + * @returns {Promise} + */ +const family = async () => { + let family = null; + if (isLinux()) { + family = familyFromReport(); + if (!family) { + const out = await safeCommand(); + family = familyFromCommand(out); + } + } + return family; +}; + +/** + * Returns the libc family when it can be determined, `null` otherwise. + * @returns {?string} + */ +const familySync = () => { + let family = null; + if (isLinux()) { + family = familyFromReport(); + if (!family) { + const out = safeCommandSync(); + family = familyFromCommand(out); + } + } + return family; +}; + +/** + * Resolves `true` only when the platform is Linux and the libc family is not `glibc`. + * @returns {Promise} + */ +const isNonGlibcLinux = async () => isLinux() && await family() !== GLIBC; + +/** + * Returns `true` only when the platform is Linux and the libc family is not `glibc`. 
+ * @returns {boolean} + */ +const isNonGlibcLinuxSync = () => isLinux() && familySync() !== GLIBC; + +const versionFromReport = () => { + const report = getReport(); + if (report.header && report.header.glibcVersionRuntime) { + return report.header.glibcVersionRuntime; + } + return null; +}; + +const versionSuffix = (s) => s.trim().split(/\s+/)[1]; + +const versionFromCommand = (out) => { + const [getconf, ldd1, ldd2] = out.split(/[\r\n]+/); + if (getconf && getconf.includes(GLIBC)) { + return versionSuffix(getconf); + } + if (ldd1 && ldd2 && ldd1.includes(MUSL)) { + return versionSuffix(ldd2); + } + return null; +}; + +/** + * Resolves with the libc version when it can be determined, `null` otherwise. + * @returns {Promise} + */ +const version = async () => { + let version = null; + if (isLinux()) { + version = versionFromReport(); + if (!version) { + const out = await safeCommand(); + version = versionFromCommand(out); + } + } + return version; +}; + +/** + * Returns the libc version when it can be determined, `null` otherwise. + * @returns {?string} + */ +const versionSync = () => { + let version = null; + if (isLinux()) { + version = versionFromReport(); + if (!version) { + const out = safeCommandSync(); + version = versionFromCommand(out); + } + } + return version; +}; + +module.exports = { + GLIBC, + MUSL, + family, + familySync, + isNonGlibcLinux, + isNonGlibcLinuxSync, + version, + versionSync +}; diff --git a/node_modules/detect-libc/lib/process.js b/node_modules/detect-libc/lib/process.js new file mode 100644 index 000000000..8d5840f10 --- /dev/null +++ b/node_modules/detect-libc/lib/process.js @@ -0,0 +1,16 @@ +'use strict'; + +const isLinux = () => process.platform === 'linux'; + +let report = null; +const getReport = () => { + if (!report) { + /* istanbul ignore next */ + report = isLinux() && process.report + ? 
process.report.getReport() + : {}; + } + return report; +}; + +module.exports = { isLinux, getReport }; diff --git a/node_modules/detect-libc/package.json b/node_modules/detect-libc/package.json new file mode 100644 index 000000000..aeb7cc1d7 --- /dev/null +++ b/node_modules/detect-libc/package.json @@ -0,0 +1,36 @@ +{ + "name": "detect-libc", + "version": "2.0.1", + "description": "Node.js module to detect the C standard library (libc) implementation family and version", + "main": "lib/detect-libc.js", + "files": [ + "lib/", + "index.d.ts" + ], + "scripts": { + "test": "semistandard && nyc --reporter=lcov --check-coverage --branches=100 ava test/unit.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/lovell/detect-libc" + }, + "keywords": [ + "libc", + "glibc", + "musl" + ], + "author": "Lovell Fuller ", + "contributors": [ + "Niklas Salmoukas " + ], + "license": "Apache-2.0", + "devDependencies": { + "ava": "^2.4.0", + "nyc": "^15.1.0", + "proxyquire": "^2.1.3", + "semistandard": "^14.2.3" + }, + "engines": { + "node": ">=8" + } +} diff --git a/node_modules/end-of-stream/LICENSE b/node_modules/end-of-stream/LICENSE new file mode 100644 index 000000000..757562ec5 --- /dev/null +++ b/node_modules/end-of-stream/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/end-of-stream/README.md b/node_modules/end-of-stream/README.md new file mode 100644 index 000000000..857b14bd7 --- /dev/null +++ b/node_modules/end-of-stream/README.md @@ -0,0 +1,54 @@ +# end-of-stream + +A node module that calls a callback when a readable/writable/duplex stream has completed or failed. + + npm install end-of-stream + +[![Build status](https://travis-ci.org/mafintosh/end-of-stream.svg?branch=master)](https://travis-ci.org/mafintosh/end-of-stream) + +## Usage + +Simply pass a stream and a callback to the `eos`. +Both legacy streams, streams2 and stream3 are supported. 
+ +``` js +var eos = require('end-of-stream'); + +eos(readableStream, function(err) { + // this will be set to the stream instance + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended', this === readableStream); +}); + +eos(writableStream, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has finished', this === writableStream); +}); + +eos(duplexStream, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended and finished', this === duplexStream); +}); + +eos(duplexStream, {readable:false}, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has finished but might still be readable'); +}); + +eos(duplexStream, {writable:false}, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended but might still be writable'); +}); + +eos(readableStream, {error:false}, function(err) { + // do not treat emit('error', err) as a end-of-stream +}); +``` + +## License + +MIT + +## Related + +`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. diff --git a/node_modules/end-of-stream/index.js b/node_modules/end-of-stream/index.js new file mode 100644 index 000000000..c77f0d5d7 --- /dev/null +++ b/node_modules/end-of-stream/index.js @@ -0,0 +1,94 @@ +var once = require('once'); + +var noop = function() {}; + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; diff --git a/node_modules/end-of-stream/package.json b/node_modules/end-of-stream/package.json new file mode 100644 index 000000000..b75bbf0fd --- /dev/null +++ b/node_modules/end-of-stream/package.json @@ -0,0 +1,37 @@ +{ + "name": "end-of-stream", + "version": "1.4.4", + "description": "Call a callback when a readable/writable/duplex stream has completed or failed.", + "repository": { + "type": "git", + "url": "git://github.com/mafintosh/end-of-stream.git" + }, + "dependencies": { + "once": "^1.4.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "stream", + "streams", + "callback", + "finish", + "close", + "end", + "wait" + ], + "bugs": { + "url": "https://github.com/mafintosh/end-of-stream/issues" + }, + "homepage": "https://github.com/mafintosh/end-of-stream", + "main": "index.js", + "author": "Mathias Buus ", + "license": "MIT", + "devDependencies": { + "tape": "^4.11.0" + } +} diff --git a/node_modules/expand-template/.travis.yml b/node_modules/expand-template/.travis.yml new file mode 100644 index 000000000..1335a7708 --- /dev/null +++ b/node_modules/expand-template/.travis.yml @@ -0,0 +1,6 @@ +language: node_js + +node_js: + - 6 + - 8 + - 10 diff --git a/node_modules/expand-template/LICENSE b/node_modules/expand-template/LICENSE new file mode 100644 index 000000000..814aef41d --- /dev/null +++ b/node_modules/expand-template/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Lars-Magnus Skog + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is 
+furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/expand-template/README.md b/node_modules/expand-template/README.md new file mode 100644 index 000000000..b98aa480c --- /dev/null +++ b/node_modules/expand-template/README.md @@ -0,0 +1,43 @@ +# expand-template + +> Expand placeholders in a template string. + +[![npm](https://img.shields.io/npm/v/expand-template.svg)](https://www.npmjs.com/package/expand-template) +![Node version](https://img.shields.io/node/v/expand-template.svg) +[![Build Status](https://travis-ci.org/ralphtheninja/expand-template.svg?branch=master)](https://travis-ci.org/ralphtheninja/expand-template) +[![JavaScript Style Guide](https://img.shields.io/badge/code_style-standard-brightgreen.svg)](https://standardjs.com) + +## Install + +``` +$ npm i expand-template -S +``` + +## Usage + +Default functionality expands templates using `{}` as separators for string placeholders. + +```js +var expand = require('expand-template')() +var template = '{foo}/{foo}/{bar}/{bar}' +console.log(expand(template, { + foo: 'BAR', + bar: 'FOO' +})) +// -> BAR/BAR/FOO/FOO +``` + +Custom separators: + +```js +var expand = require('expand-template')({ sep: '[]' }) +var template = '[foo]/[foo]/[bar]/[bar]' +console.log(expand(template, { + foo: 'BAR', + bar: 'FOO' +})) +// -> BAR/BAR/FOO/FOO +``` + +## License +All code, unless stated otherwise, is dual-licensed under [`WTFPL`](http://www.wtfpl.net/txt/copying/) and [`MIT`](https://opensource.org/licenses/MIT). diff --git a/node_modules/expand-template/index.js b/node_modules/expand-template/index.js new file mode 100644 index 000000000..e182837c0 --- /dev/null +++ b/node_modules/expand-template/index.js @@ -0,0 +1,26 @@ +module.exports = function (opts) { + var sep = opts ? 
opts.sep : '{}' + var len = sep.length + + var whitespace = '\\s*' + var left = escape(sep.substring(0, len / 2)) + whitespace + var right = whitespace + escape(sep.substring(len / 2, len)) + + return function (template, values) { + Object.keys(values).forEach(function (key) { + var value = String(values[key]).replace(/\$/g, '$$$$') + template = template.replace(regExp(key), value) + }) + return template + } + + function escape (s) { + return [].map.call(s, function (char) { + return '\\' + char + }).join('') + } + + function regExp (key) { + return new RegExp(left + key + right, 'g') + } +} diff --git a/node_modules/expand-template/package.json b/node_modules/expand-template/package.json new file mode 100644 index 000000000..9a09656c0 --- /dev/null +++ b/node_modules/expand-template/package.json @@ -0,0 +1,29 @@ +{ + "name": "expand-template", + "version": "2.0.3", + "description": "Expand placeholders in a template string", + "main": "index.js", + "repository": { + "type": "git", + "url": "https://github.com/ralphtheninja/expand-template.git" + }, + "homepage": "https://github.com/ralphtheninja/expand-template", + "scripts": { + "test": "tape test.js && standard" + }, + "keywords": [ + "template", + "expand", + "replace" + ], + "author": "LM ", + "license": "(MIT OR WTFPL)", + "dependencies": {}, + "devDependencies": { + "standard": "^12.0.0", + "tape": "^4.2.2" + }, + "engines": { + "node": ">=6" + } +} diff --git a/node_modules/expand-template/test.js b/node_modules/expand-template/test.js new file mode 100644 index 000000000..ba6ed871e --- /dev/null +++ b/node_modules/expand-template/test.js @@ -0,0 +1,67 @@ +var test = require('tape') +var Expand = require('./') + +test('default expands {} placeholders', function (t) { + var expand = Expand() + t.equal(typeof expand, 'function', 'is a function') + t.equal(expand('{foo}/{bar}', { + foo: 'BAR', bar: 'FOO' + }), 'BAR/FOO') + t.equal(expand('{foo}{foo}{foo}', { + foo: 'FOO' + }), 'FOOFOOFOO', 'expands one placeholder many times') + t.end() +}) + +test('support for custom separators', function (t) { + var expand = Expand({ sep: '[]' }) + t.equal(expand('[foo]/[bar]', { + foo: 'BAR', bar: 'FOO' + }), 'BAR/FOO') + t.equal(expand('[foo][foo][foo]', { + foo: 'FOO' + }), 'FOOFOOFOO', 'expands one placeholder many times') + t.end() +}) + +test('support for longer custom separators', function (t) { + var expand = Expand({ sep: '[[]]' }) + t.equal(expand('[[foo]]/[[bar]]', { + foo: 'BAR', bar: 'FOO' + }), 'BAR/FOO') + t.equal(expand('[[foo]][[foo]][[foo]]', { + foo: 'FOO' + }), 'FOOFOOFOO', 'expands one placeholder many times') + t.end() +}) + +test('whitespace-insensitive', function (t) { + var expand = Expand({ sep: '[]' }) + t.equal(expand('[ foo ]/[ bar ]', { + foo: 'BAR', bar: 'FOO' + }), 'BAR/FOO') + t.equal(expand('[ foo ][ foo ][ foo]', { + foo: 'FOO' + }), 'FOOFOOFOO', 'expands one placeholder many times') + t.end() +}) + +test('dollar escape', function (t) { + var expand = Expand() + t.equal(expand('before {foo} after', { + foo: '$' + }), 'before $ after') + t.equal(expand('before {foo} after', { + foo: '$&' + }), 'before $& after') + t.equal(expand('before {foo} after', { + foo: '$`' + }), 'before $` after') + t.equal(expand('before {foo} after', { + foo: '$\'' + }), 'before $\' after') + t.equal(expand('before {foo} after', { + foo: '$0' + }), 'before $0 after') + t.end() +}) diff --git a/node_modules/fs-constants/LICENSE b/node_modules/fs-constants/LICENSE new file mode 100644 index 000000000..cb757e5db --- /dev/null +++ 
b/node_modules/fs-constants/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fs-constants/README.md b/node_modules/fs-constants/README.md new file mode 100644 index 000000000..62b33742e --- /dev/null +++ b/node_modules/fs-constants/README.md @@ -0,0 +1,26 @@ +# fs-constants + +Small module that allows you to get the fs constants across +Node and the browser. + +``` +npm install fs-constants +``` + +Previously you would use `require('constants')` for this in node but that has been +deprecated and changed to `require('fs').constants` which does not browserify. + +This module uses `require('constants')` in the browser and `require('fs').constants` in node to work around this + + +## Usage + +``` js +var constants = require('fs-constants') + +console.log('constants:', constants) +``` + +## License + +MIT diff --git a/node_modules/fs-constants/browser.js b/node_modules/fs-constants/browser.js new file mode 100644 index 000000000..3c87638dc --- /dev/null +++ b/node_modules/fs-constants/browser.js @@ -0,0 +1 @@ +module.exports = require('constants') diff --git a/node_modules/fs-constants/index.js b/node_modules/fs-constants/index.js new file mode 100644 index 000000000..2a3aadf39 --- /dev/null +++ b/node_modules/fs-constants/index.js @@ -0,0 +1 @@ +module.exports = require('fs').constants || require('constants') diff --git a/node_modules/fs-constants/package.json b/node_modules/fs-constants/package.json new file mode 100644 index 000000000..6f2b8f24c --- /dev/null +++ b/node_modules/fs-constants/package.json @@ -0,0 +1,19 @@ +{ + "name": "fs-constants", + "version": "1.0.0", + "description": "Require constants across node and the browser", + "main": "index.js", + "browser": "browser.js", + "dependencies": {}, + "devDependencies": {}, + "repository": { + "type": "git", + "url": "https://github.com/mafintosh/fs-constants.git" + }, + "author": "Mathias Buus (@mafintosh)", + "license": "MIT", + "bugs": { + "url": "https://github.com/mafintosh/fs-constants/issues" + }, + "homepage": "https://github.com/mafintosh/fs-constants" +} diff --git a/node_modules/github-from-package/.travis.yml b/node_modules/github-from-package/.travis.yml new file mode 100644 index 000000000..895dbd362 --- /dev/null +++ b/node_modules/github-from-package/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.6 + - 0.8 diff --git a/node_modules/github-from-package/LICENSE b/node_modules/github-from-package/LICENSE new file mode 100644 
index 000000000..ee27ba4b4 --- /dev/null +++ b/node_modules/github-from-package/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/github-from-package/example/package.json b/node_modules/github-from-package/example/package.json new file mode 100644 index 000000000..03494f486 --- /dev/null +++ b/node_modules/github-from-package/example/package.json @@ -0,0 +1,8 @@ +{ + "name": "beep-boop", + "version": "1.2.3", + "repository" : { + "type" : "git", + "url": "git@github.com:substack/beep-boop.git" + } +} diff --git a/node_modules/github-from-package/example/url.js b/node_modules/github-from-package/example/url.js new file mode 100644 index 000000000..138fb8a66 --- /dev/null +++ b/node_modules/github-from-package/example/url.js @@ -0,0 +1,3 @@ +var github = require('../'); +var url = github(require('./package.json')); +console.log(url); diff --git a/node_modules/github-from-package/index.js b/node_modules/github-from-package/index.js new file mode 100644 index 000000000..3d1d657b3 --- /dev/null +++ b/node_modules/github-from-package/index.js @@ -0,0 +1,17 @@ +module.exports = function (pkg) { + var m; + if (m = match(JSON.stringify(pkg.repository))) { + return m; + } + else if (m = match(JSON.stringify(pkg))) { + return m; + } + return undefined; +}; + +function match (str) { + var m = /\bgithub.com[:\/]([^\/"]+)\/([^\/"]+)/.exec(str); + if (m) { + return 'https://github.com/' + m[1] + '/' + m[2].replace(/\.git$/, ''); + } +} diff --git a/node_modules/github-from-package/package.json b/node_modules/github-from-package/package.json new file mode 100644 index 000000000..a3e240fed --- /dev/null +++ b/node_modules/github-from-package/package.json @@ -0,0 +1,30 @@ +{ + "name" : "github-from-package", + "version" : "0.0.0", + "description" : "return the github url from a package.json file", + "main" : "index.js", + "devDependencies" : { + "tap" : "~0.3.0", + "tape" : "~0.1.5" + }, + "scripts" : { + "test" : "tap test/*.js" + }, + "repository" : { + "type" : "git", + "url" : "git://github.com/substack/github-from-package.git" + }, + "homepage" : "https://github.com/substack/github-from-package", + "keywords" : [ + "github", + "package.json", + "npm", + "repository" + ], + "author" : { + "name" : "James Halliday", + "email" : "mail@substack.net", + "url" : "http://substack.net" + }, + "license" : "MIT" +} diff --git a/node_modules/github-from-package/readme.markdown b/node_modules/github-from-package/readme.markdown new file mode 
100644 index 000000000..5ba397da9 --- /dev/null +++ b/node_modules/github-from-package/readme.markdown @@ -0,0 +1,53 @@ +# github-from-package + +return the github url from a package.json file + +[![build status](https://secure.travis-ci.org/substack/github-from-package.png)](http://travis-ci.org/substack/github-from-package) + +# example + +For the `./package.json` file: + +``` json +{ + "name": "beep-boop", + "version": "1.2.3", + "repository" : { + "type" : "git", + "url": "git@github.com:substack/beep-boop.git" + } +} +``` + +``` js +var github = require('github-from-package'); +var url = github(require('./package.json')); +console.log(url); +``` + +``` +https://github.com/substack/beep-boop +``` + +# methods + +``` js +var github = require('github-from-package') +``` + +## var url = github(pkg) + +Return the most likely github url from the package.json contents `pkg`. If no +github url can be determined, return `undefined`. + +# install + +With [npm](https://npmjs.org) do: + +``` +npm install github-from-package +``` + +# license + +MIT diff --git a/node_modules/github-from-package/test/a.json b/node_modules/github-from-package/test/a.json new file mode 100644 index 000000000..03494f486 --- /dev/null +++ b/node_modules/github-from-package/test/a.json @@ -0,0 +1,8 @@ +{ + "name": "beep-boop", + "version": "1.2.3", + "repository" : { + "type" : "git", + "url": "git@github.com:substack/beep-boop.git" + } +} diff --git a/node_modules/github-from-package/test/b.json b/node_modules/github-from-package/test/b.json new file mode 100644 index 000000000..02093257b --- /dev/null +++ b/node_modules/github-from-package/test/b.json @@ -0,0 +1,5 @@ +{ + "name": "beep-boop", + "version": "1.2.3", + "repository" : "git@github.com:substack/beep-boop.git" +} diff --git a/node_modules/github-from-package/test/c.json b/node_modules/github-from-package/test/c.json new file mode 100644 index 000000000..65f6ddad5 --- /dev/null +++ b/node_modules/github-from-package/test/c.json @@ -0,0 +1,5 @@ +{ + "name": "beep-boop", + "version": "1.2.3", + "repository" : "https://github.com/substack/beep-boop.git" +} diff --git a/node_modules/github-from-package/test/d.json b/node_modules/github-from-package/test/d.json new file mode 100644 index 000000000..c61f3cd3b --- /dev/null +++ b/node_modules/github-from-package/test/d.json @@ -0,0 +1,7 @@ +{ + "name": "beep-boop", + "version": "1.2.3", + "repository" : { + "url": "https://github.com/substack/beep-boop" + } +} diff --git a/node_modules/github-from-package/test/e.json b/node_modules/github-from-package/test/e.json new file mode 100644 index 000000000..770b43846 --- /dev/null +++ b/node_modules/github-from-package/test/e.json @@ -0,0 +1,5 @@ +{ + "name": "beep-boop", + "version": "1.2.3", + "homepage": "https://github.com/substack/beep-boop/issues" +} diff --git a/node_modules/github-from-package/test/url.js b/node_modules/github-from-package/test/url.js new file mode 100644 index 000000000..d5a0a6672 --- /dev/null +++ b/node_modules/github-from-package/test/url.js @@ -0,0 +1,19 @@ +var test = require('tape'); +var github = require('../'); +var packages = { + a : require('./a.json'), + b : require('./b.json'), + c : require('./c.json'), + d : require('./d.json'), + e : require('./e.json') +}; + +test(function (t) { + t.plan(5); + var url = 'https://github.com/substack/beep-boop'; + t.equal(url, github(packages.a), 'a.json comparison'); + t.equal(url, github(packages.b), 'b.json comparison'); + t.equal(url, github(packages.c), 'c.json comparison'); + t.equal(url, 
github(packages.d), 'd.json comparison'); + t.equal(url, github(packages.e), 'e.json comparison'); +}); diff --git a/node_modules/ieee754/LICENSE b/node_modules/ieee754/LICENSE new file mode 100644 index 000000000..5aac82c78 --- /dev/null +++ b/node_modules/ieee754/LICENSE @@ -0,0 +1,11 @@ +Copyright 2008 Fair Oaks Labs, Inc. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/ieee754/README.md b/node_modules/ieee754/README.md new file mode 100644 index 000000000..cb7527b3c --- /dev/null +++ b/node_modules/ieee754/README.md @@ -0,0 +1,51 @@ +# ieee754 [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/ieee754/master.svg +[travis-url]: https://travis-ci.org/feross/ieee754 +[npm-image]: https://img.shields.io/npm/v/ieee754.svg +[npm-url]: https://npmjs.org/package/ieee754 +[downloads-image]: https://img.shields.io/npm/dm/ieee754.svg +[downloads-url]: https://npmjs.org/package/ieee754 +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +[![saucelabs][saucelabs-image]][saucelabs-url] + +[saucelabs-image]: https://saucelabs.com/browser-matrix/ieee754.svg +[saucelabs-url]: https://saucelabs.com/u/ieee754 + +### Read/write IEEE754 floating point numbers from/to a Buffer or array-like object. + +## install + +``` +npm install ieee754 +``` + +## methods + +`var ieee754 = require('ieee754')` + +The `ieee754` object has the following functions: + +``` +ieee754.read = function (buffer, offset, isLE, mLen, nBytes) +ieee754.write = function (buffer, value, offset, isLE, mLen, nBytes) +``` + +The arguments mean the following: + +- buffer = the buffer +- offset = offset into the buffer +- value = value to set (only for `write`) +- isLe = is little endian? +- mLen = mantissa length +- nBytes = number of bytes + +## what is ieee754? + +The IEEE Standard for Floating-Point Arithmetic (IEEE 754) is a technical standard for floating-point computation. 
[Read more](http://en.wikipedia.org/wiki/IEEE_floating_point). + +## license + +BSD 3 Clause. Copyright (c) 2008, Fair Oaks Labs, Inc. diff --git a/node_modules/ieee754/index.d.ts b/node_modules/ieee754/index.d.ts new file mode 100644 index 000000000..f1e435487 --- /dev/null +++ b/node_modules/ieee754/index.d.ts @@ -0,0 +1,10 @@ +declare namespace ieee754 { + export function read( + buffer: Uint8Array, offset: number, isLE: boolean, mLen: number, + nBytes: number): number; + export function write( + buffer: Uint8Array, value: number, offset: number, isLE: boolean, + mLen: number, nBytes: number): void; + } + + export = ieee754; \ No newline at end of file diff --git a/node_modules/ieee754/index.js b/node_modules/ieee754/index.js new file mode 100644 index 000000000..81d26c343 --- /dev/null +++ b/node_modules/ieee754/index.js @@ -0,0 +1,85 @@ +/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh */ +exports.read = function (buffer, offset, isLE, mLen, nBytes) { + var e, m + var eLen = (nBytes * 8) - mLen - 1 + var eMax = (1 << eLen) - 1 + var eBias = eMax >> 1 + var nBits = -7 + var i = isLE ? (nBytes - 1) : 0 + var d = isLE ? -1 : 1 + var s = buffer[offset + i] + + i += d + + e = s & ((1 << (-nBits)) - 1) + s >>= (-nBits) + nBits += eLen + for (; nBits > 0; e = (e * 256) + buffer[offset + i], i += d, nBits -= 8) {} + + m = e & ((1 << (-nBits)) - 1) + e >>= (-nBits) + nBits += mLen + for (; nBits > 0; m = (m * 256) + buffer[offset + i], i += d, nBits -= 8) {} + + if (e === 0) { + e = 1 - eBias + } else if (e === eMax) { + return m ? NaN : ((s ? -1 : 1) * Infinity) + } else { + m = m + Math.pow(2, mLen) + e = e - eBias + } + return (s ? -1 : 1) * m * Math.pow(2, e - mLen) +} + +exports.write = function (buffer, value, offset, isLE, mLen, nBytes) { + var e, m, c + var eLen = (nBytes * 8) - mLen - 1 + var eMax = (1 << eLen) - 1 + var eBias = eMax >> 1 + var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0) + var i = isLE ? 0 : (nBytes - 1) + var d = isLE ? 1 : -1 + var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0 + + value = Math.abs(value) + + if (isNaN(value) || value === Infinity) { + m = isNaN(value) ? 
1 : 0 + e = eMax + } else { + e = Math.floor(Math.log(value) / Math.LN2) + if (value * (c = Math.pow(2, -e)) < 1) { + e-- + c *= 2 + } + if (e + eBias >= 1) { + value += rt / c + } else { + value += rt * Math.pow(2, 1 - eBias) + } + if (value * c >= 2) { + e++ + c /= 2 + } + + if (e + eBias >= eMax) { + m = 0 + e = eMax + } else if (e + eBias >= 1) { + m = ((value * c) - 1) * Math.pow(2, mLen) + e = e + eBias + } else { + m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen) + e = 0 + } + } + + for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {} + + e = (e << mLen) | m + eLen += mLen + for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {} + + buffer[offset + i - d] |= s * 128 +} diff --git a/node_modules/ieee754/package.json b/node_modules/ieee754/package.json new file mode 100644 index 000000000..7b2385138 --- /dev/null +++ b/node_modules/ieee754/package.json @@ -0,0 +1,52 @@ +{ + "name": "ieee754", + "description": "Read/write IEEE754 floating point numbers from/to a Buffer or array-like object", + "version": "1.2.1", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "contributors": [ + "Romain Beauxis " + ], + "devDependencies": { + "airtap": "^3.0.0", + "standard": "*", + "tape": "^5.0.1" + }, + "keywords": [ + "IEEE 754", + "buffer", + "convert", + "floating point", + "ieee754" + ], + "license": "BSD-3-Clause", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/ieee754.git" + }, + "scripts": { + "test": "standard && npm run test-node && npm run test-browser", + "test-browser": "airtap -- test/*.js", + "test-browser-local": "airtap --local -- test/*.js", + "test-node": "tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/node_modules/ini/LICENSE b/node_modules/ini/LICENSE new file mode 100644 index 000000000..19129e315 --- /dev/null +++ b/node_modules/ini/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/ini/README.md b/node_modules/ini/README.md new file mode 100644 index 000000000..33df25829 --- /dev/null +++ b/node_modules/ini/README.md @@ -0,0 +1,102 @@ +An ini format parser and serializer for node. + +Sections are treated as nested objects. Items before the first +heading are saved on the object directly. 
+ +## Usage + +Consider an ini-file `config.ini` that looks like this: + + ; this comment is being ignored + scope = global + + [database] + user = dbuser + password = dbpassword + database = use_this_database + + [paths.default] + datadir = /var/lib/data + array[] = first value + array[] = second value + array[] = third value + +You can read, manipulate and write the ini-file like so: + + var fs = require('fs') + , ini = require('ini') + + var config = ini.parse(fs.readFileSync('./config.ini', 'utf-8')) + + config.scope = 'local' + config.database.database = 'use_another_database' + config.paths.default.tmpdir = '/tmp' + delete config.paths.default.datadir + config.paths.default.array.push('fourth value') + + fs.writeFileSync('./config_modified.ini', ini.stringify(config, { section: 'section' })) + +This will result in a file called `config_modified.ini` being written +to the filesystem with the following content: + + [section] + scope=local + [section.database] + user=dbuser + password=dbpassword + database=use_another_database + [section.paths.default] + tmpdir=/tmp + array[]=first value + array[]=second value + array[]=third value + array[]=fourth value + + +## API + +### decode(inistring) + +Decode the ini-style formatted `inistring` into a nested object. + +### parse(inistring) + +Alias for `decode(inistring)` + +### encode(object, [options]) + +Encode the object `object` into an ini-style formatted string. If the +optional parameter `section` is given, then all top-level properties +of the object are put into this section and the `section`-string is +prepended to all sub-sections, see the usage example above. + +The `options` object may contain the following: + +* `section` A string which will be the first `section` in the encoded + ini data. Defaults to none. +* `whitespace` Boolean to specify whether to put whitespace around the + `=` character. By default, whitespace is omitted, to be friendly to + some persnickety old parsers that don't tolerate it well. But some + find that it's more human-readable and pretty with the whitespace. + +For backwards compatibility reasons, if a `string` options is passed +in, then it is assumed to be the `section` value. + +### stringify(object, [options]) + +Alias for `encode(object, [options])` + +### safe(val) + +Escapes the string `val` such that it is safe to be used as a key or +value in an ini-file. Basically escapes quotes. For example + + ini.safe('"unsafe string"') + +would result in + + "\"unsafe string\"" + +### unsafe(val) + +Unescapes the string `val` diff --git a/node_modules/ini/ini.js b/node_modules/ini/ini.js new file mode 100644 index 000000000..b576f08d7 --- /dev/null +++ b/node_modules/ini/ini.js @@ -0,0 +1,206 @@ +exports.parse = exports.decode = decode + +exports.stringify = exports.encode = encode + +exports.safe = safe +exports.unsafe = unsafe + +var eol = typeof process !== 'undefined' && + process.platform === 'win32' ? '\r\n' : '\n' + +function encode (obj, opt) { + var children = [] + var out = '' + + if (typeof opt === 'string') { + opt = { + section: opt, + whitespace: false, + } + } else { + opt = opt || {} + opt.whitespace = opt.whitespace === true + } + + var separator = opt.whitespace ? 
' = ' : '=' + + Object.keys(obj).forEach(function (k, _, __) { + var val = obj[k] + if (val && Array.isArray(val)) { + val.forEach(function (item) { + out += safe(k + '[]') + separator + safe(item) + '\n' + }) + } else if (val && typeof val === 'object') + children.push(k) + else + out += safe(k) + separator + safe(val) + eol + }) + + if (opt.section && out.length) + out = '[' + safe(opt.section) + ']' + eol + out + + children.forEach(function (k, _, __) { + var nk = dotSplit(k).join('\\.') + var section = (opt.section ? opt.section + '.' : '') + nk + var child = encode(obj[k], { + section: section, + whitespace: opt.whitespace, + }) + if (out.length && child.length) + out += eol + + out += child + }) + + return out +} + +function dotSplit (str) { + return str.replace(/\1/g, '\u0002LITERAL\\1LITERAL\u0002') + .replace(/\\\./g, '\u0001') + .split(/\./).map(function (part) { + return part.replace(/\1/g, '\\.') + .replace(/\2LITERAL\\1LITERAL\2/g, '\u0001') + }) +} + +function decode (str) { + var out = {} + var p = out + var section = null + // section |key = value + var re = /^\[([^\]]*)\]$|^([^=]+)(=(.*))?$/i + var lines = str.split(/[\r\n]+/g) + + lines.forEach(function (line, _, __) { + if (!line || line.match(/^\s*[;#]/)) + return + var match = line.match(re) + if (!match) + return + if (match[1] !== undefined) { + section = unsafe(match[1]) + if (section === '__proto__') { + // not allowed + // keep parsing the section, but don't attach it. + p = {} + return + } + p = out[section] = out[section] || {} + return + } + var key = unsafe(match[2]) + if (key === '__proto__') + return + var value = match[3] ? unsafe(match[4]) : true + switch (value) { + case 'true': + case 'false': + case 'null': value = JSON.parse(value) + } + + // Convert keys with '[]' suffix to an array + if (key.length > 2 && key.slice(-2) === '[]') { + key = key.substring(0, key.length - 2) + if (key === '__proto__') + return + if (!p[key]) + p[key] = [] + else if (!Array.isArray(p[key])) + p[key] = [p[key]] + } + + // safeguard against resetting a previously defined + // array by accidentally forgetting the brackets + if (Array.isArray(p[key])) + p[key].push(value) + else + p[key] = value + }) + + // {a:{y:1},"a.b":{x:2}} --> {a:{y:1,b:{x:2}}} + // use a filter to return the keys that have to be deleted. + Object.keys(out).filter(function (k, _, __) { + if (!out[k] || + typeof out[k] !== 'object' || + Array.isArray(out[k])) + return false + + // see if the parent section is also an object. + // if so, add it to that, and mark this one for deletion + var parts = dotSplit(k) + var p = out + var l = parts.pop() + var nl = l.replace(/\\\./g, '.') + parts.forEach(function (part, _, __) { + if (part === '__proto__') + return + if (!p[part] || typeof p[part] !== 'object') + p[part] = {} + p = p[part] + }) + if (p === out && nl === l) + return false + + p[nl] = out[k] + return true + }).forEach(function (del, _, __) { + delete out[del] + }) + + return out +} + +function isQuoted (val) { + return (val.charAt(0) === '"' && val.slice(-1) === '"') || + (val.charAt(0) === "'" && val.slice(-1) === "'") +} + +function safe (val) { + return (typeof val !== 'string' || + val.match(/[=\r\n]/) || + val.match(/^\[/) || + (val.length > 1 && + isQuoted(val)) || + val !== val.trim()) + ? 
JSON.stringify(val) + : val.replace(/;/g, '\\;').replace(/#/g, '\\#') +} + +function unsafe (val, doUnesc) { + val = (val || '').trim() + if (isQuoted(val)) { + // remove the single quotes before calling JSON.parse + if (val.charAt(0) === "'") + val = val.substr(1, val.length - 2) + + try { + val = JSON.parse(val) + } catch (_) {} + } else { + // walk the val to find the first not-escaped ; character + var esc = false + var unesc = '' + for (var i = 0, l = val.length; i < l; i++) { + var c = val.charAt(i) + if (esc) { + if ('\\;#'.indexOf(c) !== -1) + unesc += c + else + unesc += '\\' + c + + esc = false + } else if (';#'.indexOf(c) !== -1) + break + else if (c === '\\') + esc = true + else + unesc += c + } + if (esc) + unesc += '\\' + + return unesc.trim() + } + return val +} diff --git a/node_modules/ini/package.json b/node_modules/ini/package.json new file mode 100644 index 000000000..c830a3556 --- /dev/null +++ b/node_modules/ini/package.json @@ -0,0 +1,33 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "name": "ini", + "description": "An ini encoder/decoder for node", + "version": "1.3.8", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/ini.git" + }, + "main": "ini.js", + "scripts": { + "eslint": "eslint", + "lint": "npm run eslint -- ini.js test/*.js", + "lintfix": "npm run lint -- --fix", + "test": "tap", + "posttest": "npm run lint", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "devDependencies": { + "eslint": "^7.9.0", + "eslint-plugin-import": "^2.22.0", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^4.2.1", + "eslint-plugin-standard": "^4.0.1", + "tap": "14" + }, + "license": "ISC", + "files": [ + "ini.js" + ] +} diff --git a/node_modules/is-arrayish/LICENSE b/node_modules/is-arrayish/LICENSE new file mode 100644 index 000000000..0a5f461a6 --- /dev/null +++ b/node_modules/is-arrayish/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 JD Ballard + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/is-arrayish/README.md b/node_modules/is-arrayish/README.md new file mode 100644 index 000000000..7d360724c --- /dev/null +++ b/node_modules/is-arrayish/README.md @@ -0,0 +1,16 @@ +# node-is-arrayish [![Travis-CI.org Build Status](https://img.shields.io/travis/Qix-/node-is-arrayish.svg?style=flat-square)](https://travis-ci.org/Qix-/node-is-arrayish) [![Coveralls.io Coverage Rating](https://img.shields.io/coveralls/Qix-/node-is-arrayish.svg?style=flat-square)](https://coveralls.io/r/Qix-/node-is-arrayish) +> Determines if an object can be used like an Array + +## Example +```javascript +var isArrayish = require('is-arrayish'); + +isArrayish([]); // true +isArrayish({__proto__: []}); // true +isArrayish({}); // false +isArrayish({length:10}); // false +``` + +## License +Licensed under the [MIT License](http://opensource.org/licenses/MIT). +You can find a copy of it in [LICENSE](LICENSE). diff --git a/node_modules/is-arrayish/index.js b/node_modules/is-arrayish/index.js new file mode 100644 index 000000000..729ca40cd --- /dev/null +++ b/node_modules/is-arrayish/index.js @@ -0,0 +1,9 @@ +module.exports = function isArrayish(obj) { + if (!obj || typeof obj === 'string') { + return false; + } + + return obj instanceof Array || Array.isArray(obj) || + (obj.length >= 0 && (obj.splice instanceof Function || + (Object.getOwnPropertyDescriptor(obj, (obj.length - 1)) && obj.constructor.name !== 'String'))); +}; diff --git a/node_modules/is-arrayish/package.json b/node_modules/is-arrayish/package.json new file mode 100644 index 000000000..8a54e33cc --- /dev/null +++ b/node_modules/is-arrayish/package.json @@ -0,0 +1,45 @@ +{ + "name": "is-arrayish", + "description": "Determines if an object can be used as an array", + "version": "0.3.2", + "author": "Qix (http://github.com/qix-)", + "keywords": [ + "is", + "array", + "duck", + "type", + "arrayish", + "similar", + "proto", + "prototype", + "type" + ], + "license": "MIT", + "scripts": { + "test": "mocha --require coffeescript/register ./test/**/*.coffee", + "lint": "zeit-eslint --ext .jsx,.js .", + "lint-staged": "git diff --diff-filter=ACMRT --cached --name-only '*.js' '*.jsx' | xargs zeit-eslint" + }, + "repository": { + "type": "git", + "url": "https://github.com/qix-/node-is-arrayish.git" + }, + "devDependencies": { + "@zeit/eslint-config-node": "^0.3.0", + "@zeit/git-hooks": "^0.1.4", + "coffeescript": "^2.3.1", + "coveralls": "^3.0.1", + "eslint": "^4.19.1", + "istanbul": "^0.4.5", + "mocha": "^5.2.0", + "should": "^13.2.1" + }, + "eslintConfig": { + "extends": [ + "@zeit/eslint-config-node" + ] + }, + "git": { + "pre-commit": "lint-staged" + } +} diff --git a/node_modules/is-arrayish/yarn-error.log b/node_modules/is-arrayish/yarn-error.log new file mode 100644 index 000000000..d3dcf37b7 --- /dev/null +++ b/node_modules/is-arrayish/yarn-error.log @@ -0,0 +1,1443 @@ +Arguments: + /Users/junon/n/bin/node /Users/junon/.yarn/bin/yarn.js test + +PATH: + /Users/junon/.yarn/bin:/Users/junon/.config/yarn/global/node_modules/.bin:/Users/junon/perl5/bin:/Users/junon/google-cloud-sdk/bin:/usr/local/sbin:/usr/local/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/X11/bin:/Users/junon/bin:/Users/junon/.local/bin:/src/.go/bin:/src/llvm/llvm/build/bin:/Users/junon/Library/Android/sdk/platform-tools:/Users/junon/n/bin:/usr/local/texlive/2017/bin/x86_64-darwin/ + +Yarn version: + 1.5.1 + +Node version: + 9.6.1 + +Platform: + darwin x64 + +npm manifest: + { + "name": "is-arrayish", + "description": "Determines if an object can be used 
as an array", + "version": "0.3.1", + "author": "Qix (http://github.com/qix-)", + "keywords": [ + "is", + "array", + "duck", + "type", + "arrayish", + "similar", + "proto", + "prototype", + "type" + ], + "license": "MIT", + "scripts": { + "test": "mocha --require coffeescript/register", + "lint": "zeit-eslint --ext .jsx,.js .", + "lint-staged": "git diff --diff-filter=ACMRT --cached --name-only '*.js' '*.jsx' | xargs zeit-eslint" + }, + "repository": { + "type": "git", + "url": "https://github.com/qix-/node-is-arrayish.git" + }, + "devDependencies": { + "@zeit/eslint-config-node": "^0.3.0", + "@zeit/git-hooks": "^0.1.4", + "coffeescript": "^2.3.1", + "coveralls": "^3.0.1", + "eslint": "^4.19.1", + "istanbul": "^0.4.5", + "mocha": "^5.2.0", + "should": "^13.2.1" + }, + "eslintConfig": { + "extends": [ + "@zeit/eslint-config-node" + ] + }, + "git": { + "pre-commit": "lint-staged" + } + } + +yarn manifest: + No manifest + +Lockfile: + # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. + # yarn lockfile v1 + + + "@zeit/eslint-config-base@0.3.0": + version "0.3.0" + resolved "https://registry.yarnpkg.com/@zeit/eslint-config-base/-/eslint-config-base-0.3.0.tgz#32a58c3e52eca4025604758cb4591f3d28e22fb4" + dependencies: + arg "^1.0.0" + chalk "^2.3.0" + + "@zeit/eslint-config-node@^0.3.0": + version "0.3.0" + resolved "https://registry.yarnpkg.com/@zeit/eslint-config-node/-/eslint-config-node-0.3.0.tgz#6e328328f366f66c2a0549a69131bbcd9735f098" + dependencies: + "@zeit/eslint-config-base" "0.3.0" + + "@zeit/git-hooks@^0.1.4": + version "0.1.4" + resolved "https://registry.yarnpkg.com/@zeit/git-hooks/-/git-hooks-0.1.4.tgz#70583db5dd69726a62c7963520e67f2c3a33cc5f" + + abbrev@1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" + + abbrev@1.0.x: + version "1.0.9" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" + + acorn-jsx@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-3.0.1.tgz#afdf9488fb1ecefc8348f6fb22f464e32a58b36b" + dependencies: + acorn "^3.0.4" + + acorn@^3.0.4: + version "3.3.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a" + + acorn@^5.5.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.7.1.tgz#f095829297706a7c9776958c0afc8930a9b9d9d8" + + ajv-keywords@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-2.1.1.tgz#617997fc5f60576894c435f940d819e135b80762" + + ajv@^5.1.0, ajv@^5.2.3, ajv@^5.3.0: + version "5.5.2" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" + dependencies: + co "^4.6.0" + fast-deep-equal "^1.0.0" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.3.0" + + align-text@^0.1.1, align-text@^0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117" + dependencies: + kind-of "^3.0.2" + longest "^1.0.1" + repeat-string "^1.5.2" + + amdefine@>=0.0.4: + version "1.0.1" + resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" + + ansi-escapes@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.1.0.tgz#f73207bb81207d75fd6c83f125af26eea378ca30" + + ansi-regex@^2.0.0: + version "2.1.1" + resolved 
"https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + + ansi-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" + + ansi-styles@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" + + ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + dependencies: + color-convert "^1.9.0" + + arg@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/arg/-/arg-1.0.1.tgz#892a26d841bd5a64880bbc8f73dd64a705910ca3" + + argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + dependencies: + sprintf-js "~1.0.2" + + array-union@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" + dependencies: + array-uniq "^1.0.1" + + array-uniq@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" + + arrify@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" + + asn1@~0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.3.tgz#dac8787713c9966849fc8180777ebe9c1ddf3b86" + + assert-plus@1.0.0, assert-plus@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + + async@1.x, async@^1.4.0: + version "1.5.2" + resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" + + asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + + aws-sign2@~0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + + aws4@^1.6.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.7.0.tgz#d4d0e9b9dbfca77bf08eeb0a8a471550fe39e289" + + babel-code-frame@^6.22.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" + dependencies: + chalk "^1.1.3" + esutils "^2.0.2" + js-tokens "^3.0.2" + + balanced-match@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + + bcrypt-pbkdf@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz#63bc5dcb61331b92bc05fd528953c33462a06f8d" + dependencies: + tweetnacl "^0.14.3" + + brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + + browser-stdout@1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60" + + buffer-from@^1.0.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.0.tgz#87fcaa3a298358e0ade6e442cfce840740d1ad04" + + caller-path@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-0.1.0.tgz#94085ef63581ecd3daa92444a8fe94e82577751f" + dependencies: + callsites "^0.2.0" + + callsites@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-0.2.0.tgz#afab96262910a7f33c19a5775825c69f34e350ca" + + camelcase@^1.0.2: + version "1.2.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" + + caseless@~0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + + center-align@^0.1.1: + version "0.1.3" + resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad" + dependencies: + align-text "^0.1.3" + lazy-cache "^1.0.3" + + chalk@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" + dependencies: + ansi-styles "^2.2.1" + escape-string-regexp "^1.0.2" + has-ansi "^2.0.0" + strip-ansi "^3.0.0" + supports-color "^2.0.0" + + chalk@^2.0.0, chalk@^2.1.0, chalk@^2.3.0: + version "2.4.1" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.1.tgz#18c49ab16a037b6eb0152cc83e3471338215b66e" + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + + chardet@^0.4.0: + version "0.4.2" + resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.4.2.tgz#b5473b33dc97c424e5d98dc87d55d4d8a29c8bf2" + + circular-json@^0.3.1: + version "0.3.3" + resolved "https://registry.yarnpkg.com/circular-json/-/circular-json-0.3.3.tgz#815c99ea84f6809529d2f45791bdf82711352d66" + + cli-cursor@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" + dependencies: + restore-cursor "^2.0.0" + + cli-width@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.0.tgz#ff19ede8a9a5e579324147b0c11f0fbcbabed639" + + cliui@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1" + dependencies: + center-align "^0.1.1" + right-align "^0.1.1" + wordwrap "0.0.2" + + co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + + coffeescript@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/coffeescript/-/coffeescript-2.3.1.tgz#a25f69c251d25805c9842e57fc94bfc453ef6aed" + + color-convert@^1.9.0: + version "1.9.2" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.2.tgz#49881b8fba67df12a96bdf3f56c0aab9e7913147" + dependencies: + color-name "1.1.1" + + color-name@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.1.tgz#4b1415304cf50028ea81643643bd82ea05803689" + + combined-stream@1.0.6, combined-stream@~1.0.5: + version "1.0.6" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.6.tgz#723e7df6e801ac5613113a7e445a9b69cb632818" + dependencies: + delayed-stream "~1.0.0" + + commander@2.15.1: + version "2.15.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.15.1.tgz#df46e867d0fc2aec66a34662b406a9ccafff5b0f" + + concat-map@0.0.1: + version "0.0.1" + resolved 
"https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + + concat-stream@^1.6.0: + version "1.6.2" + resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" + dependencies: + buffer-from "^1.0.0" + inherits "^2.0.3" + readable-stream "^2.2.2" + typedarray "^0.0.6" + + core-util-is@1.0.2, core-util-is@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + + coveralls@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/coveralls/-/coveralls-3.0.1.tgz#12e15914eaa29204e56869a5ece7b9e1492d2ae2" + dependencies: + js-yaml "^3.6.1" + lcov-parse "^0.0.10" + log-driver "^1.2.5" + minimist "^1.2.0" + request "^2.79.0" + + cross-spawn@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" + dependencies: + lru-cache "^4.0.1" + shebang-command "^1.2.0" + which "^1.2.9" + + dashdash@^1.12.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + dependencies: + assert-plus "^1.0.0" + + debug@3.1.0, debug@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" + dependencies: + ms "2.0.0" + + decamelize@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + + deep-is@~0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" + + del@^2.0.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/del/-/del-2.2.2.tgz#c12c981d067846c84bcaf862cff930d907ffd1a8" + dependencies: + globby "^5.0.0" + is-path-cwd "^1.0.0" + is-path-in-cwd "^1.0.0" + object-assign "^4.0.1" + pify "^2.0.0" + pinkie-promise "^2.0.0" + rimraf "^2.2.8" + + delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + + diff@3.5.0: + version "3.5.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12" + + doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + dependencies: + esutils "^2.0.2" + + ecc-jsbn@~0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz#0fc73a9ed5f0d53c38193398523ef7e543777505" + dependencies: + jsbn "~0.1.0" + + escape-string-regexp@1.0.5, escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + + escodegen@1.8.x: + version "1.8.1" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" + dependencies: + esprima "^2.7.1" + estraverse "^1.9.1" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.2.0" + + eslint-scope@^3.7.1: + version "3.7.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-3.7.1.tgz#3d63c3edfda02e06e01a452ad88caacc7cdcb6e8" + dependencies: + esrecurse "^4.1.0" + estraverse "^4.1.1" + + eslint-visitor-keys@^1.0.0: + 
version "1.0.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz#3f3180fb2e291017716acb4c9d6d5b5c34a6a81d" + + eslint@^4.19.1: + version "4.19.1" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-4.19.1.tgz#32d1d653e1d90408854bfb296f076ec7e186a300" + dependencies: + ajv "^5.3.0" + babel-code-frame "^6.22.0" + chalk "^2.1.0" + concat-stream "^1.6.0" + cross-spawn "^5.1.0" + debug "^3.1.0" + doctrine "^2.1.0" + eslint-scope "^3.7.1" + eslint-visitor-keys "^1.0.0" + espree "^3.5.4" + esquery "^1.0.0" + esutils "^2.0.2" + file-entry-cache "^2.0.0" + functional-red-black-tree "^1.0.1" + glob "^7.1.2" + globals "^11.0.1" + ignore "^3.3.3" + imurmurhash "^0.1.4" + inquirer "^3.0.6" + is-resolvable "^1.0.0" + js-yaml "^3.9.1" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.3.0" + lodash "^4.17.4" + minimatch "^3.0.2" + mkdirp "^0.5.1" + natural-compare "^1.4.0" + optionator "^0.8.2" + path-is-inside "^1.0.2" + pluralize "^7.0.0" + progress "^2.0.0" + regexpp "^1.0.1" + require-uncached "^1.0.3" + semver "^5.3.0" + strip-ansi "^4.0.0" + strip-json-comments "~2.0.1" + table "4.0.2" + text-table "~0.2.0" + + espree@^3.5.4: + version "3.5.4" + resolved "https://registry.yarnpkg.com/espree/-/espree-3.5.4.tgz#b0f447187c8a8bed944b815a660bddf5deb5d1a7" + dependencies: + acorn "^5.5.0" + acorn-jsx "^3.0.0" + + esprima@2.7.x, esprima@^2.7.1: + version "2.7.3" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" + + esprima@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.0.tgz#4499eddcd1110e0b218bacf2fa7f7f59f55ca804" + + esquery@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.0.1.tgz#406c51658b1f5991a5f9b62b1dc25b00e3e5c708" + dependencies: + estraverse "^4.0.0" + + esrecurse@^4.1.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" + dependencies: + estraverse "^4.1.0" + + estraverse@^1.9.1: + version "1.9.3" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" + + estraverse@^4.0.0, estraverse@^4.1.0, estraverse@^4.1.1: + version "4.2.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" + + esutils@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" + + extend@~3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.1.tgz#a755ea7bc1adfcc5a31ce7e762dbaadc5e636444" + + external-editor@^2.0.4: + version "2.2.0" + resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-2.2.0.tgz#045511cfd8d133f3846673d1047c154e214ad3d5" + dependencies: + chardet "^0.4.0" + iconv-lite "^0.4.17" + tmp "^0.0.33" + + extsprintf@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + + extsprintf@^1.2.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" + + fast-deep-equal@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614" + + fast-json-stable-stringify@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" + + fast-levenshtein@~2.0.4: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + + figures@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" + dependencies: + escape-string-regexp "^1.0.5" + + file-entry-cache@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-2.0.0.tgz#c392990c3e684783d838b8c84a45d8a048458361" + dependencies: + flat-cache "^1.2.1" + object-assign "^4.0.1" + + flat-cache@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-1.3.0.tgz#d3030b32b38154f4e3b7e9c709f490f7ef97c481" + dependencies: + circular-json "^0.3.1" + del "^2.0.2" + graceful-fs "^4.1.2" + write "^0.2.1" + + forever-agent@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + + form-data@~2.3.1: + version "2.3.2" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.2.tgz#4970498be604c20c005d4f5c23aecd21d6b49099" + dependencies: + asynckit "^0.4.0" + combined-stream "1.0.6" + mime-types "^2.1.12" + + fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + + functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + + getpass@^0.1.1: + version "0.1.7" + resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + dependencies: + assert-plus "^1.0.0" + + glob@7.1.2, glob@^7.0.3, glob@^7.0.5, glob@^7.1.2: + version "7.1.2" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15" + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + + glob@^5.0.15: + version "5.0.15" + resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" + dependencies: + inflight "^1.0.4" + inherits "2" + minimatch "2 || 3" + once "^1.3.0" + path-is-absolute "^1.0.0" + + globals@^11.0.1: + version "11.5.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.5.0.tgz#6bc840de6771173b191f13d3a9c94d441ee92642" + + globby@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-5.0.0.tgz#ebd84667ca0dbb330b99bcfc68eac2bc54370e0d" + dependencies: + array-union "^1.0.1" + arrify "^1.0.0" + glob "^7.0.3" + object-assign "^4.0.1" + pify "^2.0.0" + pinkie-promise "^2.0.0" + + graceful-fs@^4.1.2: + version "4.1.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658" + + growl@1.10.5: + version "1.10.5" + resolved "https://registry.yarnpkg.com/growl/-/growl-1.10.5.tgz#f2735dc2283674fa67478b10181059355c369e5e" + + handlebars@^4.0.1: + version "4.0.11" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.0.11.tgz#630a35dfe0294bc281edae6ffc5d329fc7982dcc" + dependencies: + async "^1.4.0" + optimist "^0.6.1" + source-map "^0.4.4" + optionalDependencies: + 
uglify-js "^2.6" + + har-schema@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" + + har-validator@~5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.0.3.tgz#ba402c266194f15956ef15e0fcf242993f6a7dfd" + dependencies: + ajv "^5.1.0" + har-schema "^2.0.0" + + has-ansi@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" + dependencies: + ansi-regex "^2.0.0" + + has-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" + + has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + + he@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/he/-/he-1.1.1.tgz#93410fd21b009735151f8868c2f271f3427e23fd" + + http-signature@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" + dependencies: + assert-plus "^1.0.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + + iconv-lite@^0.4.17: + version "0.4.23" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.23.tgz#297871f63be507adcfbfca715d0cd0eed84e9a63" + dependencies: + safer-buffer ">= 2.1.2 < 3" + + ignore@^3.3.3: + version "3.3.8" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.8.tgz#3f8e9c35d38708a3a7e0e9abb6c73e7ee7707b2b" + + imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + + inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + dependencies: + once "^1.3.0" + wrappy "1" + + inherits@2, inherits@^2.0.3, inherits@~2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + + inquirer@^3.0.6: + version "3.3.0" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-3.3.0.tgz#9dd2f2ad765dcab1ff0443b491442a20ba227dc9" + dependencies: + ansi-escapes "^3.0.0" + chalk "^2.0.0" + cli-cursor "^2.1.0" + cli-width "^2.0.0" + external-editor "^2.0.4" + figures "^2.0.0" + lodash "^4.3.0" + mute-stream "0.0.7" + run-async "^2.2.0" + rx-lite "^4.0.8" + rx-lite-aggregates "^4.0.8" + string-width "^2.1.0" + strip-ansi "^4.0.0" + through "^2.3.6" + + is-buffer@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + + is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + + is-path-cwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-1.0.0.tgz#d225ec23132e89edd38fda767472e62e65f1106d" + + is-path-in-cwd@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-1.0.1.tgz#5ac48b345ef675339bd6c7a48a912110b241cf52" + dependencies: + is-path-inside "^1.0.0" + + is-path-inside@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.1.tgz#8ef5b7de50437a3fdca6b4e865ef7aa55cb48036" + dependencies: + 
path-is-inside "^1.0.1" + + is-promise@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" + + is-resolvable@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88" + + is-typedarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + + isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + + isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + + isstream@~0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + + istanbul@^0.4.5: + version "0.4.5" + resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-0.4.5.tgz#65c7d73d4c4da84d4f3ac310b918fb0b8033733b" + dependencies: + abbrev "1.0.x" + async "1.x" + escodegen "1.8.x" + esprima "2.7.x" + glob "^5.0.15" + handlebars "^4.0.1" + js-yaml "3.x" + mkdirp "0.5.x" + nopt "3.x" + once "1.x" + resolve "1.1.x" + supports-color "^3.1.0" + which "^1.1.1" + wordwrap "^1.0.0" + + js-tokens@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" + + js-yaml@3.x, js-yaml@^3.6.1, js-yaml@^3.9.1: + version "3.12.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.12.0.tgz#eaed656ec8344f10f527c6bfa1b6e2244de167d1" + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + + jsbn@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + + json-schema-traverse@^0.3.0: + version "0.3.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" + + json-schema@0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" + + json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + + json-stringify-safe@~5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + + jsprim@^1.2.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" + dependencies: + assert-plus "1.0.0" + extsprintf "1.3.0" + json-schema "0.2.3" + verror "1.10.0" + + kind-of@^3.0.2: + version "3.2.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" + dependencies: + is-buffer "^1.1.5" + + lazy-cache@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" + + lcov-parse@^0.0.10: + version "0.0.10" + resolved "https://registry.yarnpkg.com/lcov-parse/-/lcov-parse-0.0.10.tgz#1b0b8ff9ac9c7889250582b70b71315d9da6d9a3" + + levn@^0.3.0, levn@~0.3.0: + version "0.3.0" + resolved 
"https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + + lodash@^4.17.4, lodash@^4.3.0: + version "4.17.10" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.10.tgz#1b7793cf7259ea38fb3661d4d38b3260af8ae4e7" + + log-driver@^1.2.5: + version "1.2.7" + resolved "https://registry.yarnpkg.com/log-driver/-/log-driver-1.2.7.tgz#63b95021f0702fedfa2c9bb0a24e7797d71871d8" + + longest@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" + + lru-cache@^4.0.1: + version "4.1.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.3.tgz#a1175cf3496dfc8436c156c334b4955992bce69c" + dependencies: + pseudomap "^1.0.2" + yallist "^2.1.2" + + mime-db@~1.33.0: + version "1.33.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.33.0.tgz#a3492050a5cb9b63450541e39d9788d2272783db" + + mime-types@^2.1.12, mime-types@~2.1.17: + version "2.1.18" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.18.tgz#6f323f60a83d11146f831ff11fd66e2fe5503bb8" + dependencies: + mime-db "~1.33.0" + + mimic-fn@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" + + "minimatch@2 || 3", minimatch@3.0.4, minimatch@^3.0.2, minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + dependencies: + brace-expansion "^1.1.7" + + minimist@0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" + + minimist@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" + + minimist@~0.0.1: + version "0.0.10" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" + + mkdirp@0.5.1, mkdirp@0.5.x, mkdirp@^0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" + dependencies: + minimist "0.0.8" + + mocha@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/mocha/-/mocha-5.2.0.tgz#6d8ae508f59167f940f2b5b3c4a612ae50c90ae6" + dependencies: + browser-stdout "1.3.1" + commander "2.15.1" + debug "3.1.0" + diff "3.5.0" + escape-string-regexp "1.0.5" + glob "7.1.2" + growl "1.10.5" + he "1.1.1" + minimatch "3.0.4" + mkdirp "0.5.1" + supports-color "5.4.0" + + ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + + mute-stream@0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab" + + natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + + nopt@3.x: + version "3.0.6" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" + dependencies: + abbrev "1" + + oauth-sign@~0.8.2: + version "0.8.2" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.8.2.tgz#46a6ab7f0aead8deae9ec0565780b7d4efeb9d43" + + object-assign@^4.0.1: + version "4.1.1" + resolved 
"https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + + once@1.x, once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + dependencies: + wrappy "1" + + onetime@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" + dependencies: + mimic-fn "^1.0.0" + + optimist@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" + dependencies: + minimist "~0.0.1" + wordwrap "~0.0.2" + + optionator@^0.8.1, optionator@^0.8.2: + version "0.8.2" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64" + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.4" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + wordwrap "~1.0.0" + + os-tmpdir@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + + path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + + path-is-inside@^1.0.1, path-is-inside@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" + + performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + + pify@^2.0.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + + pinkie-promise@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" + dependencies: + pinkie "^2.0.0" + + pinkie@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" + + pluralize@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-7.0.0.tgz#298b89df8b93b0221dbf421ad2b1b1ea23fc6777" + + prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + + process-nextick-args@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa" + + progress@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.0.tgz#8a1be366bf8fc23db2bd23f10c6fe920b4389d1f" + + pseudomap@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" + + punycode@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + + qs@~6.5.1: + version "6.5.2" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" + + readable-stream@^2.2.2: + version "2.3.6" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + 
process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + + regexpp@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-1.1.0.tgz#0e3516dd0b7904f413d2d4193dce4618c3a689ab" + + repeat-string@^1.5.2: + version "1.6.1" + resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + + request@^2.79.0: + version "2.87.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.87.0.tgz#32f00235cd08d482b4d0d68db93a829c0ed5756e" + dependencies: + aws-sign2 "~0.7.0" + aws4 "^1.6.0" + caseless "~0.12.0" + combined-stream "~1.0.5" + extend "~3.0.1" + forever-agent "~0.6.1" + form-data "~2.3.1" + har-validator "~5.0.3" + http-signature "~1.2.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.17" + oauth-sign "~0.8.2" + performance-now "^2.1.0" + qs "~6.5.1" + safe-buffer "^5.1.1" + tough-cookie "~2.3.3" + tunnel-agent "^0.6.0" + uuid "^3.1.0" + + require-uncached@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/require-uncached/-/require-uncached-1.0.3.tgz#4e0d56d6c9662fd31e43011c4b95aa49955421d3" + dependencies: + caller-path "^0.1.0" + resolve-from "^1.0.0" + + resolve-from@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-1.0.1.tgz#26cbfe935d1aeeeabb29bc3fe5aeb01e93d44226" + + resolve@1.1.x: + version "1.1.7" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" + + restore-cursor@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" + dependencies: + onetime "^2.0.0" + signal-exit "^3.0.2" + + right-align@^0.1.1: + version "0.1.3" + resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" + dependencies: + align-text "^0.1.1" + + rimraf@^2.2.8: + version "2.6.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36" + dependencies: + glob "^7.0.5" + + run-async@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.3.0.tgz#0371ab4ae0bdd720d4166d7dfda64ff7a445a6c0" + dependencies: + is-promise "^2.1.0" + + rx-lite-aggregates@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/rx-lite-aggregates/-/rx-lite-aggregates-4.0.8.tgz#753b87a89a11c95467c4ac1626c4efc4e05c67be" + dependencies: + rx-lite "*" + + rx-lite@*, rx-lite@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-4.0.8.tgz#0b1e11af8bc44836f04a6407e92da42467b79444" + + safe-buffer@^5.0.1, safe-buffer@^5.1.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + + "safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + + semver@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.5.0.tgz#dc4bbc7a6ca9d916dee5d43516f0092b58f7b8ab" + + shebang-command@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + dependencies: + shebang-regex "^1.0.0" + + 
shebang-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + + should-equal@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/should-equal/-/should-equal-2.0.0.tgz#6072cf83047360867e68e98b09d71143d04ee0c3" + dependencies: + should-type "^1.4.0" + + should-format@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/should-format/-/should-format-3.0.3.tgz#9bfc8f74fa39205c53d38c34d717303e277124f1" + dependencies: + should-type "^1.3.0" + should-type-adaptors "^1.0.1" + + should-type-adaptors@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/should-type-adaptors/-/should-type-adaptors-1.1.0.tgz#401e7f33b5533033944d5cd8bf2b65027792e27a" + dependencies: + should-type "^1.3.0" + should-util "^1.0.0" + + should-type@^1.3.0, should-type@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/should-type/-/should-type-1.4.0.tgz#0756d8ce846dfd09843a6947719dfa0d4cff5cf3" + + should-util@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/should-util/-/should-util-1.0.0.tgz#c98cda374aa6b190df8ba87c9889c2b4db620063" + + should@^13.2.1: + version "13.2.1" + resolved "https://registry.yarnpkg.com/should/-/should-13.2.1.tgz#84e6ebfbb145c79e0ae42307b25b3f62dcaf574e" + dependencies: + should-equal "^2.0.0" + should-format "^3.0.3" + should-type "^1.4.0" + should-type-adaptors "^1.0.1" + should-util "^1.0.0" + + signal-exit@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" + + slice-ansi@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-1.0.0.tgz#044f1a49d8842ff307aad6b505ed178bd950134d" + dependencies: + is-fullwidth-code-point "^2.0.0" + + source-map@^0.4.4: + version "0.4.4" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b" + dependencies: + amdefine ">=0.0.4" + + source-map@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" + dependencies: + amdefine ">=0.0.4" + + source-map@~0.5.1: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + + sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + + sshpk@^1.7.0: + version "1.14.2" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.14.2.tgz#c6fc61648a3d9c4e764fd3fcdf4ea105e492ba98" + dependencies: + asn1 "~0.2.3" + assert-plus "^1.0.0" + dashdash "^1.12.0" + getpass "^0.1.1" + safer-buffer "^2.0.2" + optionalDependencies: + bcrypt-pbkdf "^1.0.0" + ecc-jsbn "~0.1.1" + jsbn "~0.1.0" + tweetnacl "~0.14.0" + + string-width@^2.1.0, string-width@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^4.0.0" + + string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + dependencies: + safe-buffer "~5.1.0" + + strip-ansi@^3.0.0: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + dependencies: + ansi-regex "^2.0.0" + + strip-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + dependencies: + ansi-regex "^3.0.0" + + strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + + supports-color@5.4.0, supports-color@^5.3.0: + version "5.4.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.4.0.tgz#1c6b337402c2137605efe19f10fec390f6faab54" + dependencies: + has-flag "^3.0.0" + + supports-color@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" + + supports-color@^3.1.0: + version "3.2.3" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" + dependencies: + has-flag "^1.0.0" + + table@4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/table/-/table-4.0.2.tgz#a33447375391e766ad34d3486e6e2aedc84d2e36" + dependencies: + ajv "^5.2.3" + ajv-keywords "^2.1.0" + chalk "^2.1.0" + lodash "^4.17.4" + slice-ansi "1.0.0" + string-width "^2.1.1" + + text-table@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + + through@^2.3.6: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + + tmp@^0.0.33: + version "0.0.33" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" + dependencies: + os-tmpdir "~1.0.2" + + tough-cookie@~2.3.3: + version "2.3.4" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.4.tgz#ec60cee38ac675063ffc97a5c18970578ee83655" + dependencies: + punycode "^1.4.1" + + tunnel-agent@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + dependencies: + safe-buffer "^5.0.1" + + tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + + type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + dependencies: + prelude-ls "~1.1.2" + + typedarray@^0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" + + uglify-js@^2.6: + version "2.8.29" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd" + dependencies: + source-map "~0.5.1" + yargs "~3.10.0" + optionalDependencies: + uglify-to-browserify "~1.0.0" + + uglify-to-browserify@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" + + util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + + uuid@^3.1.0: + version "3.2.1" + resolved 
"https://registry.yarnpkg.com/uuid/-/uuid-3.2.1.tgz#12c528bb9d58d0b9265d9a2f6f0fe8be17ff1f14" + + verror@1.10.0: + version "1.10.0" + resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + dependencies: + assert-plus "^1.0.0" + core-util-is "1.0.2" + extsprintf "^1.2.0" + + which@^1.1.1, which@^1.2.9: + version "1.3.1" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + dependencies: + isexe "^2.0.0" + + window-size@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" + + wordwrap@0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" + + wordwrap@^1.0.0, wordwrap@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + + wordwrap@~0.0.2: + version "0.0.3" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" + + wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + + write@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/write/-/write-0.2.1.tgz#5fc03828e264cea3fe91455476f7a3c566cb0757" + dependencies: + mkdirp "^0.5.1" + + yallist@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" + + yargs@~3.10.0: + version "3.10.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" + dependencies: + camelcase "^1.0.2" + cliui "^2.1.0" + decamelize "^1.0.0" + window-size "0.1.0" + +Trace: + Error: Command failed. + Exit code: 1 + Command: sh + Arguments: -c mocha --require coffeescript/register + Directory: /src/qix-/node-is-arrayish + Output: + + at ProcessTermError.MessageError (/Users/junon/.yarn/lib/cli.js:186:110) + at new ProcessTermError (/Users/junon/.yarn/lib/cli.js:226:113) + at ChildProcess. (/Users/junon/.yarn/lib/cli.js:30281:17) + at ChildProcess.emit (events.js:127:13) + at maybeClose (internal/child_process.js:933:16) + at Process.ChildProcess._handle.onexit (internal/child_process.js:220:5) diff --git a/node_modules/lru-cache/LICENSE b/node_modules/lru-cache/LICENSE new file mode 100644 index 000000000..19129e315 --- /dev/null +++ b/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/lru-cache/README.md b/node_modules/lru-cache/README.md new file mode 100644 index 000000000..435dfebb7 --- /dev/null +++ b/node_modules/lru-cache/README.md @@ -0,0 +1,166 @@ +# lru cache + +A cache object that deletes the least-recently-used items. + +[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache) + +## Installation: + +```javascript +npm install lru-cache --save +``` + +## Usage: + +```javascript +var LRU = require("lru-cache") + , options = { max: 500 + , length: function (n, key) { return n * 2 + key.length } + , dispose: function (key, n) { n.close() } + , maxAge: 1000 * 60 * 60 } + , cache = new LRU(options) + , otherCache = new LRU(50) // sets just the max size + +cache.set("key", "value") +cache.get("key") // "value" + +// non-string keys ARE fully supported +// but note that it must be THE SAME object, not +// just a JSON-equivalent object. +var someObject = { a: 1 } +cache.set(someObject, 'a value') +// Object keys are not toString()-ed +cache.set('[object Object]', 'a different value') +assert.equal(cache.get(someObject), 'a value') +// A similar object with same keys/values won't work, +// because it's a different object identity +assert.equal(cache.get({ a: 1 }), undefined) + +cache.reset() // empty the cache +``` + +If you put more stuff in it, then items will fall out. + +If you try to put an oversized thing in it, then it'll fall out right +away. + +## Options + +* `max` The maximum size of the cache, checked by applying the length + function to all values in the cache. Not setting this is kind of + silly, since that's the whole purpose of this lib, but it defaults + to `Infinity`. Setting it to a non-number or negative number will + throw a `TypeError`. Setting it to 0 makes it be `Infinity`. +* `maxAge` Maximum age in ms. Items are not pro-actively pruned out + as they age, but if you try to get an item that is too old, it'll + drop it and return undefined instead of giving it to you. + Setting this to a negative value will make everything seem old! + Setting it to a non-number will throw a `TypeError`. +* `length` Function that is used to calculate the length of stored + items. If you're storing strings or buffers, then you probably want + to do something like `function(n, key){return n.length}`. The default is + `function(){return 1}`, which is fine if you want to store `max` + like-sized things. The item is passed as the first argument, and + the key is passed as the second argumnet. +* `dispose` Function that is called on items when they are dropped + from the cache. This can be handy if you want to close file + descriptors or do other cleanup tasks when items are no longer + accessible. Called with `key, value`. It's called *before* + actually removing the item from the internal cache, so if you want + to immediately put it back in, you'll have to do that in a + `nextTick` or `setTimeout` callback or it won't do anything. +* `stale` By default, if you set a `maxAge`, it'll only actually pull + stale items out of the cache when you `get(key)`. (That is, it's + not pre-emptively doing a `setTimeout` or anything.) If you set + `stale:true`, it'll return the stale value before deleting it. If + you don't set this, then it'll return `undefined` when you try to + get a stale entry, as if it had already been deleted. 
+* `noDisposeOnSet` By default, if you set a `dispose()` method, then + it'll be called whenever a `set()` operation overwrites an existing + key. If you set this option, `dispose()` will only be called when a + key falls out of the cache, not when it is overwritten. +* `updateAgeOnGet` When using time-expiring entries with `maxAge`, + setting this to `true` will make each item's effective time update + to the current time whenever it is retrieved from cache, causing it + to not expire. (It can still fall out of cache based on recency of + use, of course.) + +## API + +* `set(key, value, maxAge)` +* `get(key) => value` + + Both of these will update the "recently used"-ness of the key. + They do what you think. `maxAge` is optional and overrides the + cache `maxAge` option if provided. + + If the key is not found, `get()` will return `undefined`. + + The key and val can be any value. + +* `peek(key)` + + Returns the key value (or `undefined` if not found) without + updating the "recently used"-ness of the key. + + (If you find yourself using this a lot, you *might* be using the + wrong sort of data structure, but there are some use cases where + it's handy.) + +* `del(key)` + + Deletes a key out of the cache. + +* `reset()` + + Clear the cache entirely, throwing away all values. + +* `has(key)` + + Check if a key is in the cache, without updating the recent-ness + or deleting it for being stale. + +* `forEach(function(value,key,cache), [thisp])` + + Just like `Array.prototype.forEach`. Iterates over all the keys + in the cache, in order of recent-ness. (Ie, more recently used + items are iterated over first.) + +* `rforEach(function(value,key,cache), [thisp])` + + The same as `cache.forEach(...)` but items are iterated over in + reverse order. (ie, less recently used items are iterated over + first.) + +* `keys()` + + Return an array of the keys in the cache. + +* `values()` + + Return an array of the values in the cache. + +* `length` + + Return total length of objects in cache taking into account + `length` options function. + +* `itemCount` + + Return total quantity of objects currently in cache. Note, that + `stale` (see options) items are returned as part of this item + count. + +* `dump()` + + Return an array of the cache entries ready for serialization and usage + with 'destinationCache.load(arr)`. + +* `load(cacheEntriesArray)` + + Loads another cache entries array, obtained with `sourceCache.dump()`, + into the cache. The destination cache is reset before loading new entries + +* `prune()` + + Manually iterates over the entire cache proactively pruning old entries diff --git a/node_modules/lru-cache/index.js b/node_modules/lru-cache/index.js new file mode 100644 index 000000000..573b6b85b --- /dev/null +++ b/node_modules/lru-cache/index.js @@ -0,0 +1,334 @@ +'use strict' + +// A linked list to keep track of recently-used-ness +const Yallist = require('yallist') + +const MAX = Symbol('max') +const LENGTH = Symbol('length') +const LENGTH_CALCULATOR = Symbol('lengthCalculator') +const ALLOW_STALE = Symbol('allowStale') +const MAX_AGE = Symbol('maxAge') +const DISPOSE = Symbol('dispose') +const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') +const LRU_LIST = Symbol('lruList') +const CACHE = Symbol('cache') +const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') + +const naiveLength = () => 1 + +// lruList is a yallist where the head is the youngest +// item, and the tail is the oldest. the list contains the Hit +// objects as the entries. 
+// Each Hit object has a reference to its Yallist.Node. This +// never changes. +// +// cache is a Map (or PseudoMap) that matches the keys to +// the Yallist.Node object. +class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options } + + if (!options) + options = {} + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. + const max = this[MAX] = options.max || Infinity + + const lc = options.length || naiveLength + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc + this[ALLOW_STALE] = options.stale || false + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false + this.reset() + } + + // resize the cache when the max changes. + set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') + + this[MAX] = mL || Infinity + trim(this) + } + get max () { + return this[MAX] + } + + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale + } + get allowStale () { + return this[ALLOW_STALE] + } + + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') + + this[MAX_AGE] = mA + trim(this) + } + get maxAge () { + return this[MAX_AGE] + } + + // resize the cache when the lengthCalculator changes. + set lengthCalculator (lC) { + if (typeof lC !== 'function') + lC = naiveLength + + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(hit => { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }) + } + trim(this) + } + get lengthCalculator () { return this[LENGTH_CALCULATOR] } + + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } + } + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } + } + + keys () { + return this[LRU_LIST].toArray().map(k => k.key) + } + + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } + + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list + } + + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }).toArray().filter(h => h) + } + + dumpLru () { + return this[LRU_LIST] + } + + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') + + const now = maxAge ? 
Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + const node = this[CACHE].get(key) + const item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + const hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. + if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) + + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true + } + + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) + } + + get (key) { + return get(this, key, true) + } + + peek (key) { + return get(this, key, false) + } + + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null + + del(this, node) + return node.value + } + + del (key) { + del(this, this[CACHE].get(key)) + } + + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } + } + + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } +} + +const get = (self, key, doUse) => { + const node = self[CACHE].get(key) + if (node) { + const hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + return undefined + } else { + if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() + self[LRU_LIST].unshiftNode(node) + } + } + return hit.value + } +} + +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) + return false + + const diff = Date.now() - hit.now + return hit.maxAge ? diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) +} + +const trim = self => { + if (self[LENGTH] > self[MAX]) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. 
+ const prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +const del = (self, node) => { + if (node) { + const hit = node.value + if (self[DISPOSE]) + self[DISPOSE](hit.key, hit.value) + + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) +} + +module.exports = LRUCache diff --git a/node_modules/lru-cache/package.json b/node_modules/lru-cache/package.json new file mode 100644 index 000000000..43b7502c3 --- /dev/null +++ b/node_modules/lru-cache/package.json @@ -0,0 +1,34 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "6.0.0", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "main": "index.js", + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "benchmark": "^2.1.4", + "tap": "^14.10.7" + }, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "files": [ + "index.js" + ], + "engines": { + "node": ">=10" + } +} diff --git a/node_modules/mimic-response/index.d.ts b/node_modules/mimic-response/index.d.ts new file mode 100644 index 000000000..65a51e988 --- /dev/null +++ b/node_modules/mimic-response/index.d.ts @@ -0,0 +1,17 @@ +import {IncomingMessage} from 'http'; + +/** +Mimic a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage) + +Makes `toStream` include the properties from `fromStream`. + +@param fromStream - The stream to copy the properties from. +@param toStream - The stream to copy the properties to. +@return The same object as `toStream`. +*/ +declare function mimicResponse( + fromStream: IncomingMessage, // eslint-disable-line @typescript-eslint/prefer-readonly-parameter-types + toStream: T, +): T & IncomingMessage; + +export = mimicResponse; diff --git a/node_modules/mimic-response/index.js b/node_modules/mimic-response/index.js new file mode 100644 index 000000000..404e11113 --- /dev/null +++ b/node_modules/mimic-response/index.js @@ -0,0 +1,77 @@ +'use strict'; + +// We define these manually to ensure they're always copied +// even if they would move up the prototype chain +// https://nodejs.org/api/http.html#http_class_http_incomingmessage +const knownProperties = [ + 'aborted', + 'complete', + 'headers', + 'httpVersion', + 'httpVersionMinor', + 'httpVersionMajor', + 'method', + 'rawHeaders', + 'rawTrailers', + 'setTimeout', + 'socket', + 'statusCode', + 'statusMessage', + 'trailers', + 'url' +]; + +module.exports = (fromStream, toStream) => { + if (toStream._readableState.autoDestroy) { + throw new Error('The second stream must have the `autoDestroy` option set to `false`'); + } + + const fromProperties = new Set(Object.keys(fromStream).concat(knownProperties)); + + const properties = {}; + + for (const property of fromProperties) { + // Don't overwrite existing properties. 
+ if (property in toStream) { + continue; + } + + properties[property] = { + get() { + const value = fromStream[property]; + const isFunction = typeof value === 'function'; + + return isFunction ? value.bind(fromStream) : value; + }, + set(value) { + fromStream[property] = value; + }, + enumerable: true, + configurable: false + }; + } + + Object.defineProperties(toStream, properties); + + fromStream.once('aborted', () => { + toStream.destroy(); + + toStream.emit('aborted'); + }); + + fromStream.once('close', () => { + if (fromStream.complete) { + if (toStream.readable) { + toStream.once('end', () => { + toStream.emit('close'); + }); + } else { + toStream.emit('close'); + } + } else { + toStream.emit('close'); + } + }); + + return toStream; +}; diff --git a/node_modules/mimic-response/license b/node_modules/mimic-response/license new file mode 100644 index 000000000..fa7ceba3e --- /dev/null +++ b/node_modules/mimic-response/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
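For orientation while reading the vendored `mimic-response` implementation above: the snippet below is a minimal, self-contained sketch — not part of the package — of the property-proxying technique its `index.js` uses. Properties are defined on the target with getters and setters that read from and write back to the source stream, and function values are bound to the source so `this` keeps pointing at it. All names and values here are illustrative.

```js
// Illustrative sketch only (hypothetical objects, not mimic-response code).
const source = {
  statusCode: 200,
  headers: { etag: 'abc' },
  getHeader(name) { return this.headers[name]; } // relies on `this` === source
};
const target = {};

const descriptors = {};
for (const property of Object.keys(source)) {
  if (property in target) continue; // don't overwrite existing properties

  descriptors[property] = {
    get() {
      const value = source[property];
      // Bind functions so `this` still refers to the source object.
      return typeof value === 'function' ? value.bind(source) : value;
    },
    set(value) {
      source[property] = value; // writes go back to the source
    },
    enumerable: true,
    configurable: false
  };
}
Object.defineProperties(target, descriptors);

console.log(target.statusCode);        // 200 — read through from `source`
console.log(target.getHeader('etag')); // 'abc' — bound, so `this` is `source`
```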
diff --git a/node_modules/mimic-response/package.json b/node_modules/mimic-response/package.json new file mode 100644 index 000000000..d478b0f31 --- /dev/null +++ b/node_modules/mimic-response/package.json @@ -0,0 +1,42 @@ +{ + "name": "mimic-response", + "version": "3.1.0", + "description": "Mimic a Node.js HTTP response stream", + "license": "MIT", + "repository": "sindresorhus/mimic-response", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.d.ts", + "index.js" + ], + "keywords": [ + "mimic", + "response", + "stream", + "http", + "https", + "request", + "get", + "core" + ], + "devDependencies": { + "@types/node": "^14.0.1", + "ava": "^2.4.0", + "create-test-server": "^2.4.0", + "p-event": "^4.1.0", + "pify": "^5.0.0", + "tsd": "^0.11.0", + "xo": "^0.30.0" + } +} diff --git a/node_modules/mimic-response/readme.md b/node_modules/mimic-response/readme.md new file mode 100644 index 000000000..e968620a4 --- /dev/null +++ b/node_modules/mimic-response/readme.md @@ -0,0 +1,78 @@ +# mimic-response [![Build Status](https://travis-ci.com/sindresorhus/mimic-response.svg?branch=master)](https://travis-ci.com/sindresorhus/mimic-response) + +> Mimic a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage) + +## Install + +``` +$ npm install mimic-response +``` + +## Usage + +```js +const stream = require('stream'); +const mimicResponse = require('mimic-response'); + +const responseStream = getHttpResponseStream(); +const myStream = new stream.PassThrough(); + +mimicResponse(responseStream, myStream); + +console.log(myStream.statusCode); +//=> 200 +``` + +## API + +### mimicResponse(from, to) + +**Note #1:** The `from.destroy(error)` function is not proxied. You have to call it manually: + +```js +const stream = require('stream'); +const mimicResponse = require('mimic-response'); + +const responseStream = getHttpResponseStream(); + +const myStream = new stream.PassThrough({ + destroy(error, callback) { + responseStream.destroy(); + + callback(error); + } +}); + +myStream.destroy(); +``` + +Please note that `myStream` and `responseStream` never throws. The error is passed to the request instead. + +#### from + +Type: `Stream` + +[Node.js HTTP response stream.](https://nodejs.org/api/http.html#http_class_http_incomingmessage) + +#### to + +Type: `Stream` + +Any stream. + +## Related + +- [mimic-fn](https://github.com/sindresorhus/mimic-fn) - Make a function mimic another one +- [clone-response](https://github.com/lukechilds/clone-response) - Clone a Node.js response stream + +--- + +
+ Get professional support for this package with a Tidelift subscription
+
+ Tidelift helps make open source sustainable for maintainers while giving companies
+ assurances about security, maintenance, and licensing for their dependencies.
+
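As a complement to the readme above, here is a hedged end-to-end usage sketch (the URL and logged values are placeholders, not taken from the package docs). Note that, per the `index.js` shown earlier, the target stream has to be created with `autoDestroy: false`, otherwise `mimicResponse()` throws.

```js
const http = require('http');
const stream = require('stream');
const mimicResponse = require('mimic-response');

// Placeholder endpoint for illustration.
http.get('http://example.com', (response) => {
  // autoDestroy must be false, or mimicResponse() rejects the target stream.
  const clone = new stream.PassThrough({ autoDestroy: false });

  mimicResponse(response, clone);
  response.pipe(clone);

  // The clone now exposes the response's metadata.
  console.log(clone.statusCode);              // e.g. 200
  console.log(clone.headers['content-type']); // e.g. 'text/html; charset=UTF-8'
});
```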
diff --git a/node_modules/mkdirp-classic/LICENSE b/node_modules/mkdirp-classic/LICENSE new file mode 100644 index 000000000..f6b3a0d12 --- /dev/null +++ b/node_modules/mkdirp-classic/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 James Halliday (mail@substack.net) and Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/mkdirp-classic/README.md b/node_modules/mkdirp-classic/README.md new file mode 100644 index 000000000..be5ac938d --- /dev/null +++ b/node_modules/mkdirp-classic/README.md @@ -0,0 +1,18 @@ +# mkdirp-classic + +Just a non-deprecated mirror of [mkdirp 0.5.2](https://github.com/substack/node-mkdirp/tree/0.5.1) +for use in modules where we depend on the non promise interface. + +``` +npm install mkdirp-classic +``` + +## Usage + +``` js +// See the above link +``` + +## License + +MIT diff --git a/node_modules/mkdirp-classic/index.js b/node_modules/mkdirp-classic/index.js new file mode 100644 index 000000000..6ce241b58 --- /dev/null +++ b/node_modules/mkdirp-classic/index.js @@ -0,0 +1,98 @@ +var path = require('path'); +var fs = require('fs'); +var _0777 = parseInt('0777', 8); + +module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; + +function mkdirP (p, opts, f, made) { + if (typeof opts === 'function') { + f = opts; + opts = {}; + } + else if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 & (~process.umask()); + } + if (!made) made = null; + + var cb = f || function () {}; + p = path.resolve(p); + + xfs.mkdir(p, mode, function (er) { + if (!er) { + made = made || p; + return cb(null, made); + } + switch (er.code) { + case 'ENOENT': + mkdirP(path.dirname(p), opts, function (er, made) { + if (er) cb(er, made); + else mkdirP(p, opts, cb, made); + }); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + xfs.stat(p, function (er2, stat) { + // if the stat fails, then that's super weird. + // let the original error be the failure reason. 
+ if (er2 || !stat.isDirectory()) cb(er, made) + else cb(null, made); + }); + break; + } + }); +} + +mkdirP.sync = function sync (p, opts, made) { + if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 & (~process.umask()); + } + if (!made) made = null; + + p = path.resolve(p); + + try { + xfs.mkdirSync(p, mode); + made = made || p; + } + catch (err0) { + switch (err0.code) { + case 'ENOENT' : + made = sync(path.dirname(p), opts, made); + sync(p, opts, made); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + var stat; + try { + stat = xfs.statSync(p); + } + catch (err1) { + throw err0; + } + if (!stat.isDirectory()) throw err0; + break; + } + } + + return made; +}; diff --git a/node_modules/mkdirp-classic/package.json b/node_modules/mkdirp-classic/package.json new file mode 100644 index 000000000..c8b5407b9 --- /dev/null +++ b/node_modules/mkdirp-classic/package.json @@ -0,0 +1,18 @@ +{ + "name": "mkdirp-classic", + "version": "0.5.3", + "description": "Mirror of mkdirp 0.5.2", + "main": "index.js", + "dependencies": {}, + "devDependencies": {}, + "repository": { + "type": "git", + "url": "https://github.com/mafintosh/mkdirp-classic.git" + }, + "author": "Mathias Buus (@mafintosh)", + "license": "MIT", + "bugs": { + "url": "https://github.com/mafintosh/mkdirp-classic/issues" + }, + "homepage": "https://github.com/mafintosh/mkdirp-classic" +} diff --git a/node_modules/napi-build-utils/LICENSE b/node_modules/napi-build-utils/LICENSE new file mode 100644 index 000000000..8e0248a4e --- /dev/null +++ b/node_modules/napi-build-utils/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 inspiredware + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
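The `mkdirp-classic` readme above leaves its usage block as a pointer to the upstream mkdirp docs. For convenience, here is a minimal sketch of the callback API that its `index.js` exports (the paths are only examples); it mirrors mkdirp 0.5.x, so `mkdirp(dir, cb)` and `mkdirp.sync(dir)` both create any missing intermediate directories.

```js
const mkdirp = require('mkdirp-classic');

// Asynchronous: the callback receives (err, made), where `made` is the first
// directory that actually had to be created (or null if nothing was created).
mkdirp('/tmp/foo/bar/baz', function (err, made) {
  if (err) return console.error(err);
  console.log('created down to', made);
});

// Synchronous variant; returns `made` in the same way.
const made = mkdirp.sync('/tmp/quux/corge');
console.log('created down to', made);
```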
diff --git a/node_modules/napi-build-utils/README.md b/node_modules/napi-build-utils/README.md new file mode 100644 index 000000000..f05600291 --- /dev/null +++ b/node_modules/napi-build-utils/README.md @@ -0,0 +1,48 @@ +# napi-build-utils + +[![npm](https://img.shields.io/npm/v/napi-build-utils.svg)](https://www.npmjs.com/package/napi-build-utils) +![Node version](https://img.shields.io/node/v/prebuild.svg) +[![Build Status](https://travis-ci.org/inspiredware/napi-build-utils.svg?branch=master)](https://travis-ci.org/inspiredware/napi-build-utils) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg)](http://standardjs.com/) +[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) + +A set of utilities to assist developers of tools that build [N-API](https://nodejs.org/api/n-api.html#n_api_n_api) native add-ons. + +## Background + +This module is targeted to developers creating tools that build N-API native add-ons. + +It implements a set of functions that aid in determining the N-API version supported by the currently running Node instance and the set of N-API versions against which the N-API native add-on is designed to be built. Other functions determine whether a particular N-API version can be built and can issue console warnings for unsupported N-API versions. + +Unlike the modules this code is designed to facilitate building, this module is written entirely in JavaScript. + +## Quick start + +```bash +$ npm install napi-build-utils +``` + +The module exports a set of functions documented [here](./index.md). For example: + +```javascript +var napiBuildUtils = require('napi-build-utils'); +var napiVersion = napiBuildUtils.getNapiVersion(); // N-API version supported by Node, or undefined. +``` + +## Declaring supported N-API versions + +Native modules that are designed to work with [N-API](https://nodejs.org/api/n-api.html#n_api_n_api) must explicitly declare the N-API version(s) against which they are coded to build. This is accomplished by including a `binary.napi_versions` property in the module's `package.json` file. For example: + +```json +"binary": { + "napi_versions": [2,3] +} +``` + +In the absence of a need to compile against a specific N-API version, the value `3` is a good choice as this is the N-API version that was supported when N-API left experimental status. + +Modules that are built against a specific N-API version will continue to operate indefinitely, even as later versions of N-API are introduced. + +## Support + +If you run into problems or limitations, please file an issue and we'll take a look. Pull requests are also welcome. diff --git a/node_modules/napi-build-utils/index.js b/node_modules/napi-build-utils/index.js new file mode 100644 index 000000000..164399486 --- /dev/null +++ b/node_modules/napi-build-utils/index.js @@ -0,0 +1,213 @@ +'use strict' +// Copyright (c) 2018 inspiredware + +var path = require('path') +var pkg = require(path.resolve('package.json')) + +var versionArray = process.version + .substr(1) + .replace(/-.*$/, '') + .split('.') + .map(function (item) { + return +item + }) + +/** + * + * A set of utilities to assist developers of tools that build + * [N-API](https://nodejs.org/api/n-api.html#n_api_n_api) native add-ons. + * + * The main repository can be found + * [here](https://github.com/inspiredware/napi-build-utils#napi-build-utils). + * + * @module napi-build-utils + */ + +/** + * Implements a consistent name of `napi` for N-API runtimes. 
+ * + * @param {string} runtime The runtime string. + * @returns {boolean} + */ +exports.isNapiRuntime = function (runtime) { + return runtime === 'napi' +} + +/** + * Determines whether the specified N-API version is supported + * by both the currently running Node instance and the package. + * + * @param {string} napiVersion The N-API version to check. + * @returns {boolean} + */ +exports.isSupportedVersion = function (napiVersion) { + var version = parseInt(napiVersion, 10) + return version <= exports.getNapiVersion() && exports.packageSupportsVersion(version) +} + +/** + * Determines whether the specified N-API version is supported by the package. + * The N-API version must be preseent in the `package.json` + * `binary.napi_versions` array. + * + * @param {number} napiVersion The N-API version to check. + * @returns {boolean} + * @private + */ +exports.packageSupportsVersion = function (napiVersion) { + if (pkg.binary && pkg.binary.napi_versions && + pkg.binary.napi_versions instanceof Array) { + for (var i = 0; i < pkg.binary.napi_versions.length; i++) { + if (pkg.binary.napi_versions[i] === napiVersion) return true + }; + }; + return false +} + +/** + * Issues a warning to the supplied log if the N-API version is not supported + * by the current Node instance or if the N-API version is not supported + * by the package. + * + * @param {string} napiVersion The N-API version to check. + * @param {Object} log The log object to which the warnings are to be issued. + * Must implement the `warn` method. + */ +exports.logUnsupportedVersion = function (napiVersion, log) { + if (!exports.isSupportedVersion(napiVersion)) { + if (exports.packageSupportsVersion(napiVersion)) { + log.warn('This Node instance does not support N-API version ' + napiVersion) + } else { + log.warn('This package does not support N-API version ' + napiVersion) + } + } +} + +/** + * Issues warnings to the supplied log for those N-API versions not supported + * by the N-API runtime or the package. + * + * Note that this function is specific to the + * [`prebuild`](https://github.com/prebuild/prebuild#prebuild) package. + * + * `target` is the list of targets to be built and is determined in one of + * three ways from the command line arguments: + * (1) `--target` specifies a specific target to build. + * (2) `--all` specifies all N-API versions supported by the package. + * (3) Neither of these specifies to build the single "best version available." + * + * `prebuild` is an array of objects in the form `{runtime: 'napi', target: '2'}`. + * The array contains the list of N-API versions that are supported by both the + * package being built and the currently running Node instance. + * + * The objective of this function is to issue a warning for those items that appear + * in the `target` argument but not in the `prebuild` argument. + * If a specific target is supported by the package (`packageSupportsVersion`) but + * but note in `prebuild`, the assumption is that the target is not supported by + * Node. + * + * @param {(Array|string)} target The N-API version(s) to check. Target is + * @param {Array} prebuild A config object created by the `prebuild` package. + * @param {Object} log The log object to which the warnings are to be issued. + * Must implement the `warn` method. 
+ * @private + */ +exports.logMissingNapiVersions = function (target, prebuild, log) { + if (exports.getNapiBuildVersions()) { + var targets = [].concat(target) + targets.forEach(function (napiVersion) { + if (!prebuildExists(prebuild, napiVersion)) { + if (exports.packageSupportsVersion(parseInt(napiVersion, 10))) { + log.warn('This Node instance does not support N-API version ' + napiVersion) + } else { + log.warn('This package does not support N-API version ' + napiVersion) + } + } + }) + } else { + log.error('Builds with runtime \'napi\' require a binary.napi_versions ' + + 'property on the package.json file') + } +} + +/** + * Determines whether the specified N-API version exists in the prebuild + * configuration object. + * + * Note that this function is speicifc to the `prebuild` and `prebuild-install` + * packages. + * + * @param {Object} prebuild A config object created by the `prebuild` package. + * @param {string} napiVersion The N-APi version to be checked. + * @return {boolean} + * @private + */ +var prebuildExists = function (prebuild, napiVersion) { + if (prebuild) { + for (var i = 0; i < prebuild.length; i++) { + if (prebuild[i].target === napiVersion) return true + } + } + return false +} + +/** + * Returns the best N-API version to build given the highest N-API + * version supported by the current Node instance and the N-API versions + * supported by the package, or undefined if a suitable N-API version + * cannot be determined. + * + * The best build version is the greatest N-API version supported by + * the package that is less than or equal to the highest N-API version + * supported by the current Node instance. + * + * @returns {number|undefined} + */ +exports.getBestNapiBuildVersion = function () { + var bestNapiBuildVersion = 0 + var napiBuildVersions = exports.getNapiBuildVersions(pkg) + if (napiBuildVersions) { + var ourNapiVersion = exports.getNapiVersion() + napiBuildVersions.forEach(function (napiBuildVersion) { + if (napiBuildVersion > bestNapiBuildVersion && + napiBuildVersion <= ourNapiVersion) { + bestNapiBuildVersion = napiBuildVersion + } + }) + } + return bestNapiBuildVersion === 0 ? undefined : bestNapiBuildVersion +} + +/** + * Returns an array of N-API versions supported by the package. + * + * @returns {Array} + */ +exports.getNapiBuildVersions = function () { + var napiBuildVersions = [] + // remove duplicates, convert to text + if (pkg.binary && pkg.binary.napi_versions) { + pkg.binary.napi_versions.forEach(function (napiVersion) { + var duplicated = napiBuildVersions.indexOf('' + napiVersion) !== -1 + if (!duplicated) { + napiBuildVersions.push('' + napiVersion) + } + }) + } + return napiBuildVersions.length ? napiBuildVersions : undefined +} + +/** + * Returns the highest N-API version supported by the current node instance + * or undefined if N-API is not supported. 
+ * + * @returns {string|undefined} + */ +exports.getNapiVersion = function () { + var version = process.versions.napi // string, can be undefined + if (!version) { // this code should never need to be updated + if (versionArray[0] === 9 && versionArray[1] >= 3) version = '2' // 9.3.0+ + else if (versionArray[0] === 8) version = '1' // 8.0.0+ + } + return version +} diff --git a/node_modules/napi-build-utils/index.md b/node_modules/napi-build-utils/index.md new file mode 100644 index 000000000..ce8d3e8fa --- /dev/null +++ b/node_modules/napi-build-utils/index.md @@ -0,0 +1,81 @@ + + +## napi-build-utils +A set of utilities to assist developers of tools that build +[N-API](https://nodejs.org/api/n-api.html#n_api_n_api) native add-ons. + +The main repository can be found +[here](https://github.com/inspiredware/napi-build-utils#napi-build-utils). + + +* [napi-build-utils](#module_napi-build-utils) + * [.isNapiRuntime(runtime)](#module_napi-build-utils.isNapiRuntime) ⇒ boolean + * [.isSupportedVersion(napiVersion)](#module_napi-build-utils.isSupportedVersion) ⇒ boolean + * [.logUnsupportedVersion(napiVersion, log)](#module_napi-build-utils.logUnsupportedVersion) + * [.getBestNapiBuildVersion()](#module_napi-build-utils.getBestNapiBuildVersion) ⇒ number \| undefined + * [.getNapiBuildVersions()](#module_napi-build-utils.getNapiBuildVersions) ⇒ Array.<string> + * [.getNapiVersion()](#module_napi-build-utils.getNapiVersion) ⇒ string \| undefined + + + +### napi-build-utils.isNapiRuntime(runtime) ⇒ boolean +Implements a consistent name of `napi` for N-API runtimes. + +**Kind**: static method of [napi-build-utils](#module_napi-build-utils) + +| Param | Type | Description | +| --- | --- | --- | +| runtime | string | The runtime string. | + + + +### napi-build-utils.isSupportedVersion(napiVersion) ⇒ boolean +Determines whether the specified N-API version is supported +by both the currently running Node instance and the package. + +**Kind**: static method of [napi-build-utils](#module_napi-build-utils) + +| Param | Type | Description | +| --- | --- | --- | +| napiVersion | string | The N-API version to check. | + + + +### napi-build-utils.logUnsupportedVersion(napiVersion, log) +Issues a warning to the supplied log if the N-API version is not supported +by the current Node instance or if the N-API version is not supported +by the package. + +**Kind**: static method of [napi-build-utils](#module_napi-build-utils) + +| Param | Type | Description | +| --- | --- | --- | +| napiVersion | string | The N-API version to check. | +| log | Object | The log object to which the warnings are to be issued. Must implement the `warn` method. | + + + +### napi-build-utils.getBestNapiBuildVersion() ⇒ number \| undefined +Returns the best N-API version to build given the highest N-API +version supported by the current Node instance and the N-API versions +supported by the package, or undefined if a suitable N-API version +cannot be determined. + +The best build version is the greatest N-API version supported by +the package that is less than or equal to the highest N-API version +supported by the current Node instance. + +**Kind**: static method of [napi-build-utils](#module_napi-build-utils) + + +### napi-build-utils.getNapiBuildVersions() ⇒ Array.<string> +Returns an array of N-API versions supported by the package. 
+ +**Kind**: static method of [napi-build-utils](#module_napi-build-utils) + + +### napi-build-utils.getNapiVersion() ⇒ string \| undefined +Returns the highest N-API version supported by the current node instance +or undefined if N-API is not supported. + +**Kind**: static method of [napi-build-utils](#module_napi-build-utils) diff --git a/node_modules/napi-build-utils/package.json b/node_modules/napi-build-utils/package.json new file mode 100644 index 000000000..76cc948e9 --- /dev/null +++ b/node_modules/napi-build-utils/package.json @@ -0,0 +1,41 @@ +{ + "name": "napi-build-utils", + "version": "1.0.2", + "description": "A set of utilities to assist developers of tools that build N-API native add-ons", + "main": "index.js", + "scripts": { + "doc": "jsdoc2md index.js >index.md", + "test": "mocha test/ && npm run lint", + "lint": "standard", + "prepublishOnly": "npm run test && npm run doc" + }, + "keywords": [ + "n-api", + "prebuild", + "prebuild-install" + ], + "author": "Jim Schlight", + "license": "MIT", + "homepage": "https://github.com/inspiredware/napi-build-utils#readme", + "repository": { + "type": "git", + "url": "git+https://github.com/inspiredware/napi-build-utils.git" + }, + "bugs": { + "url": "https://github.com/inspiredware/napi-build-utils/issues" + }, + "devDependencies": { + "chai": "^4.1.2", + "jsdoc-to-markdown": "^4.0.1", + "mocha": "^5.2.0", + "standard": "^12.0.1" + }, + "binary": { + "note": "napi-build-tools is not an N-API module. This entry is for unit testing.", + "napi_versions": [ + 2, + 2, + 3 + ] + } +} diff --git a/node_modules/node-abi/.circleci/config.yml b/node_modules/node-abi/.circleci/config.yml new file mode 100644 index 000000000..0d1b298e3 --- /dev/null +++ b/node_modules/node-abi/.circleci/config.yml @@ -0,0 +1,45 @@ +step-restore-cache: &step-restore-cache + restore_cache: + keys: + - v1-dependencies-{{ arch }}-{{ checksum "yarn.lock" }} + - v1-dependencies-{{ arch }} + +steps-test: &steps-test + steps: + - checkout + - *step-restore-cache + - run: yarn --frozen-lockfile + - save_cache: + paths: + - node_modules + key: v1-dependencies-{{ arch }}-{{ checksum "yarn.lock" }} + - run: yarn test + +version: 2.1 +orbs: + cfa: continuousauth/npm@1.0.2 +jobs: + test-linux-14: + docker: + - image: circleci/node:14 + <<: *steps-test + test-linux-16: + docker: + - image: circleci/node:16 + <<: *steps-test + +workflows: + test_and_release: + # Run test jobs first, release only when all the test jobs are successful + jobs: + - test-linux-14 + - test-linux-16 + - cfa/release: + requires: + - test-linux-14 + - test-linux-16 + filters: + branches: + only: + - main + context: cfa-release diff --git a/node_modules/node-abi/.github/workflows/update-abi.yml b/node_modules/node-abi/.github/workflows/update-abi.yml new file mode 100644 index 000000000..a1ed15970 --- /dev/null +++ b/node_modules/node-abi/.github/workflows/update-abi.yml @@ -0,0 +1,41 @@ +name: Auto-update ABI JSON file +on: + schedule: + - cron: '0 * * * *' +jobs: + autoupdate: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + with: + node-version: '12.x' + - name: Get npm cache directory + id: npm-cache + run: | + echo "::set-output name=dir::$(npm config get cache)" + - uses: actions/cache@v1 + with: + path: ${{ steps.npm-cache.outputs.dir }} + key: ${{ runner.os }}-node-${{ hashFiles('**/package.json') }} + restore-keys: | + ${{ runner.os }}-node- + - run: npm install --no-package-lock + - name: Update ABI registry + run: npm run update-abi-registry + 
- name: Commit Changes to ABI registry + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + echo "machine github.com login $GITHUB_ACTOR password $GITHUB_TOKEN" > ~/.netrc + chmod 600 ~/.netrc + git add abi_registry.json + if test -n "$(git status -s)"; then + git config user.name "GitHub Actions" + git config user.email "github-actions@users.noreply.github.com" + git diff --cached + git commit -m "feat: update ABI registry" + git push origin HEAD:$GITHUB_REF + else + echo No update needed + fi diff --git a/node_modules/node-abi/.releaserc.json b/node_modules/node-abi/.releaserc.json new file mode 100644 index 000000000..693aa6e18 --- /dev/null +++ b/node_modules/node-abi/.releaserc.json @@ -0,0 +1,9 @@ +{ + "plugins": [ + "@semantic-release/commit-analyzer", + "@semantic-release/release-notes-generator", + "@continuous-auth/semantic-release-npm", + "@semantic-release/github" + ], + "branches": [ "main" ] + } \ No newline at end of file diff --git a/node_modules/node-abi/CODE_OF_CONDUCT.md b/node_modules/node-abi/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..a28ec15df --- /dev/null +++ b/node_modules/node-abi/CODE_OF_CONDUCT.md @@ -0,0 +1,73 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at lukas.geiger94@gmail.com. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct/ + +[homepage]: https://www.contributor-covenant.org diff --git a/node_modules/node-abi/CONTRIBUTING.md b/node_modules/node-abi/CONTRIBUTING.md new file mode 100644 index 000000000..199813670 --- /dev/null +++ b/node_modules/node-abi/CONTRIBUTING.md @@ -0,0 +1,62 @@ +# Contributing to `node-abi` + +:+1::tada: First off, thanks for taking the time to contribute to `node-abi`! :tada::+1: + +## Commit Message Guidelines + +This module uses [`semantic-release`](https://github.com/semantic-release/semantic-release) to automatically release new versions via [Continuous Auth](https://continuousauth.dev/). +Therefore we have very precise rules over how our git commit messages can be formatted. + +Each commit message consists of a **header**, a **body** and a **footer**. The header has a special +format that includes a **type**, a **scope** and a **subject** ([full explanation](https://github.com/stevemao/conventional-changelog-angular/blob/master/convention.md)): + +``` +<type>(<scope>): <subject> + + + +