Compare commits
1 commit
main...21d07128f2

| Author | SHA1 | Date |
|---|---|---|
|  | 21d07128f2 |  |
12  node_modules/.bin/detect-libc  generated  vendored  Normal file
@@ -0,0 +1,12 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../detect-libc/bin/detect-libc.js" "$@"
else
  exec node "$basedir/../detect-libc/bin/detect-libc.js" "$@"
fi
17  node_modules/.bin/detect-libc.cmd  generated  vendored  Normal file
@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0

IF EXIST "%dp0%\node.exe" (
  SET "_prog=%dp0%\node.exe"
) ELSE (
  SET "_prog=node"
  SET PATHEXT=%PATHEXT:;.JS;=;%
)

endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\detect-libc\bin\detect-libc.js" %*
28  node_modules/.bin/detect-libc.ps1  generated  vendored  Normal file
@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent

$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
  # Fix case when both the Windows and Linux builds of Node
  # are installed in the same directory
  $exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "$basedir/node$exe" "$basedir/../detect-libc/bin/detect-libc.js" $args
  } else {
    & "$basedir/node$exe" "$basedir/../detect-libc/bin/detect-libc.js" $args
  }
  $ret=$LASTEXITCODE
} else {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "node$exe" "$basedir/../detect-libc/bin/detect-libc.js" $args
  } else {
    & "node$exe" "$basedir/../detect-libc/bin/detect-libc.js" $args
  }
  $ret=$LASTEXITCODE
}
exit $ret
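All three wrappers above do the same job for their respective shells: use the `node` binary sitting next to the shim if there is one, otherwise fall back to `node` on the PATH, then run `detect-libc/bin/detect-libc.js` with the caller's arguments. As a rough illustration only, the JavaScript sketch below mirrors that dispatch logic; the `runShim` helper and the use of `child_process` are assumptions for the example, not part of the vendored files.

```js
// Illustrative sketch of the .bin shim behaviour: prefer the node binary next
// to the shim, otherwise use whatever `node` is on PATH, then run the
// package's bin script with the original arguments.
const { spawnSync } = require('child_process');
const { existsSync } = require('fs');
const path = require('path');

function runShim(binRelPath, argv) {
  const basedir = __dirname;                            // node_modules/.bin
  const script = path.join(basedir, '..', binRelPath);  // e.g. ../detect-libc/bin/detect-libc.js
  const localNode = path.join(basedir, 'node');
  const node = existsSync(localNode) ? localNode : 'node';
  const result = spawnSync(node, [script, ...argv], { stdio: 'inherit' });
  process.exit(result.status ?? 1);
}

runShim('detect-libc/bin/detect-libc.js', process.argv.slice(2));
```

The `.cmd` and `.ps1` variants layer Windows-specific details on top of the same fallback, namely the `.exe` suffix handling and the `PATHEXT` adjustment seen above.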
4  node_modules/.bin/node-gyp-build → node_modules/.bin/mkdirp  generated  vendored
@@ -6,7 +6,7 @@ case `uname` in
 esac

 if [ -x "$basedir/node" ]; then
-  exec "$basedir/node" "$basedir/../node-gyp-build/bin.js" "$@"
+  exec "$basedir/node" "$basedir/../mkdirp/bin/cmd.js" "$@"
 else
-  exec node "$basedir/../node-gyp-build/bin.js" "$@"
+  exec node "$basedir/../mkdirp/bin/cmd.js" "$@"
 fi
2  node_modules/.bin/node-gyp-build.cmd → node_modules/.bin/mkdirp.cmd  generated  vendored
@@ -14,4 +14,4 @@ IF EXIST "%dp0%\node.exe" (
   SET PATHEXT=%PATHEXT:;.JS;=;%
 )

-endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\node-gyp-build\bin.js" %*
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\mkdirp\bin\cmd.js" %*
8  node_modules/.bin/node-gyp-build.ps1 → node_modules/.bin/mkdirp.ps1  generated  vendored
@@ -11,17 +11,17 @@ $ret=0
 if (Test-Path "$basedir/node$exe") {
   # Support pipeline input
   if ($MyInvocation.ExpectingInput) {
-    $input | & "$basedir/node$exe" "$basedir/../node-gyp-build/bin.js" $args
+    $input | & "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
   } else {
-    & "$basedir/node$exe" "$basedir/../node-gyp-build/bin.js" $args
+    & "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
   }
   $ret=$LASTEXITCODE
 } else {
   # Support pipeline input
   if ($MyInvocation.ExpectingInput) {
-    $input | & "node$exe" "$basedir/../node-gyp-build/bin.js" $args
+    $input | & "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
   } else {
-    & "node$exe" "$basedir/../node-gyp-build/bin.js" $args
+    & "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
   }
   $ret=$LASTEXITCODE
 }
4  node_modules/.bin/node-gyp-build-optional → node_modules/.bin/prebuild-install  generated  vendored
@@ -6,7 +6,7 @@ case `uname` in
 esac

 if [ -x "$basedir/node" ]; then
-  exec "$basedir/node" "$basedir/../node-gyp-build/optional.js" "$@"
+  exec "$basedir/node" "$basedir/../prebuild-install/bin.js" "$@"
 else
-  exec node "$basedir/../node-gyp-build/optional.js" "$@"
+  exec node "$basedir/../prebuild-install/bin.js" "$@"
 fi
@@ -14,4 +14,4 @@ IF EXIST "%dp0%\node.exe" (
   SET PATHEXT=%PATHEXT:;.JS;=;%
 )

-endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\node-gyp-build\optional.js" %*
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\prebuild-install\bin.js" %*
8  node_modules/.bin/node-gyp-build-test.ps1 → node_modules/.bin/prebuild-install.ps1  generated  vendored
@@ -11,17 +11,17 @@ $ret=0
 if (Test-Path "$basedir/node$exe") {
   # Support pipeline input
   if ($MyInvocation.ExpectingInput) {
-    $input | & "$basedir/node$exe" "$basedir/../node-gyp-build/build-test.js" $args
+    $input | & "$basedir/node$exe" "$basedir/../prebuild-install/bin.js" $args
   } else {
-    & "$basedir/node$exe" "$basedir/../node-gyp-build/build-test.js" $args
+    & "$basedir/node$exe" "$basedir/../prebuild-install/bin.js" $args
   }
   $ret=$LASTEXITCODE
 } else {
   # Support pipeline input
   if ($MyInvocation.ExpectingInput) {
-    $input | & "node$exe" "$basedir/../node-gyp-build/build-test.js" $args
+    $input | & "node$exe" "$basedir/../prebuild-install/bin.js" $args
   } else {
-    & "node$exe" "$basedir/../node-gyp-build/build-test.js" $args
+    & "node$exe" "$basedir/../prebuild-install/bin.js" $args
   }
   $ret=$LASTEXITCODE
 }
4  node_modules/.bin/node-gyp-build-test → node_modules/.bin/rc  generated  vendored
@@ -6,7 +6,7 @@ case `uname` in
 esac

 if [ -x "$basedir/node" ]; then
-  exec "$basedir/node" "$basedir/../node-gyp-build/build-test.js" "$@"
+  exec "$basedir/node" "$basedir/../rc/cli.js" "$@"
 else
-  exec node "$basedir/../node-gyp-build/build-test.js" "$@"
+  exec node "$basedir/../rc/cli.js" "$@"
 fi
2  node_modules/.bin/node-gyp-build-test.cmd → node_modules/.bin/rc.cmd  generated  vendored
@@ -14,4 +14,4 @@ IF EXIST "%dp0%\node.exe" (
   SET PATHEXT=%PATHEXT:;.JS;=;%
 )

-endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\node-gyp-build\build-test.js" %*
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\rc\cli.js" %*
28  node_modules/.bin/rc.ps1  generated  vendored  Normal file
@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent

$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
  # Fix case when both the Windows and Linux builds of Node
  # are installed in the same directory
  $exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "$basedir/node$exe" "$basedir/../rc/cli.js" $args
  } else {
    & "$basedir/node$exe" "$basedir/../rc/cli.js" $args
  }
  $ret=$LASTEXITCODE
} else {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "node$exe" "$basedir/../rc/cli.js" $args
  } else {
    & "node$exe" "$basedir/../rc/cli.js" $args
  }
  $ret=$LASTEXITCODE
}
exit $ret
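These shims route to `rc/cli.js`, the command-line front end of the `rc` configuration loader. For context, here is a minimal sketch of loading configuration programmatically, assuming the package's conventional `rc(appname, defaults)` call; the app name `myapp` and the defaults are placeholders, not part of this diff.

```js
// Minimal sketch, not part of the vendored files: layered config lookup with
// rc, assuming its conventional rc(appname, defaults) signature.
const rc = require('rc');

const conf = rc('myapp', {
  port: 8080,        // overridable via .myapprc files, environment, or argv
  verbose: false,
});

console.log(conf.port, conf.verbose);
```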
12  node_modules/.bin/semver  generated  vendored  Normal file
@@ -0,0 +1,12 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../semver/bin/semver" "$@"
else
  exec node "$basedir/../semver/bin/semver" "$@"
fi
17  node_modules/.bin/semver.cmd  generated  vendored  Normal file
@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0

IF EXIST "%dp0%\node.exe" (
  SET "_prog=%dp0%\node.exe"
) ELSE (
  SET "_prog=node"
  SET PATHEXT=%PATHEXT:;.JS;=;%
)

endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver" %*
8  node_modules/.bin/node-gyp-build-optional.ps1 → node_modules/.bin/semver.ps1  generated  vendored
@@ -11,17 +11,17 @@ $ret=0
 if (Test-Path "$basedir/node$exe") {
   # Support pipeline input
   if ($MyInvocation.ExpectingInput) {
-    $input | & "$basedir/node$exe" "$basedir/../node-gyp-build/optional.js" $args
+    $input | & "$basedir/node$exe" "$basedir/../semver/bin/semver" $args
   } else {
-    & "$basedir/node$exe" "$basedir/../node-gyp-build/optional.js" $args
+    & "$basedir/node$exe" "$basedir/../semver/bin/semver" $args
   }
   $ret=$LASTEXITCODE
 } else {
   # Support pipeline input
   if ($MyInvocation.ExpectingInput) {
-    $input | & "node$exe" "$basedir/../node-gyp-build/optional.js" $args
+    $input | & "node$exe" "$basedir/../semver/bin/semver" $args
   } else {
-    & "node$exe" "$basedir/../node-gyp-build/optional.js" $args
+    & "node$exe" "$basedir/../semver/bin/semver" $args
   }
   $ret=$LASTEXITCODE
 }
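The renamed wrappers now target the `semver` package's CLI at `semver/bin/semver`. The same checks are available from JavaScript; the snippet below is a minimal sketch assuming the standard node-semver API (`valid` and `satisfies`), which is not itself part of this diff.

```js
// Minimal sketch of using the semver package directly instead of the CLI shim.
const semver = require('semver');

console.log(semver.valid('1.2.3'));               // '1.2.3'
console.log(semver.satisfies('1.2.3', '^1.0.0')); // true
console.log(semver.satisfies('2.0.0', '^1.0.0')); // false
```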
1008  node_modules/.package-lock.json  generated  vendored
File diff suppressed because it is too large
60  node_modules/@isaacs/balanced-match/README.md  generated  vendored
@@ -1,60 +0,0 @@
# @isaacs/balanced-match

A hybrid CJS/ESM TypeScript fork of
[balanced-match](http://npm.im/balanced-match).

Match balanced string pairs, like `{` and `}` or `<b>` and `</b>`. Supports regular expressions as well!

[](https://github.com/juliangruber/balanced-match/actions/workflows/ci.yml)
[](https://www.npmjs.org/package/balanced-match)

## Example

Get the first matching pair of braces:

```js
import { balanced } from '@isaacs/balanced-match'

console.log(balanced('{', '}', 'pre{in{nested}}post'))
console.log(balanced('{', '}', 'pre{first}between{second}post'))
console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post'))
```

The matches are:

```bash
$ node example.js
{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' }
{ start: 3,
  end: 9,
  pre: 'pre',
  body: 'first',
  post: 'between{second}post' }
{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' }
```

## API

### const m = balanced(a, b, str)

For the first non-nested matching pair of `a` and `b` in `str`, return an
object with those keys:

- **start** the index of the first match of `a`
- **end** the index of the matching `b`
- **pre** the preamble, `a` and `b` not included
- **body** the match, `a` and `b` not included
- **post** the postscript, `a` and `b` not included

If there's no match, `undefined` will be returned.

If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`.

### const r = balanced.range(a, b, str)

For the first non-nested matching pair of `a` and `b` in `str`, return an
array with indexes: `[ <a index>, <b index> ]`.

If there's no match, `undefined` will be returned.

If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`.
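To make the removed README's API section concrete, here is a minimal sketch exercising `balanced()` and `range()` through the package's CommonJS entry point; the expected output is taken from the README example above.

```js
// Sketch based on the API documented in the removed README; range is the
// named export that backs the balanced.range described there.
const { balanced, range } = require('@isaacs/balanced-match');

console.log(balanced('{', '}', 'pre{in{nested}}post'));
// => { start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' }

console.log(range('{', '}', 'pre{in{nested}}post'));
// => [ 3, 14 ]
```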
9  node_modules/@isaacs/balanced-match/dist/commonjs/index.d.ts  generated  vendored
@@ -1,9 +0,0 @@
export declare const balanced: (a: string | RegExp, b: string | RegExp, str: string) => false | {
    start: number;
    end: number;
    pre: string;
    body: string;
    post: string;
} | undefined;
export declare const range: (a: string, b: string, str: string) => undefined | [number, number];
//# sourceMappingURL=index.d.ts.map
1  node_modules/@isaacs/balanced-match/dist/commonjs/index.d.ts.map  generated  vendored
@@ -1 +0,0 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,QAAQ,GACnB,GAAG,MAAM,GAAG,MAAM,EAClB,GAAG,MAAM,GAAG,MAAM,EAClB,KAAK,MAAM;;;;;;aAgBZ,CAAA;AAOD,eAAO,MAAM,KAAK,GAChB,GAAG,MAAM,EACT,GAAG,MAAM,EACT,KAAK,MAAM,KACV,SAAS,GAAG,CAAC,MAAM,EAAE,MAAM,CA2C7B,CAAA"}
59  node_modules/@isaacs/balanced-match/dist/commonjs/index.js  generated  vendored
@@ -1,59 +0,0 @@
"use strict";
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
exports.range = exports.balanced = void 0;
|
|
||||||
const balanced = (a, b, str) => {
|
|
||||||
const ma = a instanceof RegExp ? maybeMatch(a, str) : a;
|
|
||||||
const mb = b instanceof RegExp ? maybeMatch(b, str) : b;
|
|
||||||
const r = ma !== null && mb != null && (0, exports.range)(ma, mb, str);
|
|
||||||
return (r && {
|
|
||||||
start: r[0],
|
|
||||||
end: r[1],
|
|
||||||
pre: str.slice(0, r[0]),
|
|
||||||
body: str.slice(r[0] + ma.length, r[1]),
|
|
||||||
post: str.slice(r[1] + mb.length),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
exports.balanced = balanced;
|
|
||||||
const maybeMatch = (reg, str) => {
|
|
||||||
const m = str.match(reg);
|
|
||||||
return m ? m[0] : null;
|
|
||||||
};
|
|
||||||
const range = (a, b, str) => {
|
|
||||||
let begs, beg, left, right = undefined, result;
|
|
||||||
let ai = str.indexOf(a);
|
|
||||||
let bi = str.indexOf(b, ai + 1);
|
|
||||||
let i = ai;
|
|
||||||
if (ai >= 0 && bi > 0) {
|
|
||||||
if (a === b) {
|
|
||||||
return [ai, bi];
|
|
||||||
}
|
|
||||||
begs = [];
|
|
||||||
left = str.length;
|
|
||||||
while (i >= 0 && !result) {
|
|
||||||
if (i === ai) {
|
|
||||||
begs.push(i);
|
|
||||||
ai = str.indexOf(a, i + 1);
|
|
||||||
}
|
|
||||||
else if (begs.length === 1) {
|
|
||||||
const r = begs.pop();
|
|
||||||
if (r !== undefined)
|
|
||||||
result = [r, bi];
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
beg = begs.pop();
|
|
||||||
if (beg !== undefined && beg < left) {
|
|
||||||
left = beg;
|
|
||||||
right = bi;
|
|
||||||
}
|
|
||||||
bi = str.indexOf(b, i + 1);
|
|
||||||
}
|
|
||||||
i = ai < bi && ai >= 0 ? ai : bi;
|
|
||||||
}
|
|
||||||
if (begs.length && right !== undefined) {
|
|
||||||
result = [left, right];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
exports.range = range;
|
|
||||||
//# sourceMappingURL=index.js.map
|
|
||||||
1  node_modules/@isaacs/balanced-match/dist/commonjs/index.js.map  generated  vendored
@@ -1 +0,0 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAO,MAAM,QAAQ,GAAG,CACtB,CAAkB,EAClB,CAAkB,EAClB,GAAW,EACX,EAAE;IACF,MAAM,EAAE,GAAG,CAAC,YAAY,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IACvD,MAAM,EAAE,GAAG,CAAC,YAAY,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAEvD,MAAM,CAAC,GAAG,EAAE,KAAK,IAAI,IAAI,EAAE,IAAI,IAAI,IAAI,IAAA,aAAK,EAAC,EAAE,EAAE,EAAE,EAAE,GAAG,CAAC,CAAA;IAEzD,OAAO,CACL,CAAC,IAAI;QACH,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC;QACX,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC;QACT,GAAG,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QACvB,IAAI,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QACvC,IAAI,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC;KAClC,CACF,CAAA;AACH,CAAC,CAAA;AAnBY,QAAA,QAAQ,YAmBpB;AAED,MAAM,UAAU,GAAG,CAAC,GAAW,EAAE,GAAW,EAAE,EAAE;IAC9C,MAAM,CAAC,GAAG,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;IACxB,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;AACxB,CAAC,CAAA;AAEM,MAAM,KAAK,GAAG,CACnB,CAAS,EACT,CAAS,EACT,GAAW,EACmB,EAAE;IAChC,IAAI,IAAc,EAChB,GAAuB,EACvB,IAAY,EACZ,KAAK,GAAuB,SAAS,EACrC,MAAoC,CAAA;IACtC,IAAI,EAAE,GAAG,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;IACvB,IAAI,EAAE,GAAG,GAAG,CAAC,OAAO,CAAC,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC,CAAA;IAC/B,IAAI,CAAC,GAAG,EAAE,CAAA;IAEV,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACtB,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC;YACZ,OAAO,CAAC,EAAE,EAAE,EAAE,CAAC,CAAA;QACjB,CAAC;QACD,IAAI,GAAG,EAAE,CAAA;QACT,IAAI,GAAG,GAAG,CAAC,MAAM,CAAA;QAEjB,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;YACzB,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;gBACb,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;gBACZ,EAAE,GAAG,GAAG,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAA;YAC5B,CAAC;iBAAM,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;gBAC7B,MAAM,CAAC,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;gBACpB,IAAI,CAAC,KAAK,SAAS;oBAAE,MAAM,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;YACvC,CAAC;iBAAM,CAAC;gBACN,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;gBAChB,IAAI,GAAG,KAAK,SAAS,IAAI,GAAG,GAAG,IAAI,EAAE,CAAC;oBACpC,IAAI,GAAG,GAAG,CAAA;oBACV,KAAK,GAAG,EAAE,CAAA;gBACZ,CAAC;gBAED,EAAE,GAAG,GAAG,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAA;YAC5B,CAAC;YAED,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAA;QAClC,CAAC;QAED,IAAI,IAAI,CAAC,MAAM,IAAI,KAAK,KAAK,SAAS,EAAE,CAAC;YACvC,MAAM,GAAG,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;QACxB,CAAC;IACH,CAAC;IAED,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AA/CY,QAAA,KAAK,SA+CjB","sourcesContent":["export const balanced = (\n a: string | RegExp,\n b: string | RegExp,\n str: string,\n) => {\n const ma = a instanceof RegExp ? maybeMatch(a, str) : a\n const mb = b instanceof RegExp ? maybeMatch(b, str) : b\n\n const r = ma !== null && mb != null && range(ma, mb, str)\n\n return (\n r && {\n start: r[0],\n end: r[1],\n pre: str.slice(0, r[0]),\n body: str.slice(r[0] + ma.length, r[1]),\n post: str.slice(r[1] + mb.length),\n }\n )\n}\n\nconst maybeMatch = (reg: RegExp, str: string) => {\n const m = str.match(reg)\n return m ? 
m[0] : null\n}\n\nexport const range = (\n a: string,\n b: string,\n str: string,\n): undefined | [number, number] => {\n let begs: number[],\n beg: number | undefined,\n left: number,\n right: number | undefined = undefined,\n result: undefined | [number, number]\n let ai = str.indexOf(a)\n let bi = str.indexOf(b, ai + 1)\n let i = ai\n\n if (ai >= 0 && bi > 0) {\n if (a === b) {\n return [ai, bi]\n }\n begs = []\n left = str.length\n\n while (i >= 0 && !result) {\n if (i === ai) {\n begs.push(i)\n ai = str.indexOf(a, i + 1)\n } else if (begs.length === 1) {\n const r = begs.pop()\n if (r !== undefined) result = [r, bi]\n } else {\n beg = begs.pop()\n if (beg !== undefined && beg < left) {\n left = beg\n right = bi\n }\n\n bi = str.indexOf(b, i + 1)\n }\n\n i = ai < bi && ai >= 0 ? ai : bi\n }\n\n if (begs.length && right !== undefined) {\n result = [left, right]\n }\n }\n\n return result\n}\n"]}
3  node_modules/@isaacs/balanced-match/dist/commonjs/package.json  generated  vendored
@@ -1,3 +0,0 @@
{
  "type": "commonjs"
}
9  node_modules/@isaacs/balanced-match/dist/esm/index.d.ts  generated  vendored
@@ -1,9 +0,0 @@
export declare const balanced: (a: string | RegExp, b: string | RegExp, str: string) => false | {
    start: number;
    end: number;
    pre: string;
    body: string;
    post: string;
} | undefined;
export declare const range: (a: string, b: string, str: string) => undefined | [number, number];
//# sourceMappingURL=index.d.ts.map
1  node_modules/@isaacs/balanced-match/dist/esm/index.d.ts.map  generated  vendored
@@ -1 +0,0 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,QAAQ,GACnB,GAAG,MAAM,GAAG,MAAM,EAClB,GAAG,MAAM,GAAG,MAAM,EAClB,KAAK,MAAM;;;;;;aAgBZ,CAAA;AAOD,eAAO,MAAM,KAAK,GAChB,GAAG,MAAM,EACT,GAAG,MAAM,EACT,KAAK,MAAM,KACV,SAAS,GAAG,CAAC,MAAM,EAAE,MAAM,CA2C7B,CAAA"}
54  node_modules/@isaacs/balanced-match/dist/esm/index.js  generated  vendored
@@ -1,54 +0,0 @@
export const balanced = (a, b, str) => {
|
|
||||||
const ma = a instanceof RegExp ? maybeMatch(a, str) : a;
|
|
||||||
const mb = b instanceof RegExp ? maybeMatch(b, str) : b;
|
|
||||||
const r = ma !== null && mb != null && range(ma, mb, str);
|
|
||||||
return (r && {
|
|
||||||
start: r[0],
|
|
||||||
end: r[1],
|
|
||||||
pre: str.slice(0, r[0]),
|
|
||||||
body: str.slice(r[0] + ma.length, r[1]),
|
|
||||||
post: str.slice(r[1] + mb.length),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
const maybeMatch = (reg, str) => {
|
|
||||||
const m = str.match(reg);
|
|
||||||
return m ? m[0] : null;
|
|
||||||
};
|
|
||||||
export const range = (a, b, str) => {
|
|
||||||
let begs, beg, left, right = undefined, result;
|
|
||||||
let ai = str.indexOf(a);
|
|
||||||
let bi = str.indexOf(b, ai + 1);
|
|
||||||
let i = ai;
|
|
||||||
if (ai >= 0 && bi > 0) {
|
|
||||||
if (a === b) {
|
|
||||||
return [ai, bi];
|
|
||||||
}
|
|
||||||
begs = [];
|
|
||||||
left = str.length;
|
|
||||||
while (i >= 0 && !result) {
|
|
||||||
if (i === ai) {
|
|
||||||
begs.push(i);
|
|
||||||
ai = str.indexOf(a, i + 1);
|
|
||||||
}
|
|
||||||
else if (begs.length === 1) {
|
|
||||||
const r = begs.pop();
|
|
||||||
if (r !== undefined)
|
|
||||||
result = [r, bi];
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
beg = begs.pop();
|
|
||||||
if (beg !== undefined && beg < left) {
|
|
||||||
left = beg;
|
|
||||||
right = bi;
|
|
||||||
}
|
|
||||||
bi = str.indexOf(b, i + 1);
|
|
||||||
}
|
|
||||||
i = ai < bi && ai >= 0 ? ai : bi;
|
|
||||||
}
|
|
||||||
if (begs.length && right !== undefined) {
|
|
||||||
result = [left, right];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
//# sourceMappingURL=index.js.map
|
|
||||||
1  node_modules/@isaacs/balanced-match/dist/esm/index.js.map  generated  vendored
@@ -1 +0,0 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,MAAM,QAAQ,GAAG,CACtB,CAAkB,EAClB,CAAkB,EAClB,GAAW,EACX,EAAE;IACF,MAAM,EAAE,GAAG,CAAC,YAAY,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IACvD,MAAM,EAAE,GAAG,CAAC,YAAY,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAEvD,MAAM,CAAC,GAAG,EAAE,KAAK,IAAI,IAAI,EAAE,IAAI,IAAI,IAAI,KAAK,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,CAAC,CAAA;IAEzD,OAAO,CACL,CAAC,IAAI;QACH,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC;QACX,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC;QACT,GAAG,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QACvB,IAAI,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QACvC,IAAI,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC;KAClC,CACF,CAAA;AACH,CAAC,CAAA;AAED,MAAM,UAAU,GAAG,CAAC,GAAW,EAAE,GAAW,EAAE,EAAE;IAC9C,MAAM,CAAC,GAAG,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;IACxB,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;AACxB,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,KAAK,GAAG,CACnB,CAAS,EACT,CAAS,EACT,GAAW,EACmB,EAAE;IAChC,IAAI,IAAc,EAChB,GAAuB,EACvB,IAAY,EACZ,KAAK,GAAuB,SAAS,EACrC,MAAoC,CAAA;IACtC,IAAI,EAAE,GAAG,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;IACvB,IAAI,EAAE,GAAG,GAAG,CAAC,OAAO,CAAC,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC,CAAA;IAC/B,IAAI,CAAC,GAAG,EAAE,CAAA;IAEV,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACtB,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC;YACZ,OAAO,CAAC,EAAE,EAAE,EAAE,CAAC,CAAA;QACjB,CAAC;QACD,IAAI,GAAG,EAAE,CAAA;QACT,IAAI,GAAG,GAAG,CAAC,MAAM,CAAA;QAEjB,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;YACzB,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;gBACb,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;gBACZ,EAAE,GAAG,GAAG,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAA;YAC5B,CAAC;iBAAM,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;gBAC7B,MAAM,CAAC,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;gBACpB,IAAI,CAAC,KAAK,SAAS;oBAAE,MAAM,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;YACvC,CAAC;iBAAM,CAAC;gBACN,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;gBAChB,IAAI,GAAG,KAAK,SAAS,IAAI,GAAG,GAAG,IAAI,EAAE,CAAC;oBACpC,IAAI,GAAG,GAAG,CAAA;oBACV,KAAK,GAAG,EAAE,CAAA;gBACZ,CAAC;gBAED,EAAE,GAAG,GAAG,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAA;YAC5B,CAAC;YAED,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAA;QAClC,CAAC;QAED,IAAI,IAAI,CAAC,MAAM,IAAI,KAAK,KAAK,SAAS,EAAE,CAAC;YACvC,MAAM,GAAG,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;QACxB,CAAC;IACH,CAAC;IAED,OAAO,MAAM,CAAA;AACf,CAAC,CAAA","sourcesContent":["export const balanced = (\n a: string | RegExp,\n b: string | RegExp,\n str: string,\n) => {\n const ma = a instanceof RegExp ? maybeMatch(a, str) : a\n const mb = b instanceof RegExp ? maybeMatch(b, str) : b\n\n const r = ma !== null && mb != null && range(ma, mb, str)\n\n return (\n r && {\n start: r[0],\n end: r[1],\n pre: str.slice(0, r[0]),\n body: str.slice(r[0] + ma.length, r[1]),\n post: str.slice(r[1] + mb.length),\n }\n )\n}\n\nconst maybeMatch = (reg: RegExp, str: string) => {\n const m = str.match(reg)\n return m ? 
m[0] : null\n}\n\nexport const range = (\n a: string,\n b: string,\n str: string,\n): undefined | [number, number] => {\n let begs: number[],\n beg: number | undefined,\n left: number,\n right: number | undefined = undefined,\n result: undefined | [number, number]\n let ai = str.indexOf(a)\n let bi = str.indexOf(b, ai + 1)\n let i = ai\n\n if (ai >= 0 && bi > 0) {\n if (a === b) {\n return [ai, bi]\n }\n begs = []\n left = str.length\n\n while (i >= 0 && !result) {\n if (i === ai) {\n begs.push(i)\n ai = str.indexOf(a, i + 1)\n } else if (begs.length === 1) {\n const r = begs.pop()\n if (r !== undefined) result = [r, bi]\n } else {\n beg = begs.pop()\n if (beg !== undefined && beg < left) {\n left = beg\n right = bi\n }\n\n bi = str.indexOf(b, i + 1)\n }\n\n i = ai < bi && ai >= 0 ? ai : bi\n }\n\n if (begs.length && right !== undefined) {\n result = [left, right]\n }\n }\n\n return result\n}\n"]}
3  node_modules/@isaacs/balanced-match/dist/esm/package.json  generated  vendored
@@ -1,3 +0,0 @@
{
  "type": "module"
}
79  node_modules/@isaacs/balanced-match/package.json  generated  vendored
@@ -1,79 +0,0 @@
{
|
|
||||||
"name": "@isaacs/balanced-match",
|
|
||||||
"description": "Match balanced character pairs, like \"{\" and \"}\"",
|
|
||||||
"version": "4.0.1",
|
|
||||||
"files": [
|
|
||||||
"dist"
|
|
||||||
],
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "git://github.com/isaacs/balanced-match.git"
|
|
||||||
},
|
|
||||||
"exports": {
|
|
||||||
"./package.json": "./package.json",
|
|
||||||
".": {
|
|
||||||
"import": {
|
|
||||||
"types": "./dist/esm/index.d.ts",
|
|
||||||
"default": "./dist/esm/index.js"
|
|
||||||
},
|
|
||||||
"require": {
|
|
||||||
"types": "./dist/commonjs/index.d.ts",
|
|
||||||
"default": "./dist/commonjs/index.js"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"type": "module",
|
|
||||||
"scripts": {
|
|
||||||
"preversion": "npm test",
|
|
||||||
"postversion": "npm publish",
|
|
||||||
"prepublishOnly": "git push origin --follow-tags",
|
|
||||||
"prepare": "tshy",
|
|
||||||
"pretest": "npm run prepare",
|
|
||||||
"presnap": "npm run prepare",
|
|
||||||
"test": "tap",
|
|
||||||
"snap": "tap",
|
|
||||||
"format": "prettier --write . --loglevel warn",
|
|
||||||
"benchmark": "node benchmark/index.js",
|
|
||||||
"typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
|
|
||||||
},
|
|
||||||
"prettier": {
|
|
||||||
"semi": false,
|
|
||||||
"printWidth": 80,
|
|
||||||
"tabWidth": 2,
|
|
||||||
"useTabs": false,
|
|
||||||
"singleQuote": true,
|
|
||||||
"jsxSingleQuote": false,
|
|
||||||
"bracketSameLine": true,
|
|
||||||
"arrowParens": "avoid",
|
|
||||||
"endOfLine": "lf"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/brace-expansion": "^1.1.2",
|
|
||||||
"@types/node": "^24.0.0",
|
|
||||||
"mkdirp": "^3.0.1",
|
|
||||||
"prettier": "^3.3.2",
|
|
||||||
"tap": "^21.1.0",
|
|
||||||
"tshy": "^3.0.2",
|
|
||||||
"typedoc": "^0.28.5"
|
|
||||||
},
|
|
||||||
"keywords": [
|
|
||||||
"match",
|
|
||||||
"regexp",
|
|
||||||
"test",
|
|
||||||
"balanced",
|
|
||||||
"parse"
|
|
||||||
],
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": "20 || >=22"
|
|
||||||
},
|
|
||||||
"tshy": {
|
|
||||||
"exports": {
|
|
||||||
"./package.json": "./package.json",
|
|
||||||
".": "./src/index.ts"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"main": "./dist/commonjs/index.js",
|
|
||||||
"types": "./dist/commonjs/index.d.ts",
|
|
||||||
"module": "./dist/esm/index.js"
|
|
||||||
}
|
|
||||||
23  node_modules/@isaacs/brace-expansion/LICENSE  generated  vendored
@@ -1,23 +0,0 @@
MIT License

Copyright Julian Gruber <julian@juliangruber.com>

TypeScript port Copyright Isaac Z. Schlueter <i@izs.me>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
86  node_modules/@isaacs/brace-expansion/README.md  generated  vendored
@@ -1,86 +0,0 @@
# @isaacs/brace-expansion

A hybrid CJS/ESM TypeScript fork of
[brace-expansion](http://npm.im/brace-expansion).

[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
as known from sh/bash, in JavaScript.

[](https://github.com/juliangruber/brace-expansion/actions/workflows/ci.yml)
[](https://www.npmjs.org/package/brace-expansion)

## Example

```js
import { expand } from '@isaacs/brace-expansion'

expand('file-{a,b,c}.jpg')
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']

expand('-v{,,}')
// => ['-v', '-v', '-v']

expand('file{0..2}.jpg')
// => ['file0.jpg', 'file1.jpg', 'file2.jpg']

expand('file-{a..c}.jpg')
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']

expand('file{2..0}.jpg')
// => ['file2.jpg', 'file1.jpg', 'file0.jpg']

expand('file{0..4..2}.jpg')
// => ['file0.jpg', 'file2.jpg', 'file4.jpg']

expand('file-{a..e..2}.jpg')
// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']

expand('file{00..10..5}.jpg')
// => ['file00.jpg', 'file05.jpg', 'file10.jpg']

expand('{{A..C},{a..c}}')
// => ['A', 'B', 'C', 'a', 'b', 'c']

expand('ppp{,config,oe{,conf}}')
// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
```

## API

```js
import { expand } from '@isaacs/brace-expansion'
```

### const expanded = expand(str)

Return an array of all possible and valid expansions of `str`. If none are
found, `[str]` is returned.

Valid expansions are:

```js
/^(.*,)+(.+)?$/
// {a,b,...}
```

A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.

```js
/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
// {x..y[..incr]}
```

A numeric sequence from `x` to `y` inclusive, with optional increment.
If `x` or `y` start with a leading `0`, all the numbers will be padded
to have equal length. Negative numbers and backwards iteration work too.

```js
/^[a-zA-Z]\.\.[a-zA-Z](\.\.-?\d+)?$/
// {x..y[..incr]}
```

An alphabetic sequence from `x` to `y` inclusive, with optional increment.
`x` and `y` must be exactly one character, and if given, `incr` must be a
number.

For compatibility reasons, the string `${` is not eligible for brace expansion.
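The same `expand()` calls shown in the removed README also work through the package's CommonJS build; a minimal sketch, with expected results taken from the README above:

```js
// Sketch of expand() via the CommonJS entry point of @isaacs/brace-expansion.
const { expand } = require('@isaacs/brace-expansion');

console.log(expand('file-{a,b,c}.jpg'));
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']

console.log(expand('file{00..10..5}.jpg'));
// => ['file00.jpg', 'file05.jpg', 'file10.jpg']
```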
2  node_modules/@isaacs/brace-expansion/dist/commonjs/index.d.ts  generated  vendored
@@ -1,2 +0,0 @@
export declare function expand(str: string): string[];
//# sourceMappingURL=index.d.ts.map
1  node_modules/@isaacs/brace-expansion/dist/commonjs/index.d.ts.map  generated  vendored
@@ -1 +0,0 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAwEA,wBAAgB,MAAM,CAAC,GAAG,EAAE,MAAM,YAgBjC"}
196  node_modules/@isaacs/brace-expansion/dist/commonjs/index.js  generated  vendored
@@ -1,196 +0,0 @@
"use strict";
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
exports.expand = expand;
|
|
||||||
const balanced_match_1 = require("@isaacs/balanced-match");
|
|
||||||
const escSlash = '\0SLASH' + Math.random() + '\0';
|
|
||||||
const escOpen = '\0OPEN' + Math.random() + '\0';
|
|
||||||
const escClose = '\0CLOSE' + Math.random() + '\0';
|
|
||||||
const escComma = '\0COMMA' + Math.random() + '\0';
|
|
||||||
const escPeriod = '\0PERIOD' + Math.random() + '\0';
|
|
||||||
const escSlashPattern = new RegExp(escSlash, 'g');
|
|
||||||
const escOpenPattern = new RegExp(escOpen, 'g');
|
|
||||||
const escClosePattern = new RegExp(escClose, 'g');
|
|
||||||
const escCommaPattern = new RegExp(escComma, 'g');
|
|
||||||
const escPeriodPattern = new RegExp(escPeriod, 'g');
|
|
||||||
const slashPattern = /\\\\/g;
|
|
||||||
const openPattern = /\\{/g;
|
|
||||||
const closePattern = /\\}/g;
|
|
||||||
const commaPattern = /\\,/g;
|
|
||||||
const periodPattern = /\\./g;
|
|
||||||
function numeric(str) {
|
|
||||||
return !isNaN(str) ? parseInt(str, 10) : str.charCodeAt(0);
|
|
||||||
}
|
|
||||||
function escapeBraces(str) {
|
|
||||||
return str
|
|
||||||
.replace(slashPattern, escSlash)
|
|
||||||
.replace(openPattern, escOpen)
|
|
||||||
.replace(closePattern, escClose)
|
|
||||||
.replace(commaPattern, escComma)
|
|
||||||
.replace(periodPattern, escPeriod);
|
|
||||||
}
|
|
||||||
function unescapeBraces(str) {
|
|
||||||
return str
|
|
||||||
.replace(escSlashPattern, '\\')
|
|
||||||
.replace(escOpenPattern, '{')
|
|
||||||
.replace(escClosePattern, '}')
|
|
||||||
.replace(escCommaPattern, ',')
|
|
||||||
.replace(escPeriodPattern, '.');
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Basically just str.split(","), but handling cases
|
|
||||||
* where we have nested braced sections, which should be
|
|
||||||
* treated as individual members, like {a,{b,c},d}
|
|
||||||
*/
|
|
||||||
function parseCommaParts(str) {
|
|
||||||
if (!str) {
|
|
||||||
return [''];
|
|
||||||
}
|
|
||||||
const parts = [];
|
|
||||||
const m = (0, balanced_match_1.balanced)('{', '}', str);
|
|
||||||
if (!m) {
|
|
||||||
return str.split(',');
|
|
||||||
}
|
|
||||||
const { pre, body, post } = m;
|
|
||||||
const p = pre.split(',');
|
|
||||||
p[p.length - 1] += '{' + body + '}';
|
|
||||||
const postParts = parseCommaParts(post);
|
|
||||||
if (post.length) {
|
|
||||||
;
|
|
||||||
p[p.length - 1] += postParts.shift();
|
|
||||||
p.push.apply(p, postParts);
|
|
||||||
}
|
|
||||||
parts.push.apply(parts, p);
|
|
||||||
return parts;
|
|
||||||
}
|
|
||||||
function expand(str) {
|
|
||||||
if (!str) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
// I don't know why Bash 4.3 does this, but it does.
|
|
||||||
// Anything starting with {} will have the first two bytes preserved
|
|
||||||
// but *only* at the top level, so {},a}b will not expand to anything,
|
|
||||||
// but a{},b}c will be expanded to [a}c,abc].
|
|
||||||
// One could argue that this is a bug in Bash, but since the goal of
|
|
||||||
// this module is to match Bash's rules, we escape a leading {}
|
|
||||||
if (str.slice(0, 2) === '{}') {
|
|
||||||
str = '\\{\\}' + str.slice(2);
|
|
||||||
}
|
|
||||||
return expand_(escapeBraces(str), true).map(unescapeBraces);
|
|
||||||
}
|
|
||||||
function embrace(str) {
|
|
||||||
return '{' + str + '}';
|
|
||||||
}
|
|
||||||
function isPadded(el) {
|
|
||||||
return /^-?0\d/.test(el);
|
|
||||||
}
|
|
||||||
function lte(i, y) {
|
|
||||||
return i <= y;
|
|
||||||
}
|
|
||||||
function gte(i, y) {
|
|
||||||
return i >= y;
|
|
||||||
}
|
|
||||||
function expand_(str, isTop) {
|
|
||||||
/** @type {string[]} */
|
|
||||||
const expansions = [];
|
|
||||||
const m = (0, balanced_match_1.balanced)('{', '}', str);
|
|
||||||
if (!m)
|
|
||||||
return [str];
|
|
||||||
// no need to expand pre, since it is guaranteed to be free of brace-sets
|
|
||||||
const pre = m.pre;
|
|
||||||
const post = m.post.length ? expand_(m.post, false) : [''];
|
|
||||||
if (/\$$/.test(m.pre)) {
|
|
||||||
for (let k = 0; k < post.length; k++) {
|
|
||||||
const expansion = pre + '{' + m.body + '}' + post[k];
|
|
||||||
expansions.push(expansion);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
const isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
|
|
||||||
const isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
|
|
||||||
const isSequence = isNumericSequence || isAlphaSequence;
|
|
||||||
const isOptions = m.body.indexOf(',') >= 0;
|
|
||||||
if (!isSequence && !isOptions) {
|
|
||||||
// {a},b}
|
|
||||||
if (m.post.match(/,(?!,).*\}/)) {
|
|
||||||
str = m.pre + '{' + m.body + escClose + m.post;
|
|
||||||
return expand_(str);
|
|
||||||
}
|
|
||||||
return [str];
|
|
||||||
}
|
|
||||||
let n;
|
|
||||||
if (isSequence) {
|
|
||||||
n = m.body.split(/\.\./);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
n = parseCommaParts(m.body);
|
|
||||||
if (n.length === 1 && n[0] !== undefined) {
|
|
||||||
// x{{a,b}}y ==> x{a}y x{b}y
|
|
||||||
n = expand_(n[0], false).map(embrace);
|
|
||||||
//XXX is this necessary? Can't seem to hit it in tests.
|
|
||||||
/* c8 ignore start */
|
|
||||||
if (n.length === 1) {
|
|
||||||
return post.map(p => m.pre + n[0] + p);
|
|
||||||
}
|
|
||||||
/* c8 ignore stop */
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// at this point, n is the parts, and we know it's not a comma set
|
|
||||||
// with a single entry.
|
|
||||||
let N;
|
|
||||||
if (isSequence && n[0] !== undefined && n[1] !== undefined) {
|
|
||||||
const x = numeric(n[0]);
|
|
||||||
const y = numeric(n[1]);
|
|
||||||
const width = Math.max(n[0].length, n[1].length);
|
|
||||||
let incr = n.length === 3 && n[2] !== undefined ? Math.abs(numeric(n[2])) : 1;
|
|
||||||
let test = lte;
|
|
||||||
const reverse = y < x;
|
|
||||||
if (reverse) {
|
|
||||||
incr *= -1;
|
|
||||||
test = gte;
|
|
||||||
}
|
|
||||||
const pad = n.some(isPadded);
|
|
||||||
N = [];
|
|
||||||
for (let i = x; test(i, y); i += incr) {
|
|
||||||
let c;
|
|
||||||
if (isAlphaSequence) {
|
|
||||||
c = String.fromCharCode(i);
|
|
||||||
if (c === '\\') {
|
|
||||||
c = '';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
c = String(i);
|
|
||||||
if (pad) {
|
|
||||||
const need = width - c.length;
|
|
||||||
if (need > 0) {
|
|
||||||
const z = new Array(need + 1).join('0');
|
|
||||||
if (i < 0) {
|
|
||||||
c = '-' + z + c.slice(1);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
c = z + c;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
N.push(c);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
N = [];
|
|
||||||
for (let j = 0; j < n.length; j++) {
|
|
||||||
N.push.apply(N, expand_(n[j], false));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (let j = 0; j < N.length; j++) {
|
|
||||||
for (let k = 0; k < post.length; k++) {
|
|
||||||
const expansion = pre + N[j] + post[k];
|
|
||||||
if (!isTop || isSequence || expansion) {
|
|
||||||
expansions.push(expansion);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return expansions;
|
|
||||||
}
|
|
||||||
//# sourceMappingURL=index.js.map
|
|
||||||
1  node_modules/@isaacs/brace-expansion/dist/commonjs/index.js.map  generated  vendored
File diff suppressed because one or more lines are too long
3  node_modules/@isaacs/brace-expansion/dist/commonjs/package.json  generated  vendored
@@ -1,3 +0,0 @@
{
  "type": "commonjs"
}
2  node_modules/@isaacs/brace-expansion/dist/esm/index.d.ts  generated  vendored
@@ -1,2 +0,0 @@
export declare function expand(str: string): string[];
//# sourceMappingURL=index.d.ts.map
1  node_modules/@isaacs/brace-expansion/dist/esm/index.d.ts.map  generated  vendored
@@ -1 +0,0 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAwEA,wBAAgB,MAAM,CAAC,GAAG,EAAE,MAAM,YAgBjC"}
193  node_modules/@isaacs/brace-expansion/dist/esm/index.js  generated  vendored
@@ -1,193 +0,0 @@
import { balanced } from '@isaacs/balanced-match';
|
|
||||||
const escSlash = '\0SLASH' + Math.random() + '\0';
|
|
||||||
const escOpen = '\0OPEN' + Math.random() + '\0';
|
|
||||||
const escClose = '\0CLOSE' + Math.random() + '\0';
|
|
||||||
const escComma = '\0COMMA' + Math.random() + '\0';
|
|
||||||
const escPeriod = '\0PERIOD' + Math.random() + '\0';
|
|
||||||
const escSlashPattern = new RegExp(escSlash, 'g');
|
|
||||||
const escOpenPattern = new RegExp(escOpen, 'g');
|
|
||||||
const escClosePattern = new RegExp(escClose, 'g');
|
|
||||||
const escCommaPattern = new RegExp(escComma, 'g');
|
|
||||||
const escPeriodPattern = new RegExp(escPeriod, 'g');
|
|
||||||
const slashPattern = /\\\\/g;
|
|
||||||
const openPattern = /\\{/g;
|
|
||||||
const closePattern = /\\}/g;
|
|
||||||
const commaPattern = /\\,/g;
|
|
||||||
const periodPattern = /\\./g;
|
|
||||||
function numeric(str) {
|
|
||||||
return !isNaN(str) ? parseInt(str, 10) : str.charCodeAt(0);
|
|
||||||
}
|
|
||||||
function escapeBraces(str) {
|
|
||||||
return str
|
|
||||||
.replace(slashPattern, escSlash)
|
|
||||||
.replace(openPattern, escOpen)
|
|
||||||
.replace(closePattern, escClose)
|
|
||||||
.replace(commaPattern, escComma)
|
|
||||||
.replace(periodPattern, escPeriod);
|
|
||||||
}
|
|
||||||
function unescapeBraces(str) {
|
|
||||||
return str
|
|
||||||
.replace(escSlashPattern, '\\')
|
|
||||||
.replace(escOpenPattern, '{')
|
|
||||||
.replace(escClosePattern, '}')
|
|
||||||
.replace(escCommaPattern, ',')
|
|
||||||
.replace(escPeriodPattern, '.');
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Basically just str.split(","), but handling cases
|
|
||||||
* where we have nested braced sections, which should be
|
|
||||||
* treated as individual members, like {a,{b,c},d}
|
|
||||||
*/
|
|
||||||
function parseCommaParts(str) {
|
|
||||||
if (!str) {
|
|
||||||
return [''];
|
|
||||||
}
|
|
||||||
const parts = [];
|
|
||||||
const m = balanced('{', '}', str);
|
|
||||||
if (!m) {
|
|
||||||
return str.split(',');
|
|
||||||
}
|
|
||||||
const { pre, body, post } = m;
|
|
||||||
const p = pre.split(',');
|
|
||||||
p[p.length - 1] += '{' + body + '}';
|
|
||||||
const postParts = parseCommaParts(post);
|
|
||||||
if (post.length) {
|
|
||||||
;
|
|
||||||
p[p.length - 1] += postParts.shift();
|
|
||||||
p.push.apply(p, postParts);
|
|
||||||
}
|
|
||||||
parts.push.apply(parts, p);
|
|
||||||
return parts;
|
|
||||||
}
|
|
||||||
export function expand(str) {
|
|
||||||
if (!str) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
// I don't know why Bash 4.3 does this, but it does.
|
|
||||||
// Anything starting with {} will have the first two bytes preserved
|
|
||||||
// but *only* at the top level, so {},a}b will not expand to anything,
|
|
||||||
// but a{},b}c will be expanded to [a}c,abc].
|
|
||||||
// One could argue that this is a bug in Bash, but since the goal of
|
|
||||||
// this module is to match Bash's rules, we escape a leading {}
|
|
||||||
if (str.slice(0, 2) === '{}') {
|
|
||||||
str = '\\{\\}' + str.slice(2);
|
|
||||||
}
|
|
||||||
return expand_(escapeBraces(str), true).map(unescapeBraces);
|
|
||||||
}
|
|
||||||
function embrace(str) {
|
|
||||||
return '{' + str + '}';
|
|
||||||
}
|
|
||||||
function isPadded(el) {
|
|
||||||
return /^-?0\d/.test(el);
|
|
||||||
}
|
|
||||||
function lte(i, y) {
|
|
||||||
return i <= y;
|
|
||||||
}
|
|
||||||
function gte(i, y) {
|
|
||||||
return i >= y;
|
|
||||||
}
|
|
||||||
function expand_(str, isTop) {
|
|
||||||
/** @type {string[]} */
|
|
||||||
const expansions = [];
|
|
||||||
const m = balanced('{', '}', str);
|
|
||||||
if (!m)
|
|
||||||
return [str];
|
|
||||||
// no need to expand pre, since it is guaranteed to be free of brace-sets
|
|
||||||
const pre = m.pre;
|
|
||||||
const post = m.post.length ? expand_(m.post, false) : [''];
|
|
||||||
if (/\$$/.test(m.pre)) {
|
|
||||||
for (let k = 0; k < post.length; k++) {
|
|
||||||
const expansion = pre + '{' + m.body + '}' + post[k];
|
|
||||||
expansions.push(expansion);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
const isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
|
|
||||||
const isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
|
|
||||||
const isSequence = isNumericSequence || isAlphaSequence;
|
|
||||||
const isOptions = m.body.indexOf(',') >= 0;
|
|
||||||
if (!isSequence && !isOptions) {
|
|
||||||
// {a},b}
|
|
||||||
if (m.post.match(/,(?!,).*\}/)) {
|
|
||||||
str = m.pre + '{' + m.body + escClose + m.post;
|
|
||||||
return expand_(str);
|
|
||||||
}
|
|
||||||
return [str];
|
|
||||||
}
|
|
||||||
let n;
|
|
||||||
if (isSequence) {
|
|
||||||
n = m.body.split(/\.\./);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
n = parseCommaParts(m.body);
|
|
||||||
if (n.length === 1 && n[0] !== undefined) {
|
|
||||||
// x{{a,b}}y ==> x{a}y x{b}y
|
|
||||||
n = expand_(n[0], false).map(embrace);
|
|
||||||
//XXX is this necessary? Can't seem to hit it in tests.
|
|
||||||
/* c8 ignore start */
|
|
||||||
if (n.length === 1) {
|
|
||||||
return post.map(p => m.pre + n[0] + p);
|
|
||||||
}
|
|
||||||
/* c8 ignore stop */
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// at this point, n is the parts, and we know it's not a comma set
|
|
||||||
// with a single entry.
|
|
||||||
let N;
|
|
||||||
if (isSequence && n[0] !== undefined && n[1] !== undefined) {
|
|
||||||
const x = numeric(n[0]);
|
|
||||||
const y = numeric(n[1]);
|
|
||||||
const width = Math.max(n[0].length, n[1].length);
|
|
||||||
let incr = n.length === 3 && n[2] !== undefined ? Math.abs(numeric(n[2])) : 1;
|
|
||||||
let test = lte;
|
|
||||||
const reverse = y < x;
|
|
||||||
if (reverse) {
|
|
||||||
incr *= -1;
|
|
||||||
test = gte;
|
|
||||||
}
|
|
||||||
const pad = n.some(isPadded);
|
|
||||||
N = [];
|
|
||||||
for (let i = x; test(i, y); i += incr) {
|
|
||||||
let c;
|
|
||||||
if (isAlphaSequence) {
|
|
||||||
c = String.fromCharCode(i);
|
|
||||||
if (c === '\\') {
|
|
||||||
c = '';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
c = String(i);
|
|
||||||
if (pad) {
|
|
||||||
const need = width - c.length;
|
|
||||||
if (need > 0) {
|
|
||||||
const z = new Array(need + 1).join('0');
|
|
||||||
if (i < 0) {
|
|
||||||
c = '-' + z + c.slice(1);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
c = z + c;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
N.push(c);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
N = [];
|
|
||||||
for (let j = 0; j < n.length; j++) {
|
|
||||||
N.push.apply(N, expand_(n[j], false));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (let j = 0; j < N.length; j++) {
|
|
||||||
for (let k = 0; k < post.length; k++) {
|
|
||||||
const expansion = pre + N[j] + post[k];
|
|
||||||
if (!isTop || isSequence || expansion) {
|
|
||||||
expansions.push(expansion);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return expansions;
|
|
||||||
}
|
|
||||||
//# sourceMappingURL=index.js.map
|
|
||||||
1  node_modules/@isaacs/brace-expansion/dist/esm/index.js.map  generated  vendored
File diff suppressed because one or more lines are too long
3  node_modules/@isaacs/brace-expansion/dist/esm/package.json  generated  vendored
@@ -1,3 +0,0 @@
{
  "type": "module"
}
71  node_modules/@isaacs/brace-expansion/package.json  generated  vendored
@@ -1,71 +0,0 @@
{
|
|
||||||
"name": "@isaacs/brace-expansion",
|
|
||||||
"description": "Brace expansion as known from sh/bash",
|
|
||||||
"version": "5.0.0",
|
|
||||||
"files": [
|
|
||||||
"dist"
|
|
||||||
],
|
|
||||||
"exports": {
|
|
||||||
"./package.json": "./package.json",
|
|
||||||
".": {
|
|
||||||
"import": {
|
|
||||||
"types": "./dist/esm/index.d.ts",
|
|
||||||
"default": "./dist/esm/index.js"
|
|
||||||
},
|
|
||||||
"require": {
|
|
||||||
"types": "./dist/commonjs/index.d.ts",
|
|
||||||
"default": "./dist/commonjs/index.js"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"type": "module",
|
|
||||||
"scripts": {
|
|
||||||
"preversion": "npm test",
|
|
||||||
"postversion": "npm publish",
|
|
||||||
"prepublishOnly": "git push origin --follow-tags",
|
|
||||||
"prepare": "tshy",
|
|
||||||
"pretest": "npm run prepare",
|
|
||||||
"presnap": "npm run prepare",
|
|
||||||
"test": "tap",
|
|
||||||
"snap": "tap",
|
|
||||||
"format": "prettier --write . --loglevel warn",
|
|
||||||
"benchmark": "node benchmark/index.js",
|
|
||||||
"typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
|
|
||||||
},
|
|
||||||
"prettier": {
|
|
||||||
"semi": false,
|
|
||||||
"printWidth": 80,
|
|
||||||
"tabWidth": 2,
|
|
||||||
"useTabs": false,
|
|
||||||
"singleQuote": true,
|
|
||||||
"jsxSingleQuote": false,
|
|
||||||
"bracketSameLine": true,
|
|
||||||
"arrowParens": "avoid",
|
|
||||||
"endOfLine": "lf"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/brace-expansion": "^1.1.2",
|
|
||||||
"@types/node": "^24.0.0",
|
|
||||||
"mkdirp": "^3.0.1",
|
|
||||||
"prettier": "^3.3.2",
|
|
||||||
"tap": "^21.1.0",
|
|
||||||
"tshy": "^3.0.2",
|
|
||||||
"typedoc": "^0.28.5"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@isaacs/balanced-match": "^4.0.1"
|
|
||||||
},
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": "20 || >=22"
|
|
||||||
},
|
|
||||||
"tshy": {
|
|
||||||
"exports": {
|
|
||||||
"./package.json": "./package.json",
|
|
||||||
".": "./src/index.ts"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"main": "./dist/commonjs/index.js",
|
|
||||||
"types": "./dist/commonjs/index.d.ts",
|
|
||||||
"module": "./dist/esm/index.js"
|
|
||||||
}
|
|
||||||
14 node_modules/aproba/LICENSE generated vendored Normal file
@@ -0,0 +1,14 @@
Copyright (c) 2015, Rebecca Turner <me@re-becca.org>

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
94
node_modules/aproba/README.md
generated
vendored
Normal file
94
node_modules/aproba/README.md
generated
vendored
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
aproba
|
||||||
|
======
|
||||||
|
|
||||||
|
A ridiculously light-weight function argument validator
|
||||||
|
|
||||||
|
```
|
||||||
|
var validate = require("aproba")
|
||||||
|
|
||||||
|
function myfunc(a, b, c) {
|
||||||
|
// `a` must be a string, `b` a number, `c` a function
|
||||||
|
validate('SNF', arguments) // [a,b,c] is also valid
|
||||||
|
}
|
||||||
|
|
||||||
|
myfunc('test', 23, function () {}) // ok
|
||||||
|
myfunc(123, 23, function () {}) // type error
|
||||||
|
myfunc('test', 23) // missing arg error
|
||||||
|
myfunc('test', 23, function () {}, true) // too many args error
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
Valid types are:
|
||||||
|
|
||||||
|
| type | description
|
||||||
|
| :--: | :----------
|
||||||
|
| * | matches any type
|
||||||
|
| A | `Array.isArray` OR an `arguments` object
|
||||||
|
| S | typeof == string
|
||||||
|
| N | typeof == number
|
||||||
|
| F | typeof == function
|
||||||
|
| O | typeof == object and not type A and not type E
|
||||||
|
| B | typeof == boolean
|
||||||
|
| E | `instanceof Error` OR `null` **(special: see below)**
|
||||||
|
| Z | == `null`
|
||||||
|
|
||||||
|
Validation failures throw one of three exception types, distinguished by a
|
||||||
|
`code` property of `EMISSINGARG`, `EINVALIDTYPE` or `ETOOMANYARGS`.
|
||||||
|
|
||||||
|
If you pass in an invalid type then it will throw with a code of
|
||||||
|
`EUNKNOWNTYPE`.
|
||||||
|
|
||||||
|
If an **error** argument is found and is not null then the remaining
|
||||||
|
arguments are optional. That is, if you say `ESO` then that's like using a
|
||||||
|
non-magical `E` in: `E|ESO|ZSO`.
|
||||||
|
|
||||||
|
### But I have optional arguments?!
|
||||||
|
|
||||||
|
You can provide more than one signature by separating them with pipes `|`.
|
||||||
|
If any signature matches the arguments then they'll be considered valid.
|
||||||
|
|
||||||
|
So for example, say you wanted to write a signature for
|
||||||
|
`fs.createWriteStream`. The docs for it describe it thusly:
|
||||||
|
|
||||||
|
```
|
||||||
|
fs.createWriteStream(path[, options])
|
||||||
|
```
|
||||||
|
|
||||||
|
This would be a signature of `SO|S`. That is, a string and and object, or
|
||||||
|
just a string.
|
||||||
|
|
||||||
|
Now, if you read the full `fs` docs, you'll see that actually path can ALSO
|
||||||
|
be a buffer. And options can be a string, that is:
|
||||||
|
```
|
||||||
|
path <String> | <Buffer>
|
||||||
|
options <String> | <Object>
|
||||||
|
```
|
||||||
|
|
||||||
|
To reproduce this you have to fully enumerate all of the possible
|
||||||
|
combinations and that implies a signature of `SO|SS|OO|OS|S|O`. The
|
||||||
|
awkwardness is a feature: It reminds you of the complexity you're adding to
|
||||||
|
your API when you do this sort of thing.
|
||||||
|
|
||||||
|
|
||||||
|
### Browser support
|
||||||
|
|
||||||
|
This has no dependencies and should work in browsers, though you'll have
|
||||||
|
noisier stack traces.
|
||||||
|
|
||||||
|
### Why this exists
|
||||||
|
|
||||||
|
I wanted a very simple argument validator. It needed to do two things:
|
||||||
|
|
||||||
|
1. Be more concise and easier to use than assertions
|
||||||
|
|
||||||
|
2. Not encourage an infinite bikeshed of DSLs
|
||||||
|
|
||||||
|
This is why types are specified by a single character and there's no such
|
||||||
|
thing as an optional argument.
|
||||||
|
|
||||||
|
This is not intended to validate user data. This is specifically about
|
||||||
|
asserting the interface of your functions.
|
||||||
|
|
||||||
|
If you need greater validation, I encourage you to write them by hand or
|
||||||
|
look elsewhere.
|
||||||
|
|
||||||
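To make the pipe-separated signatures from the README section above concrete, here is a minimal sketch (illustrative only; it assumes the `aproba` API exactly as documented above):

```javascript
var validate = require('aproba')

// path may be a string or an object-like value, options is optional:
// enumerate every allowed shape, separated by '|'.
function createWriteStreamish (path, options) {
  validate('SO|SS|OO|OS|S|O', arguments)
  // ... real work would go here
}

createWriteStreamish('/tmp/x')       // ok, matches 'S'
createWriteStreamish('/tmp/x', {})   // ok, matches 'SO'
// createWriteStreamish(42)          // throws with code 'EINVALIDTYPE'
```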
105 node_modules/aproba/index.js generated vendored Normal file
@@ -0,0 +1,105 @@
'use strict'

function isArguments (thingy) {
  return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee')
}

var types = {
  '*': {label: 'any', check: function () { return true }},
  A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }},
  S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }},
  N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }},
  F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }},
  O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }},
  B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }},
  E: {label: 'error', check: function (thingy) { return thingy instanceof Error }},
  Z: {label: 'null', check: function (thingy) { return thingy == null }}
}

function addSchema (schema, arity) {
  var group = arity[schema.length] = arity[schema.length] || []
  if (group.indexOf(schema) === -1) group.push(schema)
}

var validate = module.exports = function (rawSchemas, args) {
  if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length)
  if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas')
  if (!args) throw missingRequiredArg(1, 'args')
  if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas)
  if (!types.A.check(args)) throw invalidType(1, ['array'], args)
  var schemas = rawSchemas.split('|')
  var arity = {}

  schemas.forEach(function (schema) {
    for (var ii = 0; ii < schema.length; ++ii) {
      var type = schema[ii]
      if (!types[type]) throw unknownType(ii, type)
    }
    if (/E.*E/.test(schema)) throw moreThanOneError(schema)
    addSchema(schema, arity)
    if (/E/.test(schema)) {
      addSchema(schema.replace(/E.*$/, 'E'), arity)
      addSchema(schema.replace(/E/, 'Z'), arity)
      if (schema.length === 1) addSchema('', arity)
    }
  })
  var matching = arity[args.length]
  if (!matching) {
    throw wrongNumberOfArgs(Object.keys(arity), args.length)
  }
  for (var ii = 0; ii < args.length; ++ii) {
    var newMatching = matching.filter(function (schema) {
      var type = schema[ii]
      var typeCheck = types[type].check
      return typeCheck(args[ii])
    })
    if (!newMatching.length) {
      var labels = matching.map(function (schema) {
        return types[schema[ii]].label
      }).filter(function (schema) { return schema != null })
      throw invalidType(ii, labels, args[ii])
    }
    matching = newMatching
  }
}

function missingRequiredArg (num) {
  return newException('EMISSINGARG', 'Missing required argument #' + (num + 1))
}

function unknownType (num, type) {
  return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1))
}

function invalidType (num, expectedTypes, value) {
  var valueType
  Object.keys(types).forEach(function (typeCode) {
    if (types[typeCode].check(value)) valueType = types[typeCode].label
  })
  return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' +
    englishList(expectedTypes) + ' but got ' + valueType)
}

function englishList (list) {
  return list.join(', ').replace(/, ([^,]+)$/, ' or $1')
}

function wrongNumberOfArgs (expected, got) {
  var english = englishList(expected)
  var args = expected.every(function (ex) { return ex.length === 1 })
    ? 'argument'
    : 'arguments'
  return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got)
}

function moreThanOneError (schema) {
  return newException('ETOOMANYERRORTYPES',
    'Only one error type per argument signature is allowed, more than one found in "' + schema + '"')
}

function newException (code, msg) {
  var e = new Error(msg)
  e.code = code
  if (Error.captureStackTrace) Error.captureStackTrace(e, validate)
  return e
}
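The `E` handling in `addSchema` above quietly multiplies each schema into its error/null/truncated variants. A small sketch of what `validate('ESO', ...)` ends up accepting, based on the calls above (illustrative only):

```javascript
var validate = require('aproba')

function callbackish (er, status, opts) {
  validate('ESO', arguments) // internally also registers 'ZSO' and 'E'
}

callbackish(null, 'done', {})        // ok: matches the 'ZSO' variant
callbackish(new Error('boom'))       // ok: trailing args are optional once an Error is present
// callbackish(new Error('boom'), 1) // throws: only 1 or 3 arguments are accepted here
```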
34 node_modules/aproba/package.json generated vendored Normal file
@@ -0,0 +1,34 @@
{
  "name": "aproba",
  "version": "1.2.0",
  "description": "A ridiculously light-weight argument validator (now browser friendly)",
  "main": "index.js",
  "directories": {
    "test": "test"
  },
  "dependencies": {},
  "devDependencies": {
    "standard": "^10.0.3",
    "tap": "^10.0.2"
  },
  "files": [
    "index.js"
  ],
  "scripts": {
    "test": "standard && tap -j3 test/*.js"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/iarna/aproba"
  },
  "keywords": [
    "argument",
    "validate"
  ],
  "author": "Rebecca Turner <me@re-becca.org>",
  "license": "ISC",
  "bugs": {
    "url": "https://github.com/iarna/aproba/issues"
  },
  "homepage": "https://github.com/iarna/aproba"
}
37 node_modules/are-we-there-yet/CHANGES.md generated vendored Normal file
@@ -0,0 +1,37 @@
Hi, figured we could actually use a changelog now:

## 1.1.5 2018-05-24

* [#92](https://github.com/iarna/are-we-there-yet/pull/92) Fix bug where
  `finish` would throw errors when including `TrackerStream` objects in
  `TrackerGroup` collections. (@brianloveswords)

## 1.1.4 2017-04-21

* Fix typo in package.json

## 1.1.3 2017-04-21

* Improve documentation and limit files included in the distribution.

## 1.1.2 2016-03-15

* Add tracker group cycle detection and tests for it

## 1.1.1 2016-01-29

* Fix a typo in stream completion tracker

## 1.1.0 2016-01-29

* Rewrote completion percent computation to be low impact– no more walking a
  tree of completion groups every time we need this info. Previously, with
  medium sized tree of completion groups, even a relatively modest number of
  calls to the top level `completed()` method would result in absurd numbers
  of calls overall as it walked down the tree. We now, instead, keep track as
  we bubble up changes, so the computation is limited to when data changes and
  to the depth of that one branch, instead of _every_ node. (Plus, we were already
  incurring _this_ cost, since we already bubbled out changes.)
* Moved different tracker types out to their own files.
* Made tests test for TOO MANY events too.
* Standarized the source code formatting
5 node_modules/are-we-there-yet/LICENSE generated vendored Normal file
@@ -0,0 +1,5 @@
Copyright (c) 2015, Rebecca Turner

Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
195
node_modules/are-we-there-yet/README.md
generated
vendored
Normal file
195
node_modules/are-we-there-yet/README.md
generated
vendored
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
are-we-there-yet
|
||||||
|
----------------
|
||||||
|
|
||||||
|
Track complex hiearchies of asynchronous task completion statuses. This is
|
||||||
|
intended to give you a way of recording and reporting the progress of the big
|
||||||
|
recursive fan-out and gather type workflows that are so common in async.
|
||||||
|
|
||||||
|
What you do with this completion data is up to you, but the most common use case is to
|
||||||
|
feed it to one of the many progress bar modules.
|
||||||
|
|
||||||
|
Most progress bar modules include a rudamentary version of this, but my
|
||||||
|
needs were more complex.
|
||||||
|
|
||||||
|
Usage
|
||||||
|
=====
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var TrackerGroup = require("are-we-there-yet").TrackerGroup
|
||||||
|
|
||||||
|
var top = new TrackerGroup("program")
|
||||||
|
|
||||||
|
var single = top.newItem("one thing", 100)
|
||||||
|
single.completeWork(20)
|
||||||
|
|
||||||
|
console.log(top.completed()) // 0.2
|
||||||
|
|
||||||
|
fs.stat("file", function(er, stat) {
|
||||||
|
if (er) throw er
|
||||||
|
var stream = top.newStream("file", stat.size)
|
||||||
|
console.log(top.completed()) // now 0.1 as single is 50% of the job and is 20% complete
|
||||||
|
// and 50% * 20% == 10%
|
||||||
|
fs.createReadStream("file").pipe(stream).on("data", function (chunk) {
|
||||||
|
// do stuff with chunk
|
||||||
|
})
|
||||||
|
top.on("change", function (name) {
|
||||||
|
// called each time a chunk is read from "file"
|
||||||
|
// top.completed() will start at 0.1 and fill up to 0.6 as the file is read
|
||||||
|
})
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
Shared Methods
|
||||||
|
==============
|
||||||
|
|
||||||
|
* var completed = tracker.completed()
|
||||||
|
|
||||||
|
Implemented in: `Tracker`, `TrackerGroup`, `TrackerStream`
|
||||||
|
|
||||||
|
Returns the ratio of completed work to work to be done. Range of 0 to 1.
|
||||||
|
|
||||||
|
* tracker.finish()
|
||||||
|
|
||||||
|
Implemented in: `Tracker`, `TrackerGroup`
|
||||||
|
|
||||||
|
Marks the tracker as completed. With a TrackerGroup this marks all of its
|
||||||
|
components as completed.
|
||||||
|
|
||||||
|
Marks all of the components of this tracker as finished, which in turn means
|
||||||
|
that `tracker.completed()` for this will now be 1.
|
||||||
|
|
||||||
|
This will result in one or more `change` events being emitted.
|
||||||
|
|
||||||
|
Events
|
||||||
|
======
|
||||||
|
|
||||||
|
All tracker objects emit `change` events with the following arguments:
|
||||||
|
|
||||||
|
```
|
||||||
|
function (name, completed, tracker)
|
||||||
|
```
|
||||||
|
|
||||||
|
`name` is the name of the tracker that originally emitted the event,
|
||||||
|
or if it didn't have one, the first containing tracker group that had one.
|
||||||
|
|
||||||
|
`completed` is the percent complete (as returned by `tracker.completed()` method).
|
||||||
|
|
||||||
|
`tracker` is the tracker object that you are listening for events on.
|
||||||
|
|
||||||
|
TrackerGroup
|
||||||
|
============
|
||||||
|
|
||||||
|
* var tracker = new TrackerGroup(**name**)
|
||||||
|
|
||||||
|
* **name** *(optional)* - The name of this tracker group, used in change
|
||||||
|
notifications if the component updating didn't have a name. Defaults to undefined.
|
||||||
|
|
||||||
|
Creates a new empty tracker aggregation group. These are trackers whose
|
||||||
|
completion status is determined by the completion status of other trackers.
|
||||||
|
|
||||||
|
* tracker.addUnit(**otherTracker**, **weight**)
|
||||||
|
|
||||||
|
* **otherTracker** - Any of the other are-we-there-yet tracker objects
|
||||||
|
* **weight** *(optional)* - The weight to give the tracker, defaults to 1.
|
||||||
|
|
||||||
|
Adds the **otherTracker** to this aggregation group. The weight determines
|
||||||
|
how long you expect this tracker to take to complete in proportion to other
|
||||||
|
units. So for instance, if you add one tracker with a weight of 1 and
|
||||||
|
another with a weight of 2, you're saying the second will take twice as long
|
||||||
|
to complete as the first. As such, the first will account for 33% of the
|
||||||
|
completion of this tracker and the second will account for the other 67%.
|
||||||
|
|
||||||
|
Returns **otherTracker**.
|
||||||
|
|
||||||
|
* var subGroup = tracker.newGroup(**name**, **weight**)
|
||||||
|
|
||||||
|
The above is exactly equivalent to:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var subGroup = tracker.addUnit(new TrackerGroup(name), weight)
|
||||||
|
```
|
||||||
|
|
||||||
|
* var subItem = tracker.newItem(**name**, **todo**, **weight**)
|
||||||
|
|
||||||
|
The above is exactly equivalent to:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var subItem = tracker.addUnit(new Tracker(name, todo), weight)
|
||||||
|
```
|
||||||
|
|
||||||
|
* var subStream = tracker.newStream(**name**, **todo**, **weight**)
|
||||||
|
|
||||||
|
The above is exactly equivalent to:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var subStream = tracker.addUnit(new TrackerStream(name, todo), weight)
|
||||||
|
```
|
||||||
|
|
||||||
|
* console.log( tracker.debug() )
|
||||||
|
|
||||||
|
Returns a tree showing the completion of this tracker group and all of its
|
||||||
|
children, including recursively entering all of the children.
|
||||||
|
|
||||||
|
Tracker
|
||||||
|
=======
|
||||||
|
|
||||||
|
* var tracker = new Tracker(**name**, **todo**)
|
||||||
|
|
||||||
|
* **name** *(optional)* The name of this counter to report in change
|
||||||
|
events. Defaults to undefined.
|
||||||
|
* **todo** *(optional)* The amount of work todo (a number). Defaults to 0.
|
||||||
|
|
||||||
|
Ordinarily these are constructed as a part of a tracker group (via
|
||||||
|
`newItem`).
|
||||||
|
|
||||||
|
* var completed = tracker.completed()
|
||||||
|
|
||||||
|
Returns the ratio of completed work to work to be done. Range of 0 to 1. If
|
||||||
|
total work to be done is 0 then it will return 0.
|
||||||
|
|
||||||
|
* tracker.addWork(**todo**)
|
||||||
|
|
||||||
|
* **todo** A number to add to the amount of work to be done.
|
||||||
|
|
||||||
|
Increases the amount of work to be done, thus decreasing the completion
|
||||||
|
percentage. Triggers a `change` event.
|
||||||
|
|
||||||
|
* tracker.completeWork(**completed**)
|
||||||
|
|
||||||
|
* **completed** A number to add to the work complete
|
||||||
|
|
||||||
|
Increase the amount of work complete, thus increasing the completion percentage.
|
||||||
|
Will never increase the work completed past the amount of work todo. That is,
|
||||||
|
percentages > 100% are not allowed. Triggers a `change` event.
|
||||||
|
|
||||||
|
* tracker.finish()
|
||||||
|
|
||||||
|
Marks this tracker as finished, tracker.completed() will now be 1. Triggers
|
||||||
|
a `change` event.
|
||||||
|
|
||||||
|
TrackerStream
|
||||||
|
=============
|
||||||
|
|
||||||
|
* var tracker = new TrackerStream(**name**, **size**, **options**)
|
||||||
|
|
||||||
|
* **name** *(optional)* The name of this counter to report in change
|
||||||
|
events. Defaults to undefined.
|
||||||
|
* **size** *(optional)* The number of bytes being sent through this stream.
|
||||||
|
* **options** *(optional)* A hash of stream options
|
||||||
|
|
||||||
|
The tracker stream object is a pass through stream that updates an internal
|
||||||
|
tracker object each time a block passes through. It's intended to track
|
||||||
|
downloads, file extraction and other related activities. You use it by piping
|
||||||
|
your data source into it and then using it as your data source.
|
||||||
|
|
||||||
|
If your data has a length attribute then that's used as the amount of work
|
||||||
|
completed when the chunk is passed through. If it does not (eg, object
|
||||||
|
streams) then each chunk counts as completing 1 unit of work, so your size
|
||||||
|
should be the total number of objects being streamed.
|
||||||
|
|
||||||
|
* tracker.addWork(**todo**)
|
||||||
|
|
||||||
|
* **todo** Increase the expected overall size by **todo** bytes.
|
||||||
|
|
||||||
|
Increases the amount of work to be done, thus decreasing the completion
|
||||||
|
percentage. Triggers a `change` event.
|
||||||
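A small sketch to make the `addUnit` weighting arithmetic described in the README above concrete (illustrative only, using the API as documented):

```javascript
var TrackerGroup = require('are-we-there-yet').TrackerGroup

var top = new TrackerGroup('weights')
var a = top.newItem('a', 10, 1) // weight 1 -> 1/3 of the group
var b = top.newItem('b', 10, 2) // weight 2 -> 2/3 of the group

a.finish()                      // a is 100% done, b is 0% done
console.log(top.completed())    // ~0.333 = 1/3 * 1 + 2/3 * 0
b.completeWork(5)               // b is now 50% done
console.log(top.completed())    // ~0.667 = 1/3 * 1 + 2/3 * 0.5
```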
4 node_modules/are-we-there-yet/index.js generated vendored Normal file
@@ -0,0 +1,4 @@
'use strict'
exports.TrackerGroup = require('./tracker-group.js')
exports.Tracker = require('./tracker.js')
exports.TrackerStream = require('./tracker-stream.js')
35 node_modules/are-we-there-yet/package.json generated vendored Normal file
@@ -0,0 +1,35 @@
{
  "name": "are-we-there-yet",
  "version": "1.1.7",
  "description": "Keep track of the overall completion of many disparate processes",
  "main": "index.js",
  "scripts": {
    "test": "standard && tap test/*.js"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/iarna/are-we-there-yet.git"
  },
  "author": "Rebecca Turner (http://re-becca.org)",
  "license": "ISC",
  "bugs": {
    "url": "https://github.com/iarna/are-we-there-yet/issues"
  },
  "homepage": "https://github.com/iarna/are-we-there-yet",
  "devDependencies": {
    "standard": "^11.0.1",
    "tap": "^12.0.1"
  },
  "dependencies": {
    "delegates": "^1.0.0",
    "readable-stream": "^2.0.6"
  },
  "files": [
    "index.js",
    "tracker-base.js",
    "tracker-group.js",
    "tracker-stream.js",
    "tracker.js",
    "CHANGES.md"
  ]
}
11 node_modules/are-we-there-yet/tracker-base.js generated vendored Normal file
@@ -0,0 +1,11 @@
'use strict'
var EventEmitter = require('events').EventEmitter
var util = require('util')

var trackerId = 0
var TrackerBase = module.exports = function (name) {
  EventEmitter.call(this)
  this.id = ++trackerId
  this.name = name
}
util.inherits(TrackerBase, EventEmitter)
107 node_modules/are-we-there-yet/tracker-group.js generated vendored Normal file
@@ -0,0 +1,107 @@
'use strict'
var util = require('util')
var TrackerBase = require('./tracker-base.js')
var Tracker = require('./tracker.js')
var TrackerStream = require('./tracker-stream.js')

var TrackerGroup = module.exports = function (name) {
  TrackerBase.call(this, name)
  this.parentGroup = null
  this.trackers = []
  this.completion = {}
  this.weight = {}
  this.totalWeight = 0
  this.finished = false
  this.bubbleChange = bubbleChange(this)
}
util.inherits(TrackerGroup, TrackerBase)

function bubbleChange (trackerGroup) {
  return function (name, completed, tracker) {
    trackerGroup.completion[tracker.id] = completed
    if (trackerGroup.finished) return
    trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup)
  }
}

TrackerGroup.prototype.nameInTree = function () {
  var names = []
  var from = this
  while (from) {
    names.unshift(from.name)
    from = from.parentGroup
  }
  return names.join('/')
}

TrackerGroup.prototype.addUnit = function (unit, weight) {
  if (unit.addUnit) {
    var toTest = this
    while (toTest) {
      if (unit === toTest) {
        throw new Error(
          'Attempted to add tracker group ' +
          unit.name + ' to tree that already includes it ' +
          this.nameInTree(this))
      }
      toTest = toTest.parentGroup
    }
    unit.parentGroup = this
  }
  this.weight[unit.id] = weight || 1
  this.totalWeight += this.weight[unit.id]
  this.trackers.push(unit)
  this.completion[unit.id] = unit.completed()
  unit.on('change', this.bubbleChange)
  if (!this.finished) this.emit('change', unit.name, this.completion[unit.id], unit)
  return unit
}

TrackerGroup.prototype.completed = function () {
  if (this.trackers.length === 0) return 0
  var valPerWeight = 1 / this.totalWeight
  var completed = 0
  for (var ii = 0; ii < this.trackers.length; ii++) {
    var trackerId = this.trackers[ii].id
    completed += valPerWeight * this.weight[trackerId] * this.completion[trackerId]
  }
  return completed
}

TrackerGroup.prototype.newGroup = function (name, weight) {
  return this.addUnit(new TrackerGroup(name), weight)
}

TrackerGroup.prototype.newItem = function (name, todo, weight) {
  return this.addUnit(new Tracker(name, todo), weight)
}

TrackerGroup.prototype.newStream = function (name, todo, weight) {
  return this.addUnit(new TrackerStream(name, todo), weight)
}

TrackerGroup.prototype.finish = function () {
  this.finished = true
  if (!this.trackers.length) this.addUnit(new Tracker(), 1, true)
  for (var ii = 0; ii < this.trackers.length; ii++) {
    var tracker = this.trackers[ii]
    tracker.finish()
    tracker.removeListener('change', this.bubbleChange)
  }
  this.emit('change', this.name, 1, this)
}

var buffer = ' '
TrackerGroup.prototype.debug = function (depth) {
  depth = depth || 0
  var indent = depth ? buffer.substr(0, depth) : ''
  var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n'
  this.trackers.forEach(function (tracker) {
    if (tracker instanceof TrackerGroup) {
      output += tracker.debug(depth + 1)
    } else {
      output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n'
    }
  })
  return output
}
36 node_modules/are-we-there-yet/tracker-stream.js generated vendored Normal file
@@ -0,0 +1,36 @@
'use strict'
var util = require('util')
var stream = require('readable-stream')
var delegate = require('delegates')
var Tracker = require('./tracker.js')

var TrackerStream = module.exports = function (name, size, options) {
  stream.Transform.call(this, options)
  this.tracker = new Tracker(name, size)
  this.name = name
  this.id = this.tracker.id
  this.tracker.on('change', delegateChange(this))
}
util.inherits(TrackerStream, stream.Transform)

function delegateChange (trackerStream) {
  return function (name, completion, tracker) {
    trackerStream.emit('change', name, completion, trackerStream)
  }
}

TrackerStream.prototype._transform = function (data, encoding, cb) {
  this.tracker.completeWork(data.length ? data.length : 1)
  this.push(data)
  cb()
}

TrackerStream.prototype._flush = function (cb) {
  this.tracker.finish()
  cb()
}

delegate(TrackerStream.prototype, 'tracker')
  .method('completed')
  .method('addWork')
  .method('finish')
30 node_modules/are-we-there-yet/tracker.js generated vendored Normal file
@@ -0,0 +1,30 @@
'use strict'
var util = require('util')
var TrackerBase = require('./tracker-base.js')

var Tracker = module.exports = function (name, todo) {
  TrackerBase.call(this, name)
  this.workDone = 0
  this.workTodo = todo || 0
}
util.inherits(Tracker, TrackerBase)

Tracker.prototype.completed = function () {
  return this.workTodo === 0 ? 0 : this.workDone / this.workTodo
}

Tracker.prototype.addWork = function (work) {
  this.workTodo += work
  this.emit('change', this.name, this.completed(), this)
}

Tracker.prototype.completeWork = function (work) {
  this.workDone += work
  if (this.workDone > this.workTodo) this.workDone = this.workTodo
  this.emit('change', this.name, this.completed(), this)
}

Tracker.prototype.finish = function () {
  this.workTodo = this.workDone = 1
  this.emit('change', this.name, 1, this)
}
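For reference, a short sketch of the clamping behaviour implemented by `completeWork` and `addWork` above (illustrative only):

```javascript
var Tracker = require('are-we-there-yet').Tracker

var t = new Tracker('clamp-demo', 10)
t.completeWork(25)          // workDone is clamped to workTodo
console.log(t.completed())  // 1 -- never more than 100%
t.addWork(10)               // more work shows up later
console.log(t.completed())  // 0.5 -- 10 done out of 20 todo
```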
2 node_modules/async-limiter/.eslintignore generated vendored Normal file
@@ -0,0 +1,2 @@
coverage
.nyc_output
10 node_modules/async-limiter/.nycrc generated vendored Normal file
@@ -0,0 +1,10 @@
{
  "check-coverage": false,
  "lines": 99,
  "statements": 99,
  "functions": 99,
  "branches": 99,
  "include": [
    "index.js"
  ]
}
9 node_modules/async-limiter/.travis.yml generated vendored Normal file
@@ -0,0 +1,9 @@
language: node_js
node_js:
  - "6"
  - "8"
  - "10"
  - "node"
script: npm run travis
cache:
  yarn: true
8 node_modules/async-limiter/LICENSE generated vendored Normal file
@@ -0,0 +1,8 @@
The MIT License (MIT)
Copyright (c) 2017 Samuel Reed <samuel.trace.reed@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
67 node_modules/async-limiter/index.js generated vendored Normal file
@@ -0,0 +1,67 @@
'use strict';

function Queue(options) {
  if (!(this instanceof Queue)) {
    return new Queue(options);
  }

  options = options || {};
  this.concurrency = options.concurrency || Infinity;
  this.pending = 0;
  this.jobs = [];
  this.cbs = [];
  this._done = done.bind(this);
}

var arrayAddMethods = [
  'push',
  'unshift',
  'splice'
];

arrayAddMethods.forEach(function(method) {
  Queue.prototype[method] = function() {
    var methodResult = Array.prototype[method].apply(this.jobs, arguments);
    this._run();
    return methodResult;
  };
});

Object.defineProperty(Queue.prototype, 'length', {
  get: function() {
    return this.pending + this.jobs.length;
  }
});

Queue.prototype._run = function() {
  if (this.pending === this.concurrency) {
    return;
  }
  if (this.jobs.length) {
    var job = this.jobs.shift();
    this.pending++;
    job(this._done);
    this._run();
  }

  if (this.pending === 0) {
    while (this.cbs.length !== 0) {
      var cb = this.cbs.pop();
      process.nextTick(cb);
    }
  }
};

Queue.prototype.onDone = function(cb) {
  if (typeof cb === 'function') {
    this.cbs.push(cb);
    this._run();
  }
};

function done() {
  this.pending--;
  this._run();
}

module.exports = Queue;
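A minimal sketch of how the `_run`/`done` pair above caps the number of jobs in flight (illustrative only):

```javascript
var Limiter = require('async-limiter')

var q = new Limiter({ concurrency: 2 })
var active = 0

for (var i = 0; i < 6; i++) {
  q.push(function (done) {
    active++
    console.log('in flight:', active) // never more than 2
    setTimeout(function () { active--; done() }, 50)
  })
}

q.onDone(function () { console.log('queue drained') })
```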
35 node_modules/async-limiter/package.json generated vendored Normal file
@@ -0,0 +1,35 @@
{
  "name": "async-limiter",
  "version": "1.0.1",
  "description": "asynchronous function queue with adjustable concurrency",
  "keywords": [
    "throttle",
    "async",
    "limiter",
    "asynchronous",
    "job",
    "task",
    "concurrency",
    "concurrent"
  ],
  "dependencies": {},
  "devDependencies": {
    "coveralls": "^3.0.3",
    "eslint": "^5.16.0",
    "eslint-plugin-mocha": "^5.3.0",
    "intelli-espower-loader": "^1.0.1",
    "mocha": "^6.1.4",
    "nyc": "^14.1.1",
    "power-assert": "^1.6.1"
  },
  "scripts": {
    "test": "mocha --require intelli-espower-loader test/",
    "travis": "npm run lint && npm run test",
    "coverage": "nyc npm test && nyc report --reporter=text-lcov | coveralls",
    "example": "node example",
    "lint": "eslint ."
  },
  "repository": "https://github.com/strml/async-limiter.git",
  "author": "Samuel Reed <samuel.trace.reed@gmail.com",
  "license": "MIT"
}
132
node_modules/async-limiter/readme.md
generated
vendored
Normal file
132
node_modules/async-limiter/readme.md
generated
vendored
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
# Async-Limiter
|
||||||
|
|
||||||
|
A module for limiting concurrent asynchronous actions in flight. Forked from [queue](https://github.com/jessetane/queue).
|
||||||
|
|
||||||
|
[](http://www.npmjs.org/async-limiter)
|
||||||
|
[](https://travis-ci.org/STRML/async-limiter)
|
||||||
|
[](https://coveralls.io/r/STRML/async-limiter)
|
||||||
|
|
||||||
|
This module exports a class `Limiter` that implements some of the `Array` API.
|
||||||
|
Pass async functions (ones that accept a callback or return a promise) to an instance's additive array methods.
|
||||||
|
|
||||||
|
## Motivation
|
||||||
|
|
||||||
|
Certain functions, like `zlib`, have [undesirable behavior](https://github.com/nodejs/node/issues/8871#issuecomment-250915913) when
|
||||||
|
run at infinite concurrency.
|
||||||
|
|
||||||
|
In this case, it is actually faster, and takes far less memory, to limit concurrency.
|
||||||
|
|
||||||
|
This module should do the absolute minimum work necessary to queue up functions. PRs are welcome that would
|
||||||
|
make this module faster or lighter, but new functionality is not desired.
|
||||||
|
|
||||||
|
Style should confirm to nodejs/node style.
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
``` javascript
|
||||||
|
var Limiter = require('async-limiter')
|
||||||
|
|
||||||
|
var t = new Limiter({concurrency: 2});
|
||||||
|
var results = []
|
||||||
|
|
||||||
|
// add jobs using the familiar Array API
|
||||||
|
t.push(function (cb) {
|
||||||
|
results.push('two')
|
||||||
|
cb()
|
||||||
|
})
|
||||||
|
|
||||||
|
t.push(
|
||||||
|
function (cb) {
|
||||||
|
results.push('four')
|
||||||
|
cb()
|
||||||
|
},
|
||||||
|
function (cb) {
|
||||||
|
results.push('five')
|
||||||
|
cb()
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
t.unshift(function (cb) {
|
||||||
|
results.push('one')
|
||||||
|
cb()
|
||||||
|
})
|
||||||
|
|
||||||
|
t.splice(2, 0, function (cb) {
|
||||||
|
results.push('three')
|
||||||
|
cb()
|
||||||
|
})
|
||||||
|
|
||||||
|
// Jobs run automatically. If you want a callback when all are done,
|
||||||
|
// call 'onDone()'.
|
||||||
|
t.onDone(function () {
|
||||||
|
console.log('all done:', results)
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## Zlib Example
|
||||||
|
|
||||||
|
```js
|
||||||
|
const zlib = require('zlib');
|
||||||
|
const Limiter = require('async-limiter');
|
||||||
|
|
||||||
|
const message = {some: "data"};
|
||||||
|
const payload = new Buffer(JSON.stringify(message));
|
||||||
|
|
||||||
|
// Try with different concurrency values to see how this actually
|
||||||
|
// slows significantly with higher concurrency!
|
||||||
|
//
|
||||||
|
// 5: 1398.607ms
|
||||||
|
// 10: 1375.668ms
|
||||||
|
// Infinity: 4423.300ms
|
||||||
|
//
|
||||||
|
const t = new Limiter({concurrency: 5});
|
||||||
|
function deflate(payload, cb) {
|
||||||
|
t.push(function(done) {
|
||||||
|
zlib.deflate(payload, function(err, buffer) {
|
||||||
|
done();
|
||||||
|
cb(err, buffer);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
console.time('deflate');
|
||||||
|
for(let i = 0; i < 30000; ++i) {
|
||||||
|
deflate(payload, function (err, buffer) {});
|
||||||
|
}
|
||||||
|
t.onDone(function() {
|
||||||
|
console.timeEnd('deflate');
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
`npm install async-limiter`
|
||||||
|
|
||||||
|
## Test
|
||||||
|
|
||||||
|
`npm test`
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
### `var t = new Limiter([opts])`
|
||||||
|
Constructor. `opts` may contain inital values for:
|
||||||
|
* `t.concurrency`
|
||||||
|
|
||||||
|
## Instance methods
|
||||||
|
|
||||||
|
### `t.onDone(fn)`
|
||||||
|
`fn` will be called once and only once, when the queue is empty.
|
||||||
|
|
||||||
|
## Instance methods mixed in from `Array`
|
||||||
|
Mozilla has docs on how these methods work [here](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array).
|
||||||
|
### `t.push(element1, ..., elementN)`
|
||||||
|
### `t.unshift(element1, ..., elementN)`
|
||||||
|
### `t.splice(index , howMany[, element1[, ...[, elementN]]])`
|
||||||
|
|
||||||
|
## Properties
|
||||||
|
### `t.concurrency`
|
||||||
|
Max number of jobs the queue should process concurrently, defaults to `Infinity`.
|
||||||
|
|
||||||
|
### `t.length`
|
||||||
|
Jobs pending + jobs to process (readonly).
|
||||||
|
|
||||||
233
node_modules/asynckit/README.md
generated
vendored
233
node_modules/asynckit/README.md
generated
vendored
@@ -1,233 +0,0 @@
|
|||||||
# asynckit [](https://www.npmjs.com/package/asynckit)
|
|
||||||
|
|
||||||
Minimal async jobs utility library, with streams support.
|
|
||||||
|
|
||||||
[](https://travis-ci.org/alexindigo/asynckit)
|
|
||||||
[](https://travis-ci.org/alexindigo/asynckit)
|
|
||||||
[](https://ci.appveyor.com/project/alexindigo/asynckit)
|
|
||||||
|
|
||||||
[](https://coveralls.io/github/alexindigo/asynckit?branch=master)
|
|
||||||
[](https://david-dm.org/alexindigo/asynckit)
|
|
||||||
[](https://www.bithound.io/github/alexindigo/asynckit)
|
|
||||||
|
|
||||||
<!-- [](https://www.npmjs.com/package/reamde) -->
|
|
||||||
|
|
||||||
AsyncKit provides harness for `parallel` and `serial` iterators over list of items represented by arrays or objects.
|
|
||||||
Optionally it accepts abort function (should be synchronously return by iterator for each item), and terminates left over jobs upon an error event. For specific iteration order built-in (`ascending` and `descending`) and custom sort helpers also supported, via `asynckit.serialOrdered` method.
|
|
||||||
|
|
||||||
It ensures async operations to keep behavior more stable and prevent `Maximum call stack size exceeded` errors, from sync iterators.
|
|
||||||
|
|
||||||
| compression | size |
|
|
||||||
| :----------------- | -------: |
|
|
||||||
| asynckit.js | 12.34 kB |
|
|
||||||
| asynckit.min.js | 4.11 kB |
|
|
||||||
| asynckit.min.js.gz | 1.47 kB |
|
|
||||||
|
|
||||||
|
|
||||||
## Install
|
|
||||||
|
|
||||||
```sh
|
|
||||||
$ npm install --save asynckit
|
|
||||||
```
|
|
||||||
|
|
||||||
## Examples
|
|
||||||
|
|
||||||
### Parallel Jobs
|
|
||||||
|
|
||||||
Runs iterator over provided array in parallel. Stores output in the `result` array,
|
|
||||||
on the matching positions. In unlikely event of an error from one of the jobs,
|
|
||||||
will terminate rest of the active jobs (if abort function is provided)
|
|
||||||
and return error along with salvaged data to the main callback function.
|
|
||||||
|
|
||||||
#### Input Array
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
var parallel = require('asynckit').parallel
|
|
||||||
, assert = require('assert')
|
|
||||||
;
|
|
||||||
|
|
||||||
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
|
||||||
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
|
|
||||||
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
|
|
||||||
, target = []
|
|
||||||
;
|
|
||||||
|
|
||||||
parallel(source, asyncJob, function(err, result)
|
|
||||||
{
|
|
||||||
assert.deepEqual(result, expectedResult);
|
|
||||||
assert.deepEqual(target, expectedTarget);
|
|
||||||
});
|
|
||||||
|
|
||||||
// async job accepts one element from the array
|
|
||||||
// and a callback function
|
|
||||||
function asyncJob(item, cb)
|
|
||||||
{
|
|
||||||
// different delays (in ms) per item
|
|
||||||
var delay = item * 25;
|
|
||||||
|
|
||||||
// pretend different jobs take different time to finish
|
|
||||||
// and not in consequential order
|
|
||||||
var timeoutId = setTimeout(function() {
|
|
||||||
target.push(item);
|
|
||||||
cb(null, item * 2);
|
|
||||||
}, delay);
|
|
||||||
|
|
||||||
// allow to cancel "leftover" jobs upon error
|
|
||||||
// return function, invoking of which will abort this job
|
|
||||||
return clearTimeout.bind(null, timeoutId);
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js).
|
|
||||||
|
|
||||||
#### Input Object
|
|
||||||
|
|
||||||
Also it supports named jobs, listed via object.
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
var parallel = require('asynckit/parallel')
|
|
||||||
, assert = require('assert')
|
|
||||||
;
|
|
||||||
|
|
||||||
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
|
|
||||||
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
|
|
||||||
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
|
|
||||||
, expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ]
|
|
||||||
, target = []
|
|
||||||
, keys = []
|
|
||||||
;
|
|
||||||
|
|
||||||
parallel(source, asyncJob, function(err, result)
|
|
||||||
{
|
|
||||||
assert.deepEqual(result, expectedResult);
|
|
||||||
assert.deepEqual(target, expectedTarget);
|
|
||||||
assert.deepEqual(keys, expectedKeys);
|
|
||||||
});
|
|
||||||
|
|
||||||
// supports full value, key, callback (shortcut) interface
|
|
||||||
function asyncJob(item, key, cb)
|
|
||||||
{
|
|
||||||
// different delays (in ms) per item
|
|
||||||
var delay = item * 25;
|
|
||||||
|
|
||||||
// pretend different jobs take different time to finish
|
|
||||||
// and not in consequential order
|
|
||||||
var timeoutId = setTimeout(function() {
|
|
||||||
keys.push(key);
|
|
||||||
target.push(item);
|
|
||||||
cb(null, item * 2);
|
|
||||||
}, delay);
|
|
||||||
|
|
||||||
// allow to cancel "leftover" jobs upon error
|
|
||||||
// return function, invoking of which will abort this job
|
|
||||||
return clearTimeout.bind(null, timeoutId);
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js).
|
|
||||||
|
|
||||||
### Serial Jobs
|
|
||||||
|
|
||||||
Runs iterator over provided array sequentially. Stores output in the `result` array,
|
|
||||||
on the matching positions. In unlikely event of an error from one of the jobs,
|
|
||||||
will not proceed to the rest of the items in the list
|
|
||||||
and return error along with salvaged data to the main callback function.
|
|
||||||
|
|
||||||
#### Input Array
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
var serial = require('asynckit/serial')
|
|
||||||
, assert = require('assert')
|
|
||||||
;
|
|
||||||
|
|
||||||
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
|
||||||
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
|
|
||||||
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
|
|
||||||
, target = []
|
|
||||||
;
|
|
||||||
|
|
||||||
serial(source, asyncJob, function(err, result)
|
|
||||||
{
|
|
||||||
assert.deepEqual(result, expectedResult);
|
|
||||||
assert.deepEqual(target, expectedTarget);
|
|
||||||
});
|
|
||||||
|
|
||||||
// extended interface (item, key, callback)
|
|
||||||
// also supported for arrays
|
|
||||||
function asyncJob(item, key, cb)
|
|
||||||
{
|
|
||||||
target.push(key);
|
|
||||||
|
|
||||||
// it will be automatically made async
|
|
||||||
// even it iterator "returns" in the same event loop
|
|
||||||
cb(null, item * 2);
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
More examples could be found in [test/test-serial-array.js](test/test-serial-array.js).
|
|
||||||
|
|
||||||
#### Input Object
|
|
||||||
|
|
||||||
Also it supports named jobs, listed via object.
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
var serial = require('asynckit').serial
|
|
||||||
, assert = require('assert')
|
|
||||||
;
|
|
||||||
|
|
||||||
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
|
||||||
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
|
|
||||||
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
|
|
||||||
, target = []
|
|
||||||
;
|
|
||||||
|
|
||||||
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
|
|
||||||
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
|
|
||||||
, expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
|
||||||
, target = []
|
|
||||||
;
|
|
||||||
|
|
||||||
|
|
||||||
serial(source, asyncJob, function(err, result)
|
|
||||||
{
|
|
||||||
assert.deepEqual(result, expectedResult);
|
|
||||||
assert.deepEqual(target, expectedTarget);
|
|
||||||
});
|
|
||||||
|
|
||||||
// shortcut interface (item, callback)
|
|
||||||
// works for object as well as for the arrays
|
|
||||||
function asyncJob(item, cb)
|
|
||||||
{
|
|
||||||
target.push(item);
|
|
||||||
|
|
||||||
// it will be automatically made async
|
|
||||||
// even it iterator "returns" in the same event loop
|
|
||||||
cb(null, item * 2);
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
More examples could be found in [test/test-serial-object.js](test/test-serial-object.js).
|
|
||||||
|
|
||||||
_Note: Since _object_ is an _unordered_ collection of properties,
|
|
||||||
it may produce unexpected results with sequential iterations.
|
|
||||||
Whenever order of the jobs' execution is important please use `serialOrdered` method._
|
|
||||||
|
|
||||||
### Ordered Serial Iterations
|
|
||||||
|
|
||||||
TBD
|
|
||||||
|
|
||||||
For example [compare-property](compare-property) package.
|
|
||||||
|
|
||||||
### Streaming interface
|
|
||||||
|
|
||||||
TBD
|
|
||||||
|
|
||||||
## Want to Know More?
|
|
||||||
|
|
||||||
More examples can be found in [test folder](test/).
|
|
||||||
|
|
||||||
Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions.
|
|
||||||
|
|
||||||
## License
|
|
||||||
|
|
||||||
AsyncKit is licensed under the MIT license.
|
|
||||||
76
node_modules/asynckit/bench.js
generated
vendored
76
node_modules/asynckit/bench.js
generated
vendored
@@ -1,76 +0,0 @@
|
|||||||
/* eslint no-console: "off" */
|
|
||||||
|
|
||||||
var asynckit = require('./')
|
|
||||||
, async = require('async')
|
|
||||||
, assert = require('assert')
|
|
||||||
, expected = 0
|
|
||||||
;
|
|
||||||
|
|
||||||
var Benchmark = require('benchmark');
|
|
||||||
var suite = new Benchmark.Suite;
|
|
||||||
|
|
||||||
var source = [];
|
|
||||||
for (var z = 1; z < 100; z++)
|
|
||||||
{
|
|
||||||
source.push(z);
|
|
||||||
expected += z;
|
|
||||||
}
|
|
||||||
|
|
||||||
suite
|
|
||||||
// add tests
|
|
||||||
|
|
||||||
.add('async.map', function(deferred)
|
|
||||||
{
|
|
||||||
var total = 0;
|
|
||||||
|
|
||||||
async.map(source,
|
|
||||||
function(i, cb)
|
|
||||||
{
|
|
||||||
setImmediate(function()
|
|
||||||
{
|
|
||||||
total += i;
|
|
||||||
cb(null, total);
|
|
||||||
});
|
|
||||||
},
|
|
||||||
function(err, result)
|
|
||||||
{
|
|
||||||
assert.ifError(err);
|
|
||||||
assert.equal(result[result.length - 1], expected);
|
|
||||||
deferred.resolve();
|
|
||||||
});
|
|
||||||
}, {'defer': true})
|
|
||||||
|
|
||||||
|
|
||||||
.add('asynckit.parallel', function(deferred)
|
|
||||||
{
|
|
||||||
var total = 0;
|
|
||||||
|
|
||||||
asynckit.parallel(source,
|
|
||||||
function(i, cb)
|
|
||||||
{
|
|
||||||
setImmediate(function()
|
|
||||||
{
|
|
||||||
total += i;
|
|
||||||
cb(null, total);
|
|
||||||
});
|
|
||||||
},
|
|
||||||
function(err, result)
|
|
||||||
{
|
|
||||||
assert.ifError(err);
|
|
||||||
assert.equal(result[result.length - 1], expected);
|
|
||||||
deferred.resolve();
|
|
||||||
});
|
|
||||||
}, {'defer': true})
|
|
||||||
|
|
||||||
|
|
||||||
// add listeners
|
|
||||||
.on('cycle', function(ev)
|
|
||||||
{
|
|
||||||
console.log(String(ev.target));
|
|
||||||
})
|
|
||||||
.on('complete', function()
|
|
||||||
{
|
|
||||||
console.log('Fastest is ' + this.filter('fastest').map('name'));
|
|
||||||
})
|
|
||||||
// run async
|
|
||||||
.run({ 'async': true });
|
|
||||||
6 node_modules/asynckit/index.js generated vendored
@@ -1,6 +0,0 @@
module.exports =
{
  parallel      : require('./parallel.js'),
  serial        : require('./serial.js'),
  serialOrdered : require('./serialOrdered.js')
};
29 node_modules/asynckit/lib/abort.js generated vendored
@@ -1,29 +0,0 @@
// API
module.exports = abort;

/**
 * Aborts leftover active jobs
 *
 * @param {object} state - current state object
 */
function abort(state)
{
  Object.keys(state.jobs).forEach(clean.bind(state));

  // reset leftover jobs
  state.jobs = {};
}

/**
 * Cleans up leftover job by invoking abort function for the provided job id
 *
 * @this  state
 * @param {string|number} key - job id to abort
 */
function clean(key)
{
  if (typeof this.jobs[key] == 'function')
  {
    this.jobs[key]();
  }
}
34 node_modules/asynckit/lib/async.js generated vendored
@@ -1,34 +0,0 @@
var defer = require('./defer.js');

// API
module.exports = async;

/**
 * Runs provided callback asynchronously
 * even if callback itself is not
 *
 * @param   {function} callback - callback to invoke
 * @returns {function} - augmented callback
 */
function async(callback)
{
  var isAsync = false;

  // check if async happened
  defer(function() { isAsync = true; });

  return function async_callback(err, result)
  {
    if (isAsync)
    {
      callback(err, result);
    }
    else
    {
      defer(function nextTick_callback()
      {
        callback(err, result);
      });
    }
  };
}
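The deleted wrapper above exists so that iterator callbacks always fire asynchronously, even when the iterator completes in the same tick. A tiny sketch of the difference it makes (illustrative only):

```javascript
var async = require('asynckit/lib/async.js')

function syncIterator (item, cb)
{
  cb(null, item * 2); // completes synchronously
}

syncIterator(21, async(function (err, result)
{
  console.log('after', result); // deferred to a later tick
}));
console.log('before');          // always logs first
```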
26 node_modules/asynckit/lib/defer.js generated vendored
@@ -1,26 +0,0 @@
module.exports = defer;

/**
 * Runs provided function on next iteration of the event loop
 *
 * @param {function} fn - function to run
 */
function defer(fn)
{
  var nextTick = typeof setImmediate == 'function'
    ? setImmediate
    : (
      typeof process == 'object' && typeof process.nextTick == 'function'
      ? process.nextTick
      : null
    );

  if (nextTick)
  {
    nextTick(fn);
  }
  else
  {
    setTimeout(fn, 0);
  }
}
75 node_modules/asynckit/lib/iterate.js generated vendored
@@ -1,75 +0,0 @@
var async = require('./async.js')
  , abort = require('./abort.js')
  ;

// API
module.exports = iterate;

/**
 * Iterates over each job object
 *
 * @param {array|object} list - array or object (named list) to iterate over
 * @param {function} iterator - iterator to run
 * @param {object} state - current job status
 * @param {function} callback - invoked when all elements processed
 */
function iterate(list, iterator, state, callback)
{
  // store current index
  var key = state['keyedList'] ? state['keyedList'][state.index] : state.index;

  state.jobs[key] = runJob(iterator, key, list[key], function(error, output)
  {
    // don't repeat yourself
    // skip secondary callbacks
    if (!(key in state.jobs))
    {
      return;
    }

    // clean up jobs
    delete state.jobs[key];

    if (error)
    {
      // don't process rest of the results
      // stop still active jobs
      // and reset the list
      abort(state);
    }
    else
    {
      state.results[key] = output;
    }

    // return salvaged results
    callback(error, state.results);
  });
}

/**
 * Runs iterator over provided job element
 *
 * @param   {function} iterator - iterator to invoke
 * @param   {string|number} key - key/index of the element in the list of jobs
 * @param   {mixed} item - job description
 * @param   {function} callback - invoked after iterator is done with the job
 * @returns {function|mixed} - job abort function or something else
 */
function runJob(iterator, key, item, callback)
{
  var aborter;

  // allow shortcut if iterator expects only two arguments
  if (iterator.length == 2)
  {
    aborter = iterator(item, async(callback));
  }
  // otherwise go with full three arguments
  else
  {
    aborter = iterator(item, key, async(callback));
  }

  return aborter;
}
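`runJob()` dispatches on `iterator.length`, so an iterator may take either `(item, cb)` or `(item, key, cb)`, and whatever it returns is kept as the job's abort handle. A hedged sketch of both forms:

```js
// Two-argument shortcut: the key is not needed.
function shortIterator(item, cb)
{
  var timer = setTimeout(function() { cb(null, item * 2); }, 100);
  // returning a function makes this job abortable via abort()/terminator()
  return function abortJob() { clearTimeout(timer); };
}

// Full three-argument form: the key (array index or object key) is passed through.
function longIterator(item, key, cb)
{
  cb(null, key + ':' + item);
}
```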
91 node_modules/asynckit/lib/readable_asynckit.js generated vendored
@@ -1,91 +0,0 @@
var streamify = require('./streamify.js')
  , defer     = require('./defer.js')
  ;

// API
module.exports = ReadableAsyncKit;

/**
 * Base constructor for all streams
 * used to hold properties/methods
 */
function ReadableAsyncKit()
{
  ReadableAsyncKit.super_.apply(this, arguments);

  // list of active jobs
  this.jobs = {};

  // add stream methods
  this.destroy = destroy;
  this._start  = _start;
  this._read   = _read;
}

/**
 * Destroys readable stream,
 * by aborting outstanding jobs
 *
 * @returns {void}
 */
function destroy()
{
  if (this.destroyed)
  {
    return;
  }

  this.destroyed = true;

  if (typeof this.terminator == 'function')
  {
    this.terminator();
  }
}

/**
 * Starts provided jobs in async manner
 *
 * @private
 */
function _start()
{
  // first argument – runner function
  var runner = arguments[0]
    // take away first argument
    , args   = Array.prototype.slice.call(arguments, 1)
    // second argument - input data
    , input  = args[0]
    // last argument - result callback
    , endCb  = streamify.callback.call(this, args[args.length - 1])
    ;

  args[args.length - 1] = endCb;
  // third argument - iterator
  args[1] = streamify.iterator.call(this, args[1]);

  // allow time for proper setup
  defer(function()
  {
    if (!this.destroyed)
    {
      this.terminator = runner.apply(null, args);
    }
    else
    {
      endCb(null, Array.isArray(input) ? [] : {});
    }
  }.bind(this));
}


/**
 * Implement _read to comply with Readable streams
 * Doesn't really make sense for flowing object mode
 *
 * @private
 */
function _read()
{

}
25 node_modules/asynckit/lib/readable_parallel.js generated vendored
@@ -1,25 +0,0 @@
var parallel = require('../parallel.js');

// API
module.exports = ReadableParallel;

/**
 * Streaming wrapper to `asynckit.parallel`
 *
 * @param   {array|object} list - array or object (named list) to iterate over
 * @param   {function} iterator - iterator to run
 * @param   {function} callback - invoked when all elements processed
 * @returns {stream.Readable#}
 */
function ReadableParallel(list, iterator, callback)
{
  if (!(this instanceof ReadableParallel))
  {
    return new ReadableParallel(list, iterator, callback);
  }

  // turn on object mode
  ReadableParallel.super_.call(this, {objectMode: true});

  this._start(parallel, list, iterator, callback);
}
25 node_modules/asynckit/lib/readable_serial.js generated vendored
@@ -1,25 +0,0 @@
var serial = require('../serial.js');

// API
module.exports = ReadableSerial;

/**
 * Streaming wrapper to `asynckit.serial`
 *
 * @param   {array|object} list - array or object (named list) to iterate over
 * @param   {function} iterator - iterator to run
 * @param   {function} callback - invoked when all elements processed
 * @returns {stream.Readable#}
 */
function ReadableSerial(list, iterator, callback)
{
  if (!(this instanceof ReadableSerial))
  {
    return new ReadableSerial(list, iterator, callback);
  }

  // turn on object mode
  ReadableSerial.super_.call(this, {objectMode: true});

  this._start(serial, list, iterator, callback);
}
29 node_modules/asynckit/lib/readable_serial_ordered.js generated vendored
@@ -1,29 +0,0 @@
var serialOrdered = require('../serialOrdered.js');

// API
module.exports = ReadableSerialOrdered;
// expose sort helpers
module.exports.ascending  = serialOrdered.ascending;
module.exports.descending = serialOrdered.descending;

/**
 * Streaming wrapper to `asynckit.serialOrdered`
 *
 * @param   {array|object} list - array or object (named list) to iterate over
 * @param   {function} iterator - iterator to run
 * @param   {function} sortMethod - custom sort function
 * @param   {function} callback - invoked when all elements processed
 * @returns {stream.Readable#}
 */
function ReadableSerialOrdered(list, iterator, sortMethod, callback)
{
  if (!(this instanceof ReadableSerialOrdered))
  {
    return new ReadableSerialOrdered(list, iterator, sortMethod, callback);
  }

  // turn on object mode
  ReadableSerialOrdered.super_.call(this, {objectMode: true});

  this._start(serialOrdered, list, iterator, sortMethod, callback);
}
37 node_modules/asynckit/lib/state.js generated vendored
@@ -1,37 +0,0 @@
// API
module.exports = state;

/**
 * Creates initial state object
 * for iteration over list
 *
 * @param   {array|object} list - list to iterate over
 * @param   {function|null} sortMethod - function to use for keys sort,
 *                                       or `null` to keep them as is
 * @returns {object} - initial state object
 */
function state(list, sortMethod)
{
  var isNamedList = !Array.isArray(list)
    , initState =
      {
        index    : 0,
        keyedList: isNamedList || sortMethod ? Object.keys(list) : null,
        jobs     : {},
        results  : isNamedList ? {} : [],
        size     : isNamedList ? Object.keys(list).length : list.length
      }
    ;

  if (sortMethod)
  {
    // sort array keys based on it's values
    // sort object's keys just on own merit
    initState.keyedList.sort(isNamedList ? sortMethod : function(a, b)
    {
      return sortMethod(list[a], list[b]);
    });
  }

  return initState;
}
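A short sketch of the state objects this produces, following the logic of the removed `state.js` (require path as shown in the removed sources):

```js
var state = require('asynckit/lib/state.js');

// For an array, results is an array and keyedList stays null when no sort is requested
console.log(state(['x', 'y'], null));
// -> { index: 0, keyedList: null, jobs: {}, results: [], size: 2 }

// For a named list, keys are collected up front and results is an object
console.log(state({ a: 1, b: 2 }, null));
// -> { index: 0, keyedList: [ 'a', 'b' ], jobs: {}, results: {}, size: 2 }
```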
141 node_modules/asynckit/lib/streamify.js generated vendored
@@ -1,141 +0,0 @@
var async = require('./async.js');

// API
module.exports = {
  iterator: wrapIterator,
  callback: wrapCallback
};

/**
 * Wraps iterators with long signature
 *
 * @this    ReadableAsyncKit#
 * @param   {function} iterator - function to wrap
 * @returns {function} - wrapped function
 */
function wrapIterator(iterator)
{
  var stream = this;

  return function(item, key, cb)
  {
    var aborter
      , wrappedCb = async(wrapIteratorCallback.call(stream, cb, key))
      ;

    stream.jobs[key] = wrappedCb;

    // it's either shortcut (item, cb)
    if (iterator.length == 2)
    {
      aborter = iterator(item, wrappedCb);
    }
    // or long format (item, key, cb)
    else
    {
      aborter = iterator(item, key, wrappedCb);
    }

    return aborter;
  };
}

/**
 * Wraps provided callback function
 * allowing to execute snitch function before
 * real callback
 *
 * @this    ReadableAsyncKit#
 * @param   {function} callback - function to wrap
 * @returns {function} - wrapped function
 */
function wrapCallback(callback)
{
  var stream = this;

  var wrapped = function(error, result)
  {
    return finisher.call(stream, error, result, callback);
  };

  return wrapped;
}

/**
 * Wraps provided iterator callback function
 * makes sure snitch only called once,
 * but passes secondary calls to the original callback
 *
 * @this    ReadableAsyncKit#
 * @param   {function} callback - callback to wrap
 * @param   {number|string} key - iteration key
 * @returns {function} wrapped callback
 */
function wrapIteratorCallback(callback, key)
{
  var stream = this;

  return function(error, output)
  {
    // don't repeat yourself
    if (!(key in stream.jobs))
    {
      callback(error, output);
      return;
    }

    // clean up jobs
    delete stream.jobs[key];

    return streamer.call(stream, error, {key: key, value: output}, callback);
  };
}

/**
 * Stream wrapper for iterator callback
 *
 * @this  ReadableAsyncKit#
 * @param {mixed} error - error response
 * @param {mixed} output - iterator output
 * @param {function} callback - callback that expects iterator results
 */
function streamer(error, output, callback)
{
  if (error && !this.error)
  {
    this.error = error;
    this.pause();
    this.emit('error', error);
    // send back value only, as expected
    callback(error, output && output.value);
    return;
  }

  // stream stuff
  this.push(output);

  // back to original track
  // send back value only, as expected
  callback(error, output && output.value);
}

/**
 * Stream wrapper for finishing callback
 *
 * @this  ReadableAsyncKit#
 * @param {mixed} error - error response
 * @param {mixed} output - iterator output
 * @param {function} callback - callback that expects final results
 */
function finisher(error, output, callback)
{
  // signal end of the stream
  // only for successfully finished streams
  if (!error)
  {
    this.push(null);
  }

  // back to original track
  callback(error, output);
}
29 node_modules/asynckit/lib/terminator.js generated vendored
@@ -1,29 +0,0 @@
var abort = require('./abort.js')
  , async = require('./async.js')
  ;

// API
module.exports = terminator;

/**
 * Terminates jobs in the attached state context
 *
 * @this  AsyncKitState#
 * @param {function} callback - final callback to invoke after termination
 */
function terminator(callback)
{
  if (!Object.keys(this.jobs).length)
  {
    return;
  }

  // fast forward iteration index
  this.index = this.size;

  // abort jobs
  abort(this);

  // send back results we have so far
  async(callback)(null, this.results);
}
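The runners below (`parallel.js`, `serialOrdered.js`) return this terminator bound to their state, so callers can cancel outstanding jobs and still receive the results collected so far. A hedged sketch of that flow, using only behaviour visible in the removed sources:

```js
// Aborting an in-flight parallel run via the returned terminator.
var asynckit = require('asynckit');

var terminate = asynckit.parallel(
  [100, 200, 300],
  function(delay, cb)
  {
    var timer = setTimeout(function() { cb(null, delay); }, delay);
    // returned abort handle is what abort()/terminator() will invoke
    return function abortJob() { clearTimeout(timer); };
  },
  function done(err, results)
  {
    // on termination this receives whatever results were salvaged so far
    console.log(err, results);
  }
);

// later: stop every job that has not finished yet
setTimeout(terminate, 150);
```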
63 node_modules/asynckit/package.json generated vendored
@@ -1,63 +0,0 @@
{
  "name": "asynckit",
  "version": "0.4.0",
  "description": "Minimal async jobs utility library, with streams support",
  "main": "index.js",
  "scripts": {
    "clean": "rimraf coverage",
    "lint": "eslint *.js lib/*.js test/*.js",
    "test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec",
    "win-test": "tape test/test-*.js",
    "browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec",
    "report": "istanbul report",
    "size": "browserify index.js | size-table asynckit",
    "debug": "tape test/test-*.js"
  },
  "pre-commit": [
    "clean",
    "lint",
    "test",
    "browser",
    "report",
    "size"
  ],
  "repository": {
    "type": "git",
    "url": "git+https://github.com/alexindigo/asynckit.git"
  },
  "keywords": [
    "async",
    "jobs",
    "parallel",
    "serial",
    "iterator",
    "array",
    "object",
    "stream",
    "destroy",
    "terminate",
    "abort"
  ],
  "author": "Alex Indigo <iam@alexindigo.com>",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/alexindigo/asynckit/issues"
  },
  "homepage": "https://github.com/alexindigo/asynckit#readme",
  "devDependencies": {
    "browserify": "^13.0.0",
    "browserify-istanbul": "^2.0.0",
    "coveralls": "^2.11.9",
    "eslint": "^2.9.0",
    "istanbul": "^0.4.3",
    "obake": "^0.1.2",
    "phantomjs-prebuilt": "^2.1.7",
    "pre-commit": "^1.1.3",
    "reamde": "^1.1.0",
    "rimraf": "^2.5.2",
    "size-table": "^0.2.0",
    "tap-spec": "^4.1.1",
    "tape": "^4.5.1"
  },
  "dependencies": {}
}
43 node_modules/asynckit/parallel.js generated vendored
@@ -1,43 +0,0 @@
var iterate    = require('./lib/iterate.js')
  , initState  = require('./lib/state.js')
  , terminator = require('./lib/terminator.js')
  ;

// Public API
module.exports = parallel;

/**
 * Runs iterator over provided array elements in parallel
 *
 * @param   {array|object} list - array or object (named list) to iterate over
 * @param   {function} iterator - iterator to run
 * @param   {function} callback - invoked when all elements processed
 * @returns {function} - jobs terminator
 */
function parallel(list, iterator, callback)
{
  var state = initState(list);

  while (state.index < (state['keyedList'] || list).length)
  {
    iterate(list, iterator, state, function(error, result)
    {
      if (error)
      {
        callback(error, result);
        return;
      }

      // looks like it's the last one
      if (Object.keys(state.jobs).length === 0)
      {
        callback(null, state.results);
        return;
      }
    });

    state.index++;
  }

  return terminator.bind(state, callback);
}
17 node_modules/asynckit/serial.js generated vendored
@@ -1,17 +0,0 @@
var serialOrdered = require('./serialOrdered.js');

// Public API
module.exports = serial;

/**
 * Runs iterator over provided array elements in series
 *
 * @param   {array|object} list - array or object (named list) to iterate over
 * @param   {function} iterator - iterator to run
 * @param   {function} callback - invoked when all elements processed
 * @returns {function} - jobs terminator
 */
function serial(list, iterator, callback)
{
  return serialOrdered(list, iterator, null, callback);
}
75 node_modules/asynckit/serialOrdered.js generated vendored
@@ -1,75 +0,0 @@
var iterate    = require('./lib/iterate.js')
  , initState  = require('./lib/state.js')
  , terminator = require('./lib/terminator.js')
  ;

// Public API
module.exports = serialOrdered;
// sorting helpers
module.exports.ascending  = ascending;
module.exports.descending = descending;

/**
 * Runs iterator over provided sorted array elements in series
 *
 * @param   {array|object} list - array or object (named list) to iterate over
 * @param   {function} iterator - iterator to run
 * @param   {function} sortMethod - custom sort function
 * @param   {function} callback - invoked when all elements processed
 * @returns {function} - jobs terminator
 */
function serialOrdered(list, iterator, sortMethod, callback)
{
  var state = initState(list, sortMethod);

  iterate(list, iterator, state, function iteratorHandler(error, result)
  {
    if (error)
    {
      callback(error, result);
      return;
    }

    state.index++;

    // are we there yet?
    if (state.index < (state['keyedList'] || list).length)
    {
      iterate(list, iterator, state, iteratorHandler);
      return;
    }

    // done here
    callback(null, state.results);
  });

  return terminator.bind(state, callback);
}

/*
 * -- Sort methods
 */

/**
 * sort helper to sort array elements in ascending order
 *
 * @param   {mixed} a - an item to compare
 * @param   {mixed} b - an item to compare
 * @returns {number} - comparison result
 */
function ascending(a, b)
{
  return a < b ? -1 : a > b ? 1 : 0;
}

/**
 * sort helper to sort array elements in descending order
 *
 * @param   {mixed} a - an item to compare
 * @param   {mixed} b - an item to compare
 * @returns {number} - comparison result
 */
function descending(a, b)
{
  return -1 * ascending(a, b);
}
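A hedged sketch of `serialOrdered` with the bundled `ascending` helper. Per the removed `state.js`, for an array the indices are visited in the order given by comparing their values, while the result array keeps its original positions:

```js
var asynckit = require('asynckit');

asynckit.serialOrdered(
  [30, 10, 20],
  function(item, cb) { cb(null, item + 1); },
  asynckit.serialOrdered.ascending,   // visit indices in ascending order of their values
  function(err, results)
  {
    // results keep their original positions; only the visit order changes
    console.log(err, results);        // -> null [ 31, 11, 21 ]
  }
);
```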
21 node_modules/asynckit/stream.js generated vendored
@@ -1,21 +0,0 @@
var inherits              = require('util').inherits
  , Readable              = require('stream').Readable
  , ReadableAsyncKit      = require('./lib/readable_asynckit.js')
  , ReadableParallel      = require('./lib/readable_parallel.js')
  , ReadableSerial        = require('./lib/readable_serial.js')
  , ReadableSerialOrdered = require('./lib/readable_serial_ordered.js')
  ;

// API
module.exports =
{
  parallel      : ReadableParallel,
  serial        : ReadableSerial,
  serialOrdered : ReadableSerialOrdered,
};

inherits(ReadableAsyncKit, Readable);

inherits(ReadableParallel, ReadableAsyncKit);
inherits(ReadableSerial, ReadableAsyncKit);
inherits(ReadableSerialOrdered, ReadableAsyncKit);
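A usage sketch of the stream wrappers above (module path assumed to be `asynckit/stream`, since `stream.js` sits at the package root). Per the removed `streamify.js`, each completed job is pushed in object mode as a `{key, value}` pair and the final callback still fires:

```js
var asynckitStream = require('asynckit/stream');

var source = asynckitStream.parallel(
  ['a', 'b', 'c'],
  function(item, cb) { setImmediate(cb, null, item.toUpperCase()); },
  function(err, results) { console.log('done', err, results); }
);

// each completed job arrives as { key, value }
source.on('data', function(job) { console.log(job.key, job.value); });
source.on('error', console.error);
```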
1014 node_modules/axios/CHANGELOG.md generated vendored
File diff suppressed because it is too large
1182 node_modules/axios/README.md generated vendored
File diff suppressed because it is too large
5 node_modules/axios/SECURITY.md generated vendored
@@ -1,5 +0,0 @@
# Reporting a Vulnerability

If you discover a security vulnerability within axios, please submit a report via [huntr.dev](https://huntr.dev/bounties/?target=https%3A%2F%2Fgithub.com%2Faxios%2Faxios). Bounties and CVEs are automatically managed and allocated via the platform.

All security vulnerabilities will be promptly addressed.
169
node_modules/axios/UPGRADE_GUIDE.md
generated
vendored
169
node_modules/axios/UPGRADE_GUIDE.md
generated
vendored
@@ -1,169 +0,0 @@
|
|||||||
# Upgrade Guide
|
|
||||||
|
|
||||||
## 0.18.x -> 0.19.0
|
|
||||||
|
|
||||||
### HTTPS Proxies
|
|
||||||
|
|
||||||
Routing through an https proxy now requires setting the `protocol` attribute of the proxy configuration to `https`
|
|
||||||
|
|
||||||
## 0.15.x -> 0.16.0
|
|
||||||
|
|
||||||
### `Promise` Type Declarations
|
|
||||||
|
|
||||||
The `Promise` type declarations have been removed from the axios typings in favor of the built-in type declarations. If you use axios in a TypeScript project that targets `ES5`, please make sure to include the `es2015.promise` lib. Please see [this post](https://blog.mariusschulz.com/2016/11/25/typescript-2-0-built-in-type-declarations) for details.
|
|
||||||
|
|
||||||
## 0.13.x -> 0.14.0
|
|
||||||
|
|
||||||
### TypeScript Definitions
|
|
||||||
|
|
||||||
The axios TypeScript definitions have been updated to match the axios API and use the ES2015 module syntax.
|
|
||||||
|
|
||||||
Please use the following `import` statement to import axios in TypeScript:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import axios from 'axios';
|
|
||||||
|
|
||||||
axios.get('/foo')
|
|
||||||
.then(response => console.log(response))
|
|
||||||
.catch(error => console.log(error));
|
|
||||||
```
|
|
||||||
|
|
||||||
### `agent` Config Option
|
|
||||||
|
|
||||||
The `agent` config option has been replaced with two new options: `httpAgent` and `httpsAgent`. Please use them instead.
|
|
||||||
|
|
||||||
```js
|
|
||||||
{
|
|
||||||
// Define a custom agent for HTTP
|
|
||||||
httpAgent: new http.Agent({ keepAlive: true }),
|
|
||||||
// Define a custom agent for HTTPS
|
|
||||||
httpsAgent: new https.Agent({ keepAlive: true })
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### `progress` Config Option
|
|
||||||
|
|
||||||
The `progress` config option has been replaced with the `onUploadProgress` and `onDownloadProgress` options.
|
|
||||||
|
|
||||||
```js
|
|
||||||
{
|
|
||||||
// Define a handler for upload progress events
|
|
||||||
onUploadProgress: function (progressEvent) {
|
|
||||||
// ...
|
|
||||||
},
|
|
||||||
|
|
||||||
// Define a handler for download progress events
|
|
||||||
onDownloadProgress: function (progressEvent) {
|
|
||||||
// ...
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## 0.12.x -> 0.13.0
|
|
||||||
|
|
||||||
The `0.13.0` release contains several changes to custom adapters and error handling.
|
|
||||||
|
|
||||||
### Error Handling
|
|
||||||
|
|
||||||
Previous to this release an error could either be a server response with bad status code or an actual `Error`. With this release Promise will always reject with an `Error`. In the case that a response was received, the `Error` will also include the response.
|
|
||||||
|
|
||||||
```js
|
|
||||||
axios.get('/user/12345')
|
|
||||||
.catch((error) => {
|
|
||||||
console.log(error.message);
|
|
||||||
console.log(error.code); // Not always specified
|
|
||||||
console.log(error.config); // The config that was used to make the request
|
|
||||||
console.log(error.response); // Only available if response was received from the server
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
### Request Adapters
|
|
||||||
|
|
||||||
This release changes a few things about how request adapters work. Please take note if you are using your own custom adapter.
|
|
||||||
|
|
||||||
1. Response transformer is now called outside of adapter.
|
|
||||||
2. Request adapter returns a `Promise`.
|
|
||||||
|
|
||||||
This means that you no longer need to invoke `transformData` on response data. You will also no longer receive `resolve` and `reject` as arguments in your adapter.
|
|
||||||
|
|
||||||
Previous code:
|
|
||||||
|
|
||||||
```js
|
|
||||||
function myAdapter(resolve, reject, config) {
|
|
||||||
var response = {
|
|
||||||
data: transformData(
|
|
||||||
responseData,
|
|
||||||
responseHeaders,
|
|
||||||
config.transformResponse
|
|
||||||
),
|
|
||||||
status: request.status,
|
|
||||||
statusText: request.statusText,
|
|
||||||
headers: responseHeaders
|
|
||||||
};
|
|
||||||
settle(resolve, reject, response);
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
New code:
|
|
||||||
|
|
||||||
```js
|
|
||||||
function myAdapter(config) {
|
|
||||||
return new Promise(function (resolve, reject) {
|
|
||||||
var response = {
|
|
||||||
data: responseData,
|
|
||||||
status: request.status,
|
|
||||||
statusText: request.statusText,
|
|
||||||
headers: responseHeaders
|
|
||||||
};
|
|
||||||
settle(resolve, reject, response);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
See the related commits for more details:
|
|
||||||
|
|
||||||
- [Response transformers](https://github.com/axios/axios/commit/10eb23865101f9347570552c04e9d6211376e25e)
|
|
||||||
- [Request adapter Promise](https://github.com/axios/axios/commit/157efd5615890301824e3121cc6c9d2f9b21f94a)
|
|
||||||
|
|
||||||
## 0.5.x -> 0.6.0
|
|
||||||
|
|
||||||
The `0.6.0` release contains mostly bug fixes, but there are a couple things to be aware of when upgrading.
|
|
||||||
|
|
||||||
### ES6 Promise Polyfill
|
|
||||||
|
|
||||||
Up until the `0.6.0` release ES6 `Promise` was being polyfilled using [es6-promise](https://github.com/jakearchibald/es6-promise). With this release, the polyfill has been removed, and you will need to supply it yourself if your environment needs it.
|
|
||||||
|
|
||||||
```js
|
|
||||||
require('es6-promise').polyfill();
|
|
||||||
var axios = require('axios');
|
|
||||||
```
|
|
||||||
|
|
||||||
This will polyfill the global environment, and only needs to be done once.
|
|
||||||
|
|
||||||
### `axios.success`/`axios.error`
|
|
||||||
|
|
||||||
The `success`, and `error` aliases were deprecated in [0.4.0](https://github.com/axios/axios/blob/master/CHANGELOG.md#040-oct-03-2014). As of this release they have been removed entirely. Instead please use `axios.then`, and `axios.catch` respectively.
|
|
||||||
|
|
||||||
```js
|
|
||||||
axios.get('some/url')
|
|
||||||
.then(function (res) {
|
|
||||||
/* ... */
|
|
||||||
})
|
|
||||||
.catch(function (err) {
|
|
||||||
/* ... */
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
### UMD
|
|
||||||
|
|
||||||
Previous versions of axios shipped with an AMD, CommonJS, and Global build. This has all been rolled into a single UMD build.
|
|
||||||
|
|
||||||
```js
|
|
||||||
// AMD
|
|
||||||
require(['bower_components/axios/dist/axios'], function (axios) {
|
|
||||||
/* ... */
|
|
||||||
});
|
|
||||||
|
|
||||||
// CommonJS
|
|
||||||
var axios = require('axios/dist/axios');
|
|
||||||
```
|
|
||||||
19 node_modules/axios/bin/check-build-version.js generated vendored
@@ -1,19 +0,0 @@
const fs = require('fs');
const assert = require('assert');
const axios = require('../index.js');

const {version} = JSON.parse(fs.readFileSync('./package.json'));

console.log('Checking versions...\n----------------------------')

console.log(`Package version: v${version}`);
console.log(`Axios version: v${axios.VERSION}`);
console.log(`----------------------------`);

assert.strictEqual(
  version,
  axios.VERSION,
  `Version mismatch between package and Axios ${version} != ${axios.VERSION}`
);

console.log('✔️ PASSED\n');
22 node_modules/axios/bin/ssl_hotfix.js generated vendored
@@ -1,22 +0,0 @@
const {spawn} = require('child_process');

const args = process.argv.slice(2);

console.log(`Running ${args.join(' ')} on ${process.version}\n`);

const match = /v(\d+)/.exec(process.version);

const isHotfixNeeded = match && match[1] > 16;

isHotfixNeeded && console.warn('Setting --openssl-legacy-provider as ssl hotfix');

const test = spawn('cross-env',
  isHotfixNeeded ? ['NODE_OPTIONS=--openssl-legacy-provider', ...args] : args, {
    shell: true,
    stdio: 'inherit'
  }
);

test.on('exit', function (code) {
  process.exit(code)
})
2377 node_modules/axios/dist/axios.js generated vendored
File diff suppressed because it is too large
1 node_modules/axios/dist/axios.js.map generated vendored
File diff suppressed because one or more lines are too long
2 node_modules/axios/dist/axios.min.js generated vendored
File diff suppressed because one or more lines are too long
1 node_modules/axios/dist/axios.min.js.map generated vendored
File diff suppressed because one or more lines are too long
2369 node_modules/axios/dist/esm/axios.js generated vendored
File diff suppressed because it is too large
1 node_modules/axios/dist/esm/axios.js.map generated vendored
File diff suppressed because one or more lines are too long
2 node_modules/axios/dist/esm/axios.min.js generated vendored
File diff suppressed because one or more lines are too long
1 node_modules/axios/dist/esm/axios.min.js.map generated vendored
File diff suppressed because one or more lines are too long
340
node_modules/axios/index.d.ts
generated
vendored
340
node_modules/axios/index.d.ts
generated
vendored
@@ -1,340 +0,0 @@
|
|||||||
// TypeScript Version: 4.1
|
|
||||||
type HeaderValue = string | string[] | number | boolean;
|
|
||||||
|
|
||||||
type AxiosHeaders = Record<string, HeaderValue | Record<Method & CommonHeaders, HeaderValue>>;
|
|
||||||
|
|
||||||
type MethodsHeaders = {
|
|
||||||
[Key in Method as Lowercase<Key>]: AxiosHeaders;
|
|
||||||
};
|
|
||||||
|
|
||||||
interface CommonHeaders {
|
|
||||||
common: AxiosHeaders;
|
|
||||||
}
|
|
||||||
|
|
||||||
export type AxiosRequestHeaders = Partial<AxiosHeaders & MethodsHeaders & CommonHeaders>;
|
|
||||||
|
|
||||||
export type AxiosResponseHeaders = Partial<Record<string, string>> & {
|
|
||||||
"set-cookie"?: string[]
|
|
||||||
};
|
|
||||||
|
|
||||||
export interface AxiosRequestTransformer {
|
|
||||||
(data: any, headers: AxiosRequestHeaders): any;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface AxiosResponseTransformer {
|
|
||||||
(data: any, headers?: AxiosResponseHeaders, status?: number): any;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface AxiosAdapter {
|
|
||||||
(config: AxiosRequestConfig): AxiosPromise;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface AxiosBasicCredentials {
|
|
||||||
username: string;
|
|
||||||
password: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface AxiosProxyConfig {
|
|
||||||
host: string;
|
|
||||||
port: number;
|
|
||||||
auth?: {
|
|
||||||
username: string;
|
|
||||||
password: string;
|
|
||||||
};
|
|
||||||
protocol?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export type Method =
|
|
||||||
| 'get' | 'GET'
|
|
||||||
| 'delete' | 'DELETE'
|
|
||||||
| 'head' | 'HEAD'
|
|
||||||
| 'options' | 'OPTIONS'
|
|
||||||
| 'post' | 'POST'
|
|
||||||
| 'put' | 'PUT'
|
|
||||||
| 'patch' | 'PATCH'
|
|
||||||
| 'purge' | 'PURGE'
|
|
||||||
| 'link' | 'LINK'
|
|
||||||
| 'unlink' | 'UNLINK';
|
|
||||||
|
|
||||||
export type ResponseType =
|
|
||||||
| 'arraybuffer'
|
|
||||||
| 'blob'
|
|
||||||
| 'document'
|
|
||||||
| 'json'
|
|
||||||
| 'text'
|
|
||||||
| 'stream';
|
|
||||||
|
|
||||||
export type responseEncoding =
|
|
||||||
| 'ascii' | 'ASCII'
|
|
||||||
| 'ansi' | 'ANSI'
|
|
||||||
| 'binary' | 'BINARY'
|
|
||||||
| 'base64' | 'BASE64'
|
|
||||||
| 'base64url' | 'BASE64URL'
|
|
||||||
| 'hex' | 'HEX'
|
|
||||||
| 'latin1' | 'LATIN1'
|
|
||||||
| 'ucs-2' | 'UCS-2'
|
|
||||||
| 'ucs2' | 'UCS2'
|
|
||||||
| 'utf-8' | 'UTF-8'
|
|
||||||
| 'utf8' | 'UTF8'
|
|
||||||
| 'utf16le' | 'UTF16LE';
|
|
||||||
|
|
||||||
export interface TransitionalOptions {
|
|
||||||
silentJSONParsing?: boolean;
|
|
||||||
forcedJSONParsing?: boolean;
|
|
||||||
clarifyTimeoutError?: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface GenericAbortSignal {
|
|
||||||
aborted: boolean;
|
|
||||||
onabort: ((...args: any) => any) | null;
|
|
||||||
addEventListener: (...args: any) => any;
|
|
||||||
removeEventListener: (...args: any) => any;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface FormDataVisitorHelpers {
|
|
||||||
defaultVisitor: SerializerVisitor;
|
|
||||||
convertValue: (value: any) => any;
|
|
||||||
isVisitable: (value: any) => boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface SerializerVisitor {
|
|
||||||
(
|
|
||||||
this: GenericFormData,
|
|
||||||
value: any,
|
|
||||||
key: string | number,
|
|
||||||
path: null | Array<string | number>,
|
|
||||||
helpers: FormDataVisitorHelpers
|
|
||||||
): boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface SerializerOptions {
|
|
||||||
visitor?: SerializerVisitor;
|
|
||||||
dots?: boolean;
|
|
||||||
metaTokens?: boolean;
|
|
||||||
indexes?: boolean | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
// tslint:disable-next-line
|
|
||||||
export interface FormSerializerOptions extends SerializerOptions {
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ParamEncoder {
|
|
||||||
(value: any, defaultEncoder: (value: any) => any): any;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface CustomParamsSerializer {
|
|
||||||
(params: Record<string, any>, options?: ParamsSerializerOptions): string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ParamsSerializerOptions extends SerializerOptions {
|
|
||||||
encode?: ParamEncoder;
|
|
||||||
serialize?: CustomParamsSerializer;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface AxiosRequestConfig<D = any> {
|
|
||||||
url?: string;
|
|
||||||
method?: Method | string;
|
|
||||||
baseURL?: string;
|
|
||||||
transformRequest?: AxiosRequestTransformer | AxiosRequestTransformer[];
|
|
||||||
transformResponse?: AxiosResponseTransformer | AxiosResponseTransformer[];
|
|
||||||
headers?: AxiosRequestHeaders;
|
|
||||||
params?: any;
|
|
||||||
paramsSerializer?: ParamsSerializerOptions | CustomParamsSerializer;
|
|
||||||
data?: D;
|
|
||||||
timeout?: number;
|
|
||||||
timeoutErrorMessage?: string;
|
|
||||||
withCredentials?: boolean;
|
|
||||||
adapter?: AxiosAdapter;
|
|
||||||
auth?: AxiosBasicCredentials;
|
|
||||||
responseType?: ResponseType;
|
|
||||||
responseEncoding?: responseEncoding | string;
|
|
||||||
xsrfCookieName?: string;
|
|
||||||
xsrfHeaderName?: string;
|
|
||||||
onUploadProgress?: (progressEvent: ProgressEvent) => void;
|
|
||||||
onDownloadProgress?: (progressEvent: ProgressEvent) => void;
|
|
||||||
maxContentLength?: number;
|
|
||||||
validateStatus?: ((status: number) => boolean) | null;
|
|
||||||
maxBodyLength?: number;
|
|
||||||
maxRedirects?: number;
|
|
||||||
beforeRedirect?: (options: Record<string, any>, responseDetails: {headers: Record<string, string>}) => void;
|
|
||||||
socketPath?: string | null;
|
|
||||||
httpAgent?: any;
|
|
||||||
httpsAgent?: any;
|
|
||||||
proxy?: AxiosProxyConfig | false;
|
|
||||||
cancelToken?: CancelToken;
|
|
||||||
decompress?: boolean;
|
|
||||||
transitional?: TransitionalOptions;
|
|
||||||
signal?: GenericAbortSignal;
|
|
||||||
insecureHTTPParser?: boolean;
|
|
||||||
env?: {
|
|
||||||
FormData?: new (...args: any[]) => object;
|
|
||||||
};
|
|
||||||
formSerializer?: FormSerializerOptions;
|
|
||||||
withXSRFToken?: boolean | ((config: AxiosRequestConfig) => boolean | undefined);
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface HeadersDefaults {
|
|
||||||
common: AxiosRequestHeaders;
|
|
||||||
delete: AxiosRequestHeaders;
|
|
||||||
get: AxiosRequestHeaders;
|
|
||||||
head: AxiosRequestHeaders;
|
|
||||||
post: AxiosRequestHeaders;
|
|
||||||
put: AxiosRequestHeaders;
|
|
||||||
patch: AxiosRequestHeaders;
|
|
||||||
options?: AxiosRequestHeaders;
|
|
||||||
purge?: AxiosRequestHeaders;
|
|
||||||
link?: AxiosRequestHeaders;
|
|
||||||
unlink?: AxiosRequestHeaders;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface AxiosDefaults<D = any> extends Omit<AxiosRequestConfig<D>, 'headers'> {
|
|
||||||
headers: HeadersDefaults;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface CreateAxiosDefaults<D = any> extends Omit<AxiosRequestConfig<D>, 'headers'> {
|
|
||||||
headers?: AxiosRequestHeaders | Partial<HeadersDefaults>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface AxiosResponse<T = any, D = any> {
|
|
||||||
data: T;
|
|
||||||
status: number;
|
|
||||||
statusText: string;
|
|
||||||
headers: AxiosResponseHeaders;
|
|
||||||
config: AxiosRequestConfig<D>;
|
|
||||||
request?: any;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class AxiosError<T = unknown, D = any> extends Error {
|
|
||||||
constructor(
|
|
||||||
message?: string,
|
|
||||||
code?: string,
|
|
||||||
config?: AxiosRequestConfig<D>,
|
|
||||||
request?: any,
|
|
||||||
response?: AxiosResponse<T, D>
|
|
||||||
);
|
|
||||||
|
|
||||||
config?: AxiosRequestConfig<D>;
|
|
||||||
code?: string;
|
|
||||||
request?: any;
|
|
||||||
response?: AxiosResponse<T, D>;
|
|
||||||
isAxiosError: boolean;
|
|
||||||
status?: number;
|
|
||||||
toJSON: () => object;
|
|
||||||
cause?: Error;
|
|
||||||
static readonly ERR_FR_TOO_MANY_REDIRECTS = "ERR_FR_TOO_MANY_REDIRECTS";
|
|
||||||
static readonly ERR_BAD_OPTION_VALUE = "ERR_BAD_OPTION_VALUE";
|
|
||||||
static readonly ERR_BAD_OPTION = "ERR_BAD_OPTION";
|
|
||||||
static readonly ERR_NETWORK = "ERR_NETWORK";
|
|
||||||
static readonly ERR_DEPRECATED = "ERR_DEPRECATED";
|
|
||||||
static readonly ERR_BAD_RESPONSE = "ERR_BAD_RESPONSE";
|
|
||||||
static readonly ERR_BAD_REQUEST = "ERR_BAD_REQUEST";
|
|
||||||
static readonly ERR_NOT_SUPPORT = "ERR_NOT_SUPPORT";
|
|
||||||
static readonly ERR_INVALID_URL = "ERR_INVALID_URL";
|
|
||||||
static readonly ERR_CANCELED = "ERR_CANCELED";
|
|
||||||
static readonly ECONNABORTED = "ECONNABORTED";
|
|
||||||
static readonly ETIMEDOUT = "ETIMEDOUT";
|
|
||||||
}
|
|
||||||
|
|
||||||
export class CanceledError<T> extends AxiosError<T> {
|
|
||||||
}
|
|
||||||
|
|
||||||
export type AxiosPromise<T = any> = Promise<AxiosResponse<T>>;
|
|
||||||
|
|
||||||
export interface CancelStatic {
|
|
||||||
new (message?: string): Cancel;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface Cancel {
|
|
||||||
message: string | undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface Canceler {
|
|
||||||
(message?: string, config?: AxiosRequestConfig, request?: any): void;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface CancelTokenStatic {
|
|
||||||
new (executor: (cancel: Canceler) => void): CancelToken;
|
|
||||||
source(): CancelTokenSource;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface CancelToken {
|
|
||||||
promise: Promise<Cancel>;
|
|
||||||
reason?: Cancel;
|
|
||||||
throwIfRequested(): void;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface CancelTokenSource {
|
|
||||||
token: CancelToken;
|
|
||||||
cancel: Canceler;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface AxiosInterceptorOptions {
|
|
||||||
synchronous?: boolean;
|
|
||||||
runWhen?: (config: AxiosRequestConfig) => boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface AxiosInterceptorManager<V> {
|
|
||||||
use<T = V>(onFulfilled?: (value: V) => T | Promise<T>, onRejected?: (error: any) => any, options?: AxiosInterceptorOptions): number;
|
|
||||||
eject(id: number): void;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class Axios {
|
|
||||||
constructor(config?: AxiosRequestConfig);
|
|
||||||
defaults: AxiosDefaults;
|
|
||||||
interceptors: {
|
|
||||||
request: AxiosInterceptorManager<AxiosRequestConfig>;
|
|
||||||
response: AxiosInterceptorManager<AxiosResponse>;
|
|
||||||
};
|
|
||||||
getUri(config?: AxiosRequestConfig): string;
|
|
||||||
request<T = any, R = AxiosResponse<T>, D = any>(config: AxiosRequestConfig<D>): Promise<R>;
|
|
||||||
get<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
|
|
||||||
delete<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
|
|
||||||
head<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
|
|
||||||
options<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
|
|
||||||
post<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
|
|
||||||
put<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
|
|
||||||
patch<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
|
|
||||||
postForm<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
|
|
||||||
putForm<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
|
|
||||||
patchForm<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface AxiosInstance extends Axios {
|
|
||||||
<T = any, R = AxiosResponse<T>, D = any>(config: AxiosRequestConfig<D>): AxiosPromise<R>;
|
|
||||||
<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): AxiosPromise<R>;
|
|
||||||
|
|
||||||
defaults: Omit<AxiosDefaults, 'headers'> & {
|
|
||||||
headers: HeadersDefaults & {
|
|
||||||
[key: string]: string | number | boolean | undefined
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface GenericFormData {
|
|
||||||
append(name: string, value: any, options?: any): any;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface GenericHTMLFormElement {
|
|
||||||
name: string;
|
|
||||||
method: string;
|
|
||||||
submit(): void;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface AxiosStatic extends AxiosInstance {
|
|
||||||
create(config?: CreateAxiosDefaults): AxiosInstance;
|
|
||||||
Cancel: CancelStatic;
|
|
||||||
CancelToken: CancelTokenStatic;
|
|
||||||
Axios: typeof Axios;
|
|
||||||
AxiosError: typeof AxiosError;
|
|
||||||
readonly VERSION: string;
|
|
||||||
isCancel(value: any): value is Cancel;
|
|
||||||
all<T>(values: Array<T | Promise<T>>): Promise<T[]>;
|
|
||||||
spread<T, R>(callback: (...args: T[]) => R): (array: T[]) => R;
|
|
||||||
isAxiosError<T = any, D = any>(payload: any): payload is AxiosError<T, D>;
|
|
||||||
toFormData(sourceObj: object, targetFormData?: GenericFormData, options?: FormSerializerOptions): GenericFormData;
|
|
||||||
formToJSON(form: GenericFormData|GenericHTMLFormElement): object;
|
|
||||||
}
|
|
||||||
|
|
||||||
declare const axios: AxiosStatic;
|
|
||||||
|
|
||||||
export default axios;
|
|
||||||
1 node_modules/axios/index.js generated vendored
@@ -1 +0,0 @@
module.exports = require('./lib/axios');
37 node_modules/axios/lib/adapters/README.md generated vendored
@@ -1,37 +0,0 @@
# axios // adapters

The modules under `adapters/` are modules that handle dispatching a request and settling a returned `Promise` once a response is received.

## Example

```js
var settle = require('./../core/settle');

module.exports = function myAdapter(config) {
  // At this point:
  //  - config has been merged with defaults
  //  - request transformers have already run
  //  - request interceptors have already run

  // Make the request using config provided
  // Upon response settle the Promise

  return new Promise(function(resolve, reject) {

    var response = {
      data: responseData,
      status: request.status,
      statusText: request.statusText,
      headers: responseHeaders,
      config: config,
      request: request
    };

    settle(resolve, reject, response);

    // From here:
    //  - response transformers will run
    //  - response interceptors will run
  });
}
```
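For completeness, a hedged sketch of how such an adapter would be wired in, using the `adapter` request option declared in the removed `index.d.ts` (`adapter?: AxiosAdapter`); `./myAdapter` is a hypothetical module following the README above:

```js
const axios = require('axios');
const myAdapter = require('./myAdapter'); // hypothetical custom adapter module

// route all requests made by this instance through the custom adapter
const client = axios.create({ adapter: myAdapter });

client.get('/foo').then(function (response) {
  console.log(response.status, response.data);
});
```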
463
node_modules/axios/lib/adapters/http.js
generated
vendored
463
node_modules/axios/lib/adapters/http.js
generated
vendored
@@ -1,463 +0,0 @@
|
|||||||
'use strict';
|
|
||||||
|
|
||||||
var utils = require('./../utils');
|
|
||||||
var settle = require('./../core/settle');
|
|
||||||
var buildFullPath = require('../core/buildFullPath');
|
|
||||||
var buildURL = require('./../helpers/buildURL');
|
|
||||||
var getProxyForUrl = require('proxy-from-env').getProxyForUrl;
|
|
||||||
var http = require('http');
|
|
||||||
var https = require('https');
|
|
||||||
var httpFollow = require('follow-redirects/http');
|
|
||||||
var httpsFollow = require('follow-redirects/https');
|
|
||||||
var url = require('url');
|
|
||||||
var zlib = require('zlib');
|
|
||||||
var VERSION = require('./../env/data').version;
|
|
||||||
var transitionalDefaults = require('../defaults/transitional');
|
|
||||||
var AxiosError = require('../core/AxiosError');
|
|
||||||
var CanceledError = require('../cancel/CanceledError');
|
|
||||||
var platform = require('../platform');
|
|
||||||
var fromDataURI = require('../helpers/fromDataURI');
|
|
||||||
var stream = require('stream');
|
|
||||||
|
|
||||||
var isHttps = /https:?/;
|
|
||||||
|
|
||||||
var supportedProtocols = platform.protocols.map(function(protocol) {
|
|
||||||
return protocol + ':';
|
|
||||||
});
|
|
||||||
|
|
||||||
function dispatchBeforeRedirect(options) {
|
|
||||||
if (options.beforeRedirects.proxy) {
|
|
||||||
options.beforeRedirects.proxy(options);
|
|
||||||
}
|
|
||||||
if (options.beforeRedirects.config) {
|
|
||||||
options.beforeRedirects.config(options);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
* @param {http.ClientRequestArgs} options
|
|
||||||
* @param {AxiosProxyConfig} configProxy
|
|
||||||
* @param {string} location
|
|
||||||
*/
|
|
||||||
function setProxy(options, configProxy, location) {
|
|
||||||
var proxy = configProxy;
|
|
||||||
if (!proxy && proxy !== false) {
|
|
||||||
var proxyUrl = getProxyForUrl(location);
|
|
||||||
if (proxyUrl) {
|
|
||||||
proxy = url.parse(proxyUrl);
|
|
||||||
// replace 'host' since the proxy object is not a URL object
|
|
||||||
proxy.host = proxy.hostname;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (proxy) {
|
|
||||||
// Basic proxy authorization
|
|
||||||
if (proxy.auth) {
|
|
||||||
// Support proxy auth object form
|
|
||||||
if (proxy.auth.username || proxy.auth.password) {
|
|
||||||
proxy.auth = (proxy.auth.username || '') + ':' + (proxy.auth.password || '');
|
|
||||||
}
|
|
||||||
var base64 = Buffer
|
|
||||||
.from(proxy.auth, 'utf8')
|
|
||||||
.toString('base64');
|
|
||||||
options.headers['Proxy-Authorization'] = 'Basic ' + base64;
|
|
||||||
}
|
|
||||||
|
|
||||||
options.headers.host = options.hostname + (options.port ? ':' + options.port : '');
|
|
||||||
options.hostname = proxy.host;
|
|
||||||
options.host = proxy.host;
|
|
||||||
options.port = proxy.port;
|
|
||||||
options.path = location;
|
|
||||||
if (proxy.protocol) {
|
|
||||||
options.protocol = proxy.protocol;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
options.beforeRedirects.proxy = function beforeRedirect(redirectOptions) {
|
|
||||||
// Configure proxy for redirected request, passing the original config proxy to apply
|
|
||||||
// the exact same logic as if the redirected request was performed by axios directly.
|
|
||||||
setProxy(redirectOptions, configProxy, redirectOptions.href);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/*eslint consistent-return:0*/
|
|
||||||
module.exports = function httpAdapter(config) {
|
|
||||||
return new Promise(function dispatchHttpRequest(resolvePromise, rejectPromise) {
|
|
||||||
var onCanceled;
|
|
||||||
function done() {
|
|
||||||
if (config.cancelToken) {
|
|
||||||
config.cancelToken.unsubscribe(onCanceled);
|
|
||||||
}
|
|
||||||
|
|
||||||
      if (config.signal) {
        config.signal.removeEventListener('abort', onCanceled);
      }
    }
    var resolve = function resolve(value) {
      done();
      resolvePromise(value);
    };
    var rejected = false;
    var reject = function reject(value) {
      done();
      rejected = true;
      rejectPromise(value);
    };
    var data = config.data;
    var responseType = config.responseType;
    var responseEncoding = config.responseEncoding;
    var method = config.method.toUpperCase();

    // Parse url
    var fullPath = buildFullPath(config.baseURL, config.url);
    var parsed = url.parse(fullPath);
    var protocol = parsed.protocol || supportedProtocols[0];

    if (protocol === 'data:') {
      var convertedData;

      if (method !== 'GET') {
        return settle(resolve, reject, {
          status: 405,
          statusText: 'method not allowed',
          headers: {},
          config: config
        });
      }

      try {
        convertedData = fromDataURI(config.url, responseType === 'blob', {
          Blob: config.env && config.env.Blob
        });
      } catch (err) {
        throw AxiosError.from(err, AxiosError.ERR_BAD_REQUEST, config);
      }

      if (responseType === 'text') {
        convertedData = convertedData.toString(responseEncoding);

        if (!responseEncoding || responseEncoding === 'utf8') {
          data = utils.stripBOM(convertedData);
        }
      } else if (responseType === 'stream') {
        convertedData = stream.Readable.from(convertedData);
      }

      return settle(resolve, reject, {
        data: convertedData,
        status: 200,
        statusText: 'OK',
        headers: {},
        config: config
      });
    }

    if (supportedProtocols.indexOf(protocol) === -1) {
      return reject(new AxiosError(
        'Unsupported protocol ' + protocol,
        AxiosError.ERR_BAD_REQUEST,
        config
      ));
    }

    var headers = config.headers;
    var headerNames = {};

    Object.keys(headers).forEach(function storeLowerName(name) {
      headerNames[name.toLowerCase()] = name;
    });

    // Set User-Agent (required by some servers)
    // See https://github.com/axios/axios/issues/69
    if ('user-agent' in headerNames) {
      // User-Agent is specified; handle case where no UA header is desired
      if (!headers[headerNames['user-agent']]) {
        delete headers[headerNames['user-agent']];
      }
      // Otherwise, use specified value
    } else {
      // Only set header if it hasn't been set in config
      headers['User-Agent'] = 'axios/' + VERSION;
    }

    // support for https://www.npmjs.com/package/form-data api
    if (utils.isFormData(data) && utils.isFunction(data.getHeaders)) {
      Object.assign(headers, data.getHeaders());
    } else if (data && !utils.isStream(data)) {
      if (Buffer.isBuffer(data)) {
        // Nothing to do...
      } else if (utils.isArrayBuffer(data)) {
        data = Buffer.from(new Uint8Array(data));
      } else if (utils.isString(data)) {
        data = Buffer.from(data, 'utf-8');
      } else {
        return reject(new AxiosError(
          'Data after transformation must be a string, an ArrayBuffer, a Buffer, or a Stream',
          AxiosError.ERR_BAD_REQUEST,
          config
        ));
      }

      if (config.maxBodyLength > -1 && data.length > config.maxBodyLength) {
        return reject(new AxiosError(
          'Request body larger than maxBodyLength limit',
          AxiosError.ERR_BAD_REQUEST,
          config
        ));
      }

      // Add Content-Length header if data exists
      if (!headerNames['content-length']) {
        headers['Content-Length'] = data.length;
      }
    }

    // HTTP basic authentication
    var auth = undefined;
    if (config.auth) {
      var username = config.auth.username || '';
      var password = config.auth.password || '';
      auth = username + ':' + password;
    }

    if (!auth && parsed.auth) {
      var urlAuth = parsed.auth.split(':');
      var urlUsername = urlAuth[0] || '';
      var urlPassword = urlAuth[1] || '';
      auth = urlUsername + ':' + urlPassword;
    }

    if (auth && headerNames.authorization) {
      delete headers[headerNames.authorization];
    }

    try {
      buildURL(parsed.path, config.params, config.paramsSerializer).replace(/^\?/, '');
    } catch (err) {
      var customErr = new Error(err.message);
      customErr.config = config;
      customErr.url = config.url;
      customErr.exists = true;
      reject(customErr);
    }

    var options = {
      path: buildURL(parsed.path, config.params, config.paramsSerializer).replace(/^\?/, ''),
      method: method,
      headers: headers,
      agents: { http: config.httpAgent, https: config.httpsAgent },
      auth: auth,
      protocol: protocol,
      beforeRedirect: dispatchBeforeRedirect,
      beforeRedirects: {}
    };

    if (config.socketPath) {
      options.socketPath = config.socketPath;
    } else {
      options.hostname = parsed.hostname;
      options.port = parsed.port;
      setProxy(options, config.proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path);
    }

    var transport;
    var isHttpsRequest = isHttps.test(options.protocol);
    options.agent = isHttpsRequest ? config.httpsAgent : config.httpAgent;
    if (config.transport) {
      transport = config.transport;
    } else if (config.maxRedirects === 0) {
      transport = isHttpsRequest ? https : http;
    } else {
      if (config.maxRedirects) {
        options.maxRedirects = config.maxRedirects;
      }
      if (config.beforeRedirect) {
        options.beforeRedirects.config = config.beforeRedirect;
      }
      transport = isHttpsRequest ? httpsFollow : httpFollow;
    }

    if (config.maxBodyLength > -1) {
      options.maxBodyLength = config.maxBodyLength;
    } else {
      // follow-redirects does not skip the size check, so pass Infinity to mirror
      // axios's -1 (unlimited) setting
      options.maxBodyLength = Infinity;
    }

    if (config.insecureHTTPParser) {
      options.insecureHTTPParser = config.insecureHTTPParser;
    }

    // Create the request
    var req = transport.request(options, function handleResponse(res) {
      if (req.aborted) return;

      // uncompress the response body transparently if required
      var responseStream = res;

      // return the last request in case of redirects
      var lastRequest = res.req || req;

      // if decompression is disabled we should not decompress
      if (config.decompress !== false) {
        // if there is no content, but the headers still say it is encoded,
        // remove the header so as not to confuse downstream operations
        if (data && data.length === 0 && res.headers['content-encoding']) {
          delete res.headers['content-encoding'];
        }

        switch (res.headers['content-encoding']) {
        /*eslint default-case:0*/
        case 'gzip':
        case 'compress':
        case 'deflate':
          // add the unzipper to the body stream processing pipeline
          responseStream = responseStream.pipe(zlib.createUnzip());

          // remove the content-encoding in order to not confuse downstream operations
          delete res.headers['content-encoding'];
          break;
        }
      }

      var response = {
        status: res.statusCode,
        statusText: res.statusMessage,
        headers: res.headers,
        config: config,
        request: lastRequest
      };

      if (responseType === 'stream') {
        response.data = responseStream;
        settle(resolve, reject, response);
      } else {
        var responseBuffer = [];
        var totalResponseBytes = 0;
        responseStream.on('data', function handleStreamData(chunk) {
          responseBuffer.push(chunk);
          totalResponseBytes += chunk.length;

          // make sure the content length is not over the maxContentLength if specified
          if (config.maxContentLength > -1 && totalResponseBytes > config.maxContentLength) {
            // stream.destroy() emits the 'aborted' event before reject() is called on Node.js v16,
            // so flag the rejection first
            rejected = true;
            responseStream.destroy();
            reject(new AxiosError('maxContentLength size of ' + config.maxContentLength + ' exceeded',
              AxiosError.ERR_BAD_RESPONSE, config, lastRequest));
          }
        });

        responseStream.on('aborted', function handlerStreamAborted() {
          if (rejected) {
            return;
          }
          responseStream.destroy();
          reject(new AxiosError(
            'maxContentLength size of ' + config.maxContentLength + ' exceeded',
            AxiosError.ERR_BAD_RESPONSE,
            config,
            lastRequest
          ));
        });

        responseStream.on('error', function handleStreamError(err) {
          if (req.aborted) return;
          reject(AxiosError.from(err, null, config, lastRequest));
        });

        responseStream.on('end', function handleStreamEnd() {
          try {
            var responseData = responseBuffer.length === 1 ? responseBuffer[0] : Buffer.concat(responseBuffer);
            if (responseType !== 'arraybuffer') {
              responseData = responseData.toString(responseEncoding);
              if (!responseEncoding || responseEncoding === 'utf8') {
                responseData = utils.stripBOM(responseData);
              }
            }
            response.data = responseData;
          } catch (err) {
            reject(AxiosError.from(err, null, config, response.request, response));
          }
          settle(resolve, reject, response);
        });
      }
    });

    // Handle errors
    req.on('error', function handleRequestError(err) {
      // @todo remove
      // if (req.aborted && err.code !== AxiosError.ERR_FR_TOO_MANY_REDIRECTS) return;
      reject(AxiosError.from(err, null, config, req));
    });

    // set TCP keep-alive to prevent the peer from dropping the connection
    req.on('socket', function handleRequestSocket(socket) {
      // start sending keep-alive probes after 1 minute of socket inactivity
      socket.setKeepAlive(true, 1000 * 60);
    });

    // Handle request timeout
    if (config.timeout) {
      // Force an integer timeout to avoid problems if the `req` interface doesn't handle other types.
      var timeout = parseInt(config.timeout, 10);

      if (isNaN(timeout)) {
        reject(new AxiosError(
          'error trying to parse `config.timeout` to int',
          AxiosError.ERR_BAD_OPTION_VALUE,
          config,
          req
        ));

        return;
      }

      // Sometimes the response is very slow or never arrives, and the connect event is blocked by the event loop.
      // The timer callback then fires and abort() is invoked before the connection is established, producing
      // "socket hang up" errors with code ECONNRESET. With a large number of requests, Node.js keeps more and
      // more of these hung sockets open in the background, and they gradually consume CPU.
      // ClientRequest.setTimeout fires after the specified milliseconds and ensures that abort() runs after connect.
      req.setTimeout(timeout, function handleRequestTimeout() {
        req.abort();
        var timeoutErrorMessage = config.timeout ? 'timeout of ' + config.timeout + 'ms exceeded' : 'timeout exceeded';
        var transitional = config.transitional || transitionalDefaults;
        if (config.timeoutErrorMessage) {
          timeoutErrorMessage = config.timeoutErrorMessage;
        }
        reject(new AxiosError(
          timeoutErrorMessage,
          transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED,
          config,
          req
        ));
      });
    }

    if (config.cancelToken || config.signal) {
      // Handle cancellation
      // eslint-disable-next-line func-names
      onCanceled = function(cancel) {
        if (req.aborted) return;

        req.abort();
        reject(!cancel || cancel.type ? new CanceledError(null, config, req) : cancel);
      };

      config.cancelToken && config.cancelToken.subscribe(onCanceled);
      if (config.signal) {
        config.signal.aborted ? onCanceled() : config.signal.addEventListener('abort', onCanceled);
      }
    }


    // Send the request
    if (utils.isStream(data)) {
      data.on('error', function handleStreamError(err) {
        reject(AxiosError.from(err, config, null, req));
      }).pipe(req);
    } else {
      req.end(data);
    }
  });
};
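For reference, a minimal usage sketch of the request options this Node.js adapter reads; the URL, size limits, and timeout below are hypothetical values, not taken from this repository.

// Sketch only: the config fields map to the options consumed by the adapter above.
const axios = require('axios');
const https = require('https');

axios.get('https://example.invalid/report', {
  responseType: 'stream',                      // handled by the 'stream' branch above
  maxRedirects: 5,                             // forwarded to follow-redirects
  maxContentLength: 10 * 1024 * 1024,          // enforced in the 'data' stream handler
  maxBodyLength: 2 * 1024 * 1024,              // request body limit checked before sending
  httpsAgent: new https.Agent({ keepAlive: true }),
  timeout: 5000                                // passed to req.setTimeout()
}).then(function (res) {
  res.data.pipe(process.stdout);               // res.data is the raw response stream
});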
226
node_modules/axios/lib/adapters/xhr.js
generated
vendored
226
node_modules/axios/lib/adapters/xhr.js
generated
vendored
@@ -1,226 +0,0 @@
'use strict';

var utils = require('./../utils');
var settle = require('./../core/settle');
var cookies = require('./../helpers/cookies');
var buildURL = require('./../helpers/buildURL');
var buildFullPath = require('../core/buildFullPath');
var parseHeaders = require('./../helpers/parseHeaders');
var isURLSameOrigin = require('./../helpers/isURLSameOrigin');
var transitionalDefaults = require('../defaults/transitional');
var AxiosError = require('../core/AxiosError');
var CanceledError = require('../cancel/CanceledError');
var parseProtocol = require('../helpers/parseProtocol');
var platform = require('../platform');

module.exports = function xhrAdapter(config) {
  return new Promise(function dispatchXhrRequest(resolve, reject) {
    var requestData = config.data;
    var requestHeaders = config.headers;
    var responseType = config.responseType;
    var withXSRFToken = config.withXSRFToken;
    var onCanceled;
    function done() {
      if (config.cancelToken) {
        config.cancelToken.unsubscribe(onCanceled);
      }

      if (config.signal) {
        config.signal.removeEventListener('abort', onCanceled);
      }
    }

    if (utils.isFormData(requestData) && utils.isStandardBrowserEnv()) {
      delete requestHeaders['Content-Type']; // Let the browser set it
    }

    var request = new XMLHttpRequest();

    // HTTP basic authentication
    if (config.auth) {
      var username = config.auth.username || '';
      var password = config.auth.password ? unescape(encodeURIComponent(config.auth.password)) : '';
      requestHeaders.Authorization = 'Basic ' + btoa(username + ':' + password);
    }

    var fullPath = buildFullPath(config.baseURL, config.url);

    request.open(config.method.toUpperCase(), buildURL(fullPath, config.params, config.paramsSerializer), true);

    // Set the request timeout in MS
    request.timeout = config.timeout;

    function onloadend() {
      if (!request) {
        return;
      }
      // Prepare the response
      var responseHeaders = 'getAllResponseHeaders' in request ? parseHeaders(request.getAllResponseHeaders()) : null;
      var responseData = !responseType || responseType === 'text' || responseType === 'json' ?
        request.responseText : request.response;
      var response = {
        data: responseData,
        status: request.status,
        statusText: request.statusText,
        headers: responseHeaders,
        config: config,
        request: request
      };

      settle(function _resolve(value) {
        resolve(value);
        done();
      }, function _reject(err) {
        reject(err);
        done();
      }, response);

      // Clean up request
      request = null;
    }

    if ('onloadend' in request) {
      // Use onloadend if available
      request.onloadend = onloadend;
    } else {
      // Listen for ready state to emulate onloadend
      request.onreadystatechange = function handleLoad() {
        if (!request || request.readyState !== 4) {
          return;
        }

        // The request errored out and we didn't get a response; this will be
        // handled by onerror instead.
        // One exception: for requests using the file: protocol, most browsers
        // will return a status of 0 even though the request was successful.
        if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) {
          return;
        }
        // The readystate handler is called before the onerror or ontimeout handlers,
        // so we should call onloadend on the next 'tick'
        setTimeout(onloadend);
      };
    }

    // Handle browser request cancellation (as opposed to a manual cancellation)
    request.onabort = function handleAbort() {
      if (!request) {
        return;
      }

      reject(new AxiosError('Request aborted', AxiosError.ECONNABORTED, config, request));

      // Clean up request
      request = null;
    };

    // Handle low level network errors
    request.onerror = function handleError() {
      // Real errors are hidden from us by the browser
      // onerror should only fire if it's a network error
      reject(new AxiosError('Network Error', AxiosError.ERR_NETWORK, config, request));

      // Clean up request
      request = null;
    };

    // Handle timeout
    request.ontimeout = function handleTimeout() {
      var timeoutErrorMessage = config.timeout ? 'timeout of ' + config.timeout + 'ms exceeded' : 'timeout exceeded';
      var transitional = config.transitional || transitionalDefaults;
      if (config.timeoutErrorMessage) {
        timeoutErrorMessage = config.timeoutErrorMessage;
      }
      reject(new AxiosError(
        timeoutErrorMessage,
        transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED,
        config,
        request));

      // Clean up request
      request = null;
    };

    // Add xsrf header
    // This is only done if running in a standard browser environment.
    // Specifically not if we're in a web worker, or react-native.
    if (utils.isStandardBrowserEnv()) {
      // Add xsrf header
      withXSRFToken && utils.isFunction(withXSRFToken) && (withXSRFToken = withXSRFToken(config));
      if (withXSRFToken || (withXSRFToken !== false && isURLSameOrigin(fullPath))) {
        // Add xsrf header
        var xsrfValue = config.xsrfHeaderName && config.xsrfCookieName && cookies.read(config.xsrfCookieName);
        if (xsrfValue) {
          requestHeaders[config.xsrfHeaderName] = xsrfValue;
        }
      }
    }

    // Add headers to the request
    if ('setRequestHeader' in request) {
      utils.forEach(requestHeaders, function setRequestHeader(val, key) {
        if (typeof requestData === 'undefined' && key.toLowerCase() === 'content-type') {
          // Remove Content-Type if data is undefined
          delete requestHeaders[key];
        } else {
          // Otherwise add header to the request
          request.setRequestHeader(key, val);
        }
      });
    }

    // Add withCredentials to request if needed
    if (!utils.isUndefined(config.withCredentials)) {
      request.withCredentials = !!config.withCredentials;
    }

    // Add responseType to request if needed
    if (responseType && responseType !== 'json') {
      request.responseType = config.responseType;
    }

    // Handle progress if needed
    if (typeof config.onDownloadProgress === 'function') {
      request.addEventListener('progress', config.onDownloadProgress);
    }

    // Not all browsers support upload events
    if (typeof config.onUploadProgress === 'function' && request.upload) {
      request.upload.addEventListener('progress', config.onUploadProgress);
    }

    if (config.cancelToken || config.signal) {
      // Handle cancellation
      // eslint-disable-next-line func-names
      onCanceled = function(cancel) {
        if (!request) {
          return;
        }
        reject(!cancel || cancel.type ? new CanceledError(null, config, request) : cancel);
        request.abort();
        request = null;
      };

      config.cancelToken && config.cancelToken.subscribe(onCanceled);
      if (config.signal) {
        config.signal.aborted ? onCanceled() : config.signal.addEventListener('abort', onCanceled);
      }
    }

    // false, 0 (zero number), and '' (empty string) are valid JSON values
    if (!requestData && requestData !== false && requestData !== 0 && requestData !== '') {
      requestData = null;
    }

    var protocol = parseProtocol(fullPath);

    if (protocol && platform.protocols.indexOf(protocol) === -1) {
      reject(new AxiosError('Unsupported protocol ' + protocol + ':', AxiosError.ERR_BAD_REQUEST, config));
      return;
    }


    // Send the request
    request.send(requestData);
  });
};
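For reference, a minimal browser-side sketch of the options the XHR adapter above consumes; the endpoint and cookie/header names are hypothetical examples, not taken from this repository.

// Sketch only: these request options are read by the xhrAdapter shown above.
axios.get('/api/profile', {
  withCredentials: true,                 // copied onto request.withCredentials
  xsrfCookieName: 'XSRF-TOKEN',          // cookie read via cookies.read() above
  xsrfHeaderName: 'X-XSRF-TOKEN',        // header the cookie value is written to
  onDownloadProgress: function (e) {     // wired to the XHR 'progress' event
    console.log(e.loaded, e.total);
  }
}).then(function (response) {
  console.log(response.data);
});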
68
node_modules/axios/lib/axios.js
generated
vendored
68
node_modules/axios/lib/axios.js
generated
vendored
@@ -1,68 +0,0 @@
'use strict';

var utils = require('./utils');
var bind = require('./helpers/bind');
var Axios = require('./core/Axios');
var mergeConfig = require('./core/mergeConfig');
var defaults = require('./defaults');
var formDataToJSON = require('./helpers/formDataToJSON');
/**
 * Create an instance of Axios
 *
 * @param {Object} defaultConfig The default config for the instance
 * @return {Axios} A new instance of Axios
 */
function createInstance(defaultConfig) {
  var context = new Axios(defaultConfig);
  var instance = bind(Axios.prototype.request, context);

  // Copy axios.prototype to instance
  utils.extend(instance, Axios.prototype, context);

  // Copy context to instance
  utils.extend(instance, context);

  // Factory for creating new instances
  instance.create = function create(instanceConfig) {
    return createInstance(mergeConfig(defaultConfig, instanceConfig));
  };

  return instance;
}

// Create the default instance to be exported
var axios = createInstance(defaults);

// Expose Axios class to allow class inheritance
axios.Axios = Axios;

// Expose Cancel & CancelToken
axios.CanceledError = require('./cancel/CanceledError');
axios.CancelToken = require('./cancel/CancelToken');
axios.isCancel = require('./cancel/isCancel');
axios.VERSION = require('./env/data').version;
axios.toFormData = require('./helpers/toFormData');

// Expose AxiosError class
axios.AxiosError = require('../lib/core/AxiosError');

// alias for CanceledError for backward compatibility
axios.Cancel = axios.CanceledError;

// Expose all/spread
axios.all = function all(promises) {
  return Promise.all(promises);
};
axios.spread = require('./helpers/spread');

// Expose isAxiosError
axios.isAxiosError = require('./helpers/isAxiosError');

axios.formToJSON = function(thing) {
  return formDataToJSON(utils.isHTMLForm(thing) ? new FormData(thing) : thing);
};

module.exports = axios;

// Allow use of default import syntax in TypeScript
module.exports.default = axios;
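For reference, a minimal sketch of how createInstance() is exercised through axios.create(); the baseURL and timeout are hypothetical values.

// Sketch only: axios.create() calls createInstance() with the merged defaults shown above.
var client = axios.create({
  baseURL: 'https://api.example.invalid',
  timeout: 2000
});

client.get('/status').then(function (response) {
  console.log(response.status);
});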
Some files were not shown because too many files have changed in this diff