Dep update

Philipp Horstenkamp 2025-04-08 22:00:41 +02:00 committed by Philipp Horstenkamp
parent f79e41cbfe
commit 78549e06b4
966 changed files with 43637 additions and 158248 deletions

12
node_modules/.bin/detect-libc generated vendored

@ -1,12 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../detect-libc/bin/detect-libc.js" "$@"
else
exec node "$basedir/../detect-libc/bin/detect-libc.js" "$@"
fi

17
node_modules/.bin/detect-libc.cmd generated vendored

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\detect-libc\bin\detect-libc.js" %*

28
node_modules/.bin/detect-libc.ps1 generated vendored

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../detect-libc/bin/detect-libc.js" $args
} else {
& "$basedir/node$exe" "$basedir/../detect-libc/bin/detect-libc.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../detect-libc/bin/detect-libc.js" $args
} else {
& "node$exe" "$basedir/../detect-libc/bin/detect-libc.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

12
node_modules/.bin/mkdirp generated vendored

@ -1,12 +0,0 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../mkdirp/bin/cmd.js" "$@"
else
exec node "$basedir/../mkdirp/bin/cmd.js" "$@"
fi


@ -6,7 +6,7 @@ case `uname` in
 esac
 if [ -x "$basedir/node" ]; then
-exec "$basedir/node" "$basedir/../semver/bin/semver" "$@"
+exec "$basedir/node" "$basedir/../node-gyp-build/bin.js" "$@"
 else
-exec node "$basedir/../semver/bin/semver" "$@"
+exec node "$basedir/../node-gyp-build/bin.js" "$@"
 fi


@ -6,7 +6,7 @@ case `uname` in
 esac
 if [ -x "$basedir/node" ]; then
-exec "$basedir/node" "$basedir/../prebuild-install/bin.js" "$@"
+exec "$basedir/node" "$basedir/../node-gyp-build/optional.js" "$@"
 else
-exec node "$basedir/../prebuild-install/bin.js" "$@"
+exec node "$basedir/../node-gyp-build/optional.js" "$@"
 fi


@ -14,4 +14,4 @@ IF EXIST "%dp0%\node.exe" (
 SET PATHEXT=%PATHEXT:;.JS;=;%
 )
-endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\prebuild-install\bin.js" %*
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\node-gyp-build\optional.js" %*


@ -11,17 +11,17 @@ $ret=0
 if (Test-Path "$basedir/node$exe") {
 # Support pipeline input
 if ($MyInvocation.ExpectingInput) {
-$input | & "$basedir/node$exe" "$basedir/../semver/bin/semver" $args
+$input | & "$basedir/node$exe" "$basedir/../node-gyp-build/optional.js" $args
 } else {
-& "$basedir/node$exe" "$basedir/../semver/bin/semver" $args
+& "$basedir/node$exe" "$basedir/../node-gyp-build/optional.js" $args
 }
 $ret=$LASTEXITCODE
 } else {
 # Support pipeline input
 if ($MyInvocation.ExpectingInput) {
-$input | & "node$exe" "$basedir/../semver/bin/semver" $args
+$input | & "node$exe" "$basedir/../node-gyp-build/optional.js" $args
 } else {
-& "node$exe" "$basedir/../semver/bin/semver" $args
+& "node$exe" "$basedir/../node-gyp-build/optional.js" $args
 }
 $ret=$LASTEXITCODE
 }


@ -6,7 +6,7 @@ case `uname` in
 esac
 if [ -x "$basedir/node" ]; then
-exec "$basedir/node" "$basedir/../rc/cli.js" "$@"
+exec "$basedir/node" "$basedir/../node-gyp-build/build-test.js" "$@"
 else
-exec node "$basedir/../rc/cli.js" "$@"
+exec node "$basedir/../node-gyp-build/build-test.js" "$@"
 fi


@ -14,4 +14,4 @@ IF EXIST "%dp0%\node.exe" (
 SET PATHEXT=%PATHEXT:;.JS;=;%
 )
-endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\mkdirp\bin\cmd.js" %*
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\node-gyp-build\build-test.js" %*


@ -11,17 +11,17 @@ $ret=0
 if (Test-Path "$basedir/node$exe") {
 # Support pipeline input
 if ($MyInvocation.ExpectingInput) {
-$input | & "$basedir/node$exe" "$basedir/../prebuild-install/bin.js" $args
+$input | & "$basedir/node$exe" "$basedir/../node-gyp-build/build-test.js" $args
 } else {
-& "$basedir/node$exe" "$basedir/../prebuild-install/bin.js" $args
+& "$basedir/node$exe" "$basedir/../node-gyp-build/build-test.js" $args
 }
 $ret=$LASTEXITCODE
 } else {
 # Support pipeline input
 if ($MyInvocation.ExpectingInput) {
-$input | & "node$exe" "$basedir/../prebuild-install/bin.js" $args
+$input | & "node$exe" "$basedir/../node-gyp-build/build-test.js" $args
 } else {
-& "node$exe" "$basedir/../prebuild-install/bin.js" $args
+& "node$exe" "$basedir/../node-gyp-build/build-test.js" $args
 }
 $ret=$LASTEXITCODE
 }


@ -14,4 +14,4 @@ IF EXIST "%dp0%\node.exe" (
 SET PATHEXT=%PATHEXT:;.JS;=;%
 )
-endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver" %*
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\node-gyp-build\bin.js" %*


@ -11,17 +11,17 @@ $ret=0
 if (Test-Path "$basedir/node$exe") {
 # Support pipeline input
 if ($MyInvocation.ExpectingInput) {
-$input | & "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
+$input | & "$basedir/node$exe" "$basedir/../node-gyp-build/bin.js" $args
 } else {
-& "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
+& "$basedir/node$exe" "$basedir/../node-gyp-build/bin.js" $args
 }
 $ret=$LASTEXITCODE
 } else {
 # Support pipeline input
 if ($MyInvocation.ExpectingInput) {
-$input | & "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
+$input | & "node$exe" "$basedir/../node-gyp-build/bin.js" $args
 } else {
-& "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
+& "node$exe" "$basedir/../node-gyp-build/bin.js" $args
 }
 $ret=$LASTEXITCODE
 }

17
node_modules/.bin/rc.cmd generated vendored

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\rc\cli.js" %*

28
node_modules/.bin/rc.ps1 generated vendored

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../rc/cli.js" $args
} else {
& "$basedir/node$exe" "$basedir/../rc/cli.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../rc/cli.js" $args
} else {
& "node$exe" "$basedir/../rc/cli.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

914
node_modules/.package-lock.json generated vendored

File diff suppressed because it is too large

4
node_modules/ansi-regex/index.js generated vendored

@ -1,4 +0,0 @@
'use strict';
module.exports = function () {
return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]/g;
};

21
node_modules/ansi-regex/license generated vendored

@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

64
node_modules/ansi-regex/package.json generated vendored

@ -1,64 +0,0 @@
{
"name": "ansi-regex",
"version": "2.1.1",
"description": "Regular expression for matching ANSI escape codes",
"license": "MIT",
"repository": "chalk/ansi-regex",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "sindresorhus.com"
},
"maintainers": [
"Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)",
"Joshua Appelman <jappelman@xebia.com> (jbnicolai.com)",
"JD Ballard <i.am.qix@gmail.com> (github.com/qix-)"
],
"engines": {
"node": ">=0.10.0"
},
"scripts": {
"test": "xo && ava --verbose",
"view-supported": "node fixtures/view-codes.js"
},
"files": [
"index.js"
],
"keywords": [
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"string",
"tty",
"escape",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"command-line",
"text",
"regex",
"regexp",
"re",
"match",
"test",
"find",
"pattern"
],
"devDependencies": {
"ava": "0.17.0",
"xo": "0.16.0"
},
"xo": {
"rules": {
"guard-for-in": 0,
"no-loop-func": 0
}
}
}

39
node_modules/ansi-regex/readme.md generated vendored

@ -1,39 +0,0 @@
# ansi-regex [![Build Status](https://travis-ci.org/chalk/ansi-regex.svg?branch=master)](https://travis-ci.org/chalk/ansi-regex)
> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
## Install
```
$ npm install --save ansi-regex
```
## Usage
```js
const ansiRegex = require('ansi-regex');
ansiRegex().test('\u001b[4mcake\u001b[0m');
//=> true
ansiRegex().test('cake');
//=> false
'\u001b[4mcake\u001b[0m'.match(ansiRegex());
//=> ['\u001b[4m', '\u001b[0m']
```
## FAQ
### Why do you test for codes not in the ECMA 48 standard?
Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. If I recall correctly, we test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them.
On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out.
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)

14
node_modules/aproba/LICENSE generated vendored

@ -1,14 +0,0 @@
Copyright (c) 2015, Rebecca Turner <me@re-becca.org>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

94
node_modules/aproba/README.md generated vendored

@ -1,94 +0,0 @@
aproba
======
A ridiculously light-weight function argument validator
```
var validate = require("aproba")
function myfunc(a, b, c) {
// `a` must be a string, `b` a number, `c` a function
validate('SNF', arguments) // [a,b,c] is also valid
}
myfunc('test', 23, function () {}) // ok
myfunc(123, 23, function () {}) // type error
myfunc('test', 23) // missing arg error
myfunc('test', 23, function () {}, true) // too many args error
```
Valid types are:
| type | description
| :--: | :----------
| * | matches any type
| A | `Array.isArray` OR an `arguments` object
| S | typeof == string
| N | typeof == number
| F | typeof == function
| O | typeof == object and not type A and not type E
| B | typeof == boolean
| E | `instanceof Error` OR `null` **(special: see below)**
| Z | == `null`
Validation failures throw one of three exception types, distinguished by a
`code` property of `EMISSINGARG`, `EINVALIDTYPE` or `ETOOMANYARGS`.
If you pass in an invalid type then it will throw with a code of
`EUNKNOWNTYPE`.
If an **error** argument is found and is not null then the remaining
arguments are optional. That is, if you say `ESO` then that's like using a
non-magical `E` in: `E|ESO|ZSO`.
### But I have optional arguments?!
You can provide more than one signature by separating them with pipes `|`.
If any signature matches the arguments then they'll be considered valid.
So for example, say you wanted to write a signature for
`fs.createWriteStream`. The docs for it describe it thusly:
```
fs.createWriteStream(path[, options])
```
This would be a signature of `SO|S`. That is, a string and an object, or
just a string.
Now, if you read the full `fs` docs, you'll see that actually path can ALSO
be a buffer. And options can be a string, that is:
```
path <String> | <Buffer>
options <String> | <Object>
```
To reproduce this you have to fully enumerate all of the possible
combinations and that implies a signature of `SO|SS|OO|OS|S|O`. The
awkwardness is a feature: It reminds you of the complexity you're adding to
your API when you do this sort of thing.
### Browser support
This has no dependencies and should work in browsers, though you'll have
noisier stack traces.
### Why this exists
I wanted a very simple argument validator. It needed to do two things:
1. Be more concise and easier to use than assertions
2. Not encourage an infinite bikeshed of DSLs
This is why types are specified by a single character and there's no such
thing as an optional argument.
This is not intended to validate user data. This is specifically about
asserting the interface of your functions.
If you need greater validation, I encourage you to write them by hand or
look elsewhere.
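For reference, a minimal sketch of the signature style described above, assuming `aproba` is required under its published name (the `openLog` helper is made up for illustration):
```js
var validate = require('aproba')

// Mirrors the fs.createWriteStream shape discussed above: 'SO|S' accepts
// (string, object) or just (string); anything else throws an error with a
// code such as EWRONGARGCOUNT or EINVALIDTYPE.
function openLog (path, options) {
  validate('SO|S', arguments)
  // ... open the stream here ...
}

openLog('out.log')                  // ok
openLog('out.log', { flags: 'a' })  // ok
// openLog(42) would throw an error with code EINVALIDTYPE
```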

105
node_modules/aproba/index.js generated vendored

@ -1,105 +0,0 @@
'use strict'
function isArguments (thingy) {
return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee')
}
var types = {
'*': {label: 'any', check: function () { return true }},
A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }},
S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }},
N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }},
F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }},
O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }},
B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }},
E: {label: 'error', check: function (thingy) { return thingy instanceof Error }},
Z: {label: 'null', check: function (thingy) { return thingy == null }}
}
function addSchema (schema, arity) {
var group = arity[schema.length] = arity[schema.length] || []
if (group.indexOf(schema) === -1) group.push(schema)
}
var validate = module.exports = function (rawSchemas, args) {
if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length)
if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas')
if (!args) throw missingRequiredArg(1, 'args')
if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas)
if (!types.A.check(args)) throw invalidType(1, ['array'], args)
var schemas = rawSchemas.split('|')
var arity = {}
schemas.forEach(function (schema) {
for (var ii = 0; ii < schema.length; ++ii) {
var type = schema[ii]
if (!types[type]) throw unknownType(ii, type)
}
if (/E.*E/.test(schema)) throw moreThanOneError(schema)
addSchema(schema, arity)
if (/E/.test(schema)) {
addSchema(schema.replace(/E.*$/, 'E'), arity)
addSchema(schema.replace(/E/, 'Z'), arity)
if (schema.length === 1) addSchema('', arity)
}
})
var matching = arity[args.length]
if (!matching) {
throw wrongNumberOfArgs(Object.keys(arity), args.length)
}
for (var ii = 0; ii < args.length; ++ii) {
var newMatching = matching.filter(function (schema) {
var type = schema[ii]
var typeCheck = types[type].check
return typeCheck(args[ii])
})
if (!newMatching.length) {
var labels = matching.map(function (schema) {
return types[schema[ii]].label
}).filter(function (schema) { return schema != null })
throw invalidType(ii, labels, args[ii])
}
matching = newMatching
}
}
function missingRequiredArg (num) {
return newException('EMISSINGARG', 'Missing required argument #' + (num + 1))
}
function unknownType (num, type) {
return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1))
}
function invalidType (num, expectedTypes, value) {
var valueType
Object.keys(types).forEach(function (typeCode) {
if (types[typeCode].check(value)) valueType = types[typeCode].label
})
return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' +
englishList(expectedTypes) + ' but got ' + valueType)
}
function englishList (list) {
return list.join(', ').replace(/, ([^,]+)$/, ' or $1')
}
function wrongNumberOfArgs (expected, got) {
var english = englishList(expected)
var args = expected.every(function (ex) { return ex.length === 1 })
? 'argument'
: 'arguments'
return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got)
}
function moreThanOneError (schema) {
return newException('ETOOMANYERRORTYPES',
'Only one error type per argument signature is allowed, more than one found in "' + schema + '"')
}
function newException (code, msg) {
var e = new Error(msg)
e.code = code
if (Error.captureStackTrace) Error.captureStackTrace(e, validate)
return e
}

34
node_modules/aproba/package.json generated vendored

@ -1,34 +0,0 @@
{
"name": "aproba",
"version": "1.2.0",
"description": "A ridiculously light-weight argument validator (now browser friendly)",
"main": "index.js",
"directories": {
"test": "test"
},
"dependencies": {},
"devDependencies": {
"standard": "^10.0.3",
"tap": "^10.0.2"
},
"files": [
"index.js"
],
"scripts": {
"test": "standard && tap -j3 test/*.js"
},
"repository": {
"type": "git",
"url": "https://github.com/iarna/aproba"
},
"keywords": [
"argument",
"validate"
],
"author": "Rebecca Turner <me@re-becca.org>",
"license": "ISC",
"bugs": {
"url": "https://github.com/iarna/aproba/issues"
},
"homepage": "https://github.com/iarna/aproba"
}


@ -1,37 +0,0 @@
Hi, figured we could actually use a changelog now:
## 1.1.5 2018-05-24
* [#92](https://github.com/iarna/are-we-there-yet/pull/92) Fix bug where
`finish` would throw errors when including `TrackerStream` objects in
`TrackerGroup` collections. (@brianloveswords)
## 1.1.4 2017-04-21
* Fix typo in package.json
## 1.1.3 2017-04-21
* Improve documentation and limit files included in the distribution.
## 1.1.2 2016-03-15
* Add tracker group cycle detection and tests for it
## 1.1.1 2016-01-29
* Fix a typo in stream completion tracker
## 1.1.0 2016-01-29
* Rewrote completion percent computation to be low impact: no more walking a
tree of completion groups every time we need this info. Previously, with
medium sized tree of completion groups, even a relatively modest number of
calls to the top level `completed()` method would result in absurd numbers
of calls overall as it walked down the tree. We now, instead, keep track as
we bubble up changes, so the computation is limited to when data changes and
to the depth of that one branch, instead of _every_ node. (Plus, we were already
incurring _this_ cost, since we already bubbled out changes.)
* Moved different tracker types out to their own files.
* Made tests test for TOO MANY events too.
* Standardized the source code formatting


@ -1,5 +0,0 @@
Copyright (c) 2015, Rebecca Turner
Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.


@ -1,195 +0,0 @@
are-we-there-yet
----------------
Track complex hierarchies of asynchronous task completion statuses. This is
intended to give you a way of recording and reporting the progress of the big
recursive fan-out and gather type workflows that are so common in async.
What you do with this completion data is up to you, but the most common use case is to
feed it to one of the many progress bar modules.
Most progress bar modules include a rudimentary version of this, but my
needs were more complex.
Usage
=====
```javascript
var TrackerGroup = require("are-we-there-yet").TrackerGroup
var top = new TrackerGroup("program")
var single = top.newItem("one thing", 100)
single.completeWork(20)
console.log(top.completed()) // 0.2
fs.stat("file", function(er, stat) {
if (er) throw er
var stream = top.newStream("file", stat.size)
console.log(top.completed()) // now 0.1 as single is 50% of the job and is 20% complete
// and 50% * 20% == 10%
fs.createReadStream("file").pipe(stream).on("data", function (chunk) {
// do stuff with chunk
})
top.on("change", function (name) {
// called each time a chunk is read from "file"
// top.completed() will start at 0.1 and fill up to 0.6 as the file is read
})
})
```
Shared Methods
==============
* var completed = tracker.completed()
Implemented in: `Tracker`, `TrackerGroup`, `TrackerStream`
Returns the ratio of completed work to work to be done. Range of 0 to 1.
* tracker.finish()
Implemented in: `Tracker`, `TrackerGroup`
Marks the tracker as completed. With a TrackerGroup this marks all of its
components as completed.
Marks all of the components of this tracker as finished, which in turn means
that `tracker.completed()` for this will now be 1.
This will result in one or more `change` events being emitted.
Events
======
All tracker objects emit `change` events with the following arguments:
```
function (name, completed, tracker)
```
`name` is the name of the tracker that originally emitted the event,
or if it didn't have one, the first containing tracker group that had one.
`completed` is the percent complete (as returned by `tracker.completed()` method).
`tracker` is the tracker object that you are listening for events on.
TrackerGroup
============
* var tracker = new TrackerGroup(**name**)
* **name** *(optional)* - The name of this tracker group, used in change
notifications if the component updating didn't have a name. Defaults to undefined.
Creates a new empty tracker aggregation group. These are trackers whose
completion status is determined by the completion status of other trackers.
* tracker.addUnit(**otherTracker**, **weight**)
* **otherTracker** - Any of the other are-we-there-yet tracker objects
* **weight** *(optional)* - The weight to give the tracker, defaults to 1.
Adds the **otherTracker** to this aggregation group. The weight determines
how long you expect this tracker to take to complete in proportion to other
units. So for instance, if you add one tracker with a weight of 1 and
another with a weight of 2, you're saying the second will take twice as long
to complete as the first. As such, the first will account for 33% of the
completion of this tracker and the second will account for the other 67%.
Returns **otherTracker**.
* var subGroup = tracker.newGroup(**name**, **weight**)
The above is exactly equivalent to:
```javascript
var subGroup = tracker.addUnit(new TrackerGroup(name), weight)
```
* var subItem = tracker.newItem(**name**, **todo**, **weight**)
The above is exactly equivalent to:
```javascript
var subItem = tracker.addUnit(new Tracker(name, todo), weight)
```
* var subStream = tracker.newStream(**name**, **todo**, **weight**)
The above is exactly equivalent to:
```javascript
var subStream = tracker.addUnit(new TrackerStream(name, todo), weight)
```
* console.log( tracker.debug() )
Returns a tree showing the completion of this tracker group and all of its
children, including recursively entering all of the children.
Tracker
=======
* var tracker = new Tracker(**name**, **todo**)
* **name** *(optional)* The name of this counter to report in change
events. Defaults to undefined.
* **todo** *(optional)* The amount of work todo (a number). Defaults to 0.
Ordinarily these are constructed as a part of a tracker group (via
`newItem`).
* var completed = tracker.completed()
Returns the ratio of completed work to work to be done. Range of 0 to 1. If
total work to be done is 0 then it will return 0.
* tracker.addWork(**todo**)
* **todo** A number to add to the amount of work to be done.
Increases the amount of work to be done, thus decreasing the completion
percentage. Triggers a `change` event.
* tracker.completeWork(**completed**)
* **completed** A number to add to the work complete
Increase the amount of work complete, thus increasing the completion percentage.
Will never increase the work completed past the amount of work todo. That is,
percentages > 100% are not allowed. Triggers a `change` event.
* tracker.finish()
Marks this tracker as finished, tracker.completed() will now be 1. Triggers
a `change` event.
TrackerStream
=============
* var tracker = new TrackerStream(**name**, **size**, **options**)
* **name** *(optional)* The name of this counter to report in change
events. Defaults to undefined.
* **size** *(optional)* The number of bytes being sent through this stream.
* **options** *(optional)* A hash of stream options
The tracker stream object is a pass through stream that updates an internal
tracker object each time a block passes through. It's intended to track
downloads, file extraction and other related activities. You use it by piping
your data source into it and then using it as your data source.
If your data has a length attribute then that's used as the amount of work
completed when the chunk is passed through. If it does not (eg, object
streams) then each chunk counts as completing 1 unit of work, so your size
should be the total number of objects being streamed.
* tracker.addWork(**todo**)
* **todo** Increase the expected overall size by **todo** bytes.
Increases the amount of work to be done, thus decreasing the completion
percentage. Triggers a `change` event.
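A minimal sketch of the weighted-group behaviour documented above, using only the constructors and methods listed in this README (tracker names and numbers are illustrative):
```js
var TrackerGroup = require('are-we-there-yet').TrackerGroup

var top = new TrackerGroup('deps')
var fetch = top.newItem('fetch', 10)    // weight defaults to 1
var build = top.newItem('build', 4, 2)  // weight 2, so ~67% of the total

top.on('change', function (name, completed) {
  console.log(name, completed.toFixed(2))
})

fetch.completeWork(10) // fetch done: top.completed() is about 0.33
build.completeWork(2)  // build half done: top.completed() is about 0.67
```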


@ -1,4 +0,0 @@
'use strict'
exports.TrackerGroup = require('./tracker-group.js')
exports.Tracker = require('./tracker.js')
exports.TrackerStream = require('./tracker-stream.js')


@ -1,35 +0,0 @@
{
"name": "are-we-there-yet",
"version": "1.1.7",
"description": "Keep track of the overall completion of many disparate processes",
"main": "index.js",
"scripts": {
"test": "standard && tap test/*.js"
},
"repository": {
"type": "git",
"url": "https://github.com/iarna/are-we-there-yet.git"
},
"author": "Rebecca Turner (http://re-becca.org)",
"license": "ISC",
"bugs": {
"url": "https://github.com/iarna/are-we-there-yet/issues"
},
"homepage": "https://github.com/iarna/are-we-there-yet",
"devDependencies": {
"standard": "^11.0.1",
"tap": "^12.0.1"
},
"dependencies": {
"delegates": "^1.0.0",
"readable-stream": "^2.0.6"
},
"files": [
"index.js",
"tracker-base.js",
"tracker-group.js",
"tracker-stream.js",
"tracker.js",
"CHANGES.md"
]
}


@ -1,11 +0,0 @@
'use strict'
var EventEmitter = require('events').EventEmitter
var util = require('util')
var trackerId = 0
var TrackerBase = module.exports = function (name) {
EventEmitter.call(this)
this.id = ++trackerId
this.name = name
}
util.inherits(TrackerBase, EventEmitter)


@ -1,107 +0,0 @@
'use strict'
var util = require('util')
var TrackerBase = require('./tracker-base.js')
var Tracker = require('./tracker.js')
var TrackerStream = require('./tracker-stream.js')
var TrackerGroup = module.exports = function (name) {
TrackerBase.call(this, name)
this.parentGroup = null
this.trackers = []
this.completion = {}
this.weight = {}
this.totalWeight = 0
this.finished = false
this.bubbleChange = bubbleChange(this)
}
util.inherits(TrackerGroup, TrackerBase)
function bubbleChange (trackerGroup) {
return function (name, completed, tracker) {
trackerGroup.completion[tracker.id] = completed
if (trackerGroup.finished) return
trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup)
}
}
TrackerGroup.prototype.nameInTree = function () {
var names = []
var from = this
while (from) {
names.unshift(from.name)
from = from.parentGroup
}
return names.join('/')
}
TrackerGroup.prototype.addUnit = function (unit, weight) {
if (unit.addUnit) {
var toTest = this
while (toTest) {
if (unit === toTest) {
throw new Error(
'Attempted to add tracker group ' +
unit.name + ' to tree that already includes it ' +
this.nameInTree(this))
}
toTest = toTest.parentGroup
}
unit.parentGroup = this
}
this.weight[unit.id] = weight || 1
this.totalWeight += this.weight[unit.id]
this.trackers.push(unit)
this.completion[unit.id] = unit.completed()
unit.on('change', this.bubbleChange)
if (!this.finished) this.emit('change', unit.name, this.completion[unit.id], unit)
return unit
}
TrackerGroup.prototype.completed = function () {
if (this.trackers.length === 0) return 0
var valPerWeight = 1 / this.totalWeight
var completed = 0
for (var ii = 0; ii < this.trackers.length; ii++) {
var trackerId = this.trackers[ii].id
completed += valPerWeight * this.weight[trackerId] * this.completion[trackerId]
}
return completed
}
TrackerGroup.prototype.newGroup = function (name, weight) {
return this.addUnit(new TrackerGroup(name), weight)
}
TrackerGroup.prototype.newItem = function (name, todo, weight) {
return this.addUnit(new Tracker(name, todo), weight)
}
TrackerGroup.prototype.newStream = function (name, todo, weight) {
return this.addUnit(new TrackerStream(name, todo), weight)
}
TrackerGroup.prototype.finish = function () {
this.finished = true
if (!this.trackers.length) this.addUnit(new Tracker(), 1, true)
for (var ii = 0; ii < this.trackers.length; ii++) {
var tracker = this.trackers[ii]
tracker.finish()
tracker.removeListener('change', this.bubbleChange)
}
this.emit('change', this.name, 1, this)
}
var buffer = ' '
TrackerGroup.prototype.debug = function (depth) {
depth = depth || 0
var indent = depth ? buffer.substr(0, depth) : ''
var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n'
this.trackers.forEach(function (tracker) {
if (tracker instanceof TrackerGroup) {
output += tracker.debug(depth + 1)
} else {
output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n'
}
})
return output
}


@ -1,36 +0,0 @@
'use strict'
var util = require('util')
var stream = require('readable-stream')
var delegate = require('delegates')
var Tracker = require('./tracker.js')
var TrackerStream = module.exports = function (name, size, options) {
stream.Transform.call(this, options)
this.tracker = new Tracker(name, size)
this.name = name
this.id = this.tracker.id
this.tracker.on('change', delegateChange(this))
}
util.inherits(TrackerStream, stream.Transform)
function delegateChange (trackerStream) {
return function (name, completion, tracker) {
trackerStream.emit('change', name, completion, trackerStream)
}
}
TrackerStream.prototype._transform = function (data, encoding, cb) {
this.tracker.completeWork(data.length ? data.length : 1)
this.push(data)
cb()
}
TrackerStream.prototype._flush = function (cb) {
this.tracker.finish()
cb()
}
delegate(TrackerStream.prototype, 'tracker')
.method('completed')
.method('addWork')
.method('finish')


@ -1,30 +0,0 @@
'use strict'
var util = require('util')
var TrackerBase = require('./tracker-base.js')
var Tracker = module.exports = function (name, todo) {
TrackerBase.call(this, name)
this.workDone = 0
this.workTodo = todo || 0
}
util.inherits(Tracker, TrackerBase)
Tracker.prototype.completed = function () {
return this.workTodo === 0 ? 0 : this.workDone / this.workTodo
}
Tracker.prototype.addWork = function (work) {
this.workTodo += work
this.emit('change', this.name, this.completed(), this)
}
Tracker.prototype.completeWork = function (work) {
this.workDone += work
if (this.workDone > this.workTodo) this.workDone = this.workTodo
this.emit('change', this.name, this.completed(), this)
}
Tracker.prototype.finish = function () {
this.workTodo = this.workDone = 1
this.emit('change', this.name, 1, this)
}


@ -1,2 +0,0 @@
coverage
.nyc_output

10
node_modules/async-limiter/.nycrc generated vendored

@ -1,10 +0,0 @@
{
"check-coverage": false,
"lines": 99,
"statements": 99,
"functions": 99,
"branches": 99,
"include": [
"index.js"
]
}


@ -1,9 +0,0 @@
language: node_js
node_js:
- "6"
- "8"
- "10"
- "node"
script: npm run travis
cache:
yarn: true

8
node_modules/async-limiter/LICENSE generated vendored

@ -1,8 +0,0 @@
The MIT License (MIT)
Copyright (c) 2017 Samuel Reed <samuel.trace.reed@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

67
node_modules/async-limiter/index.js generated vendored

@ -1,67 +0,0 @@
'use strict';
function Queue(options) {
if (!(this instanceof Queue)) {
return new Queue(options);
}
options = options || {};
this.concurrency = options.concurrency || Infinity;
this.pending = 0;
this.jobs = [];
this.cbs = [];
this._done = done.bind(this);
}
var arrayAddMethods = [
'push',
'unshift',
'splice'
];
arrayAddMethods.forEach(function(method) {
Queue.prototype[method] = function() {
var methodResult = Array.prototype[method].apply(this.jobs, arguments);
this._run();
return methodResult;
};
});
Object.defineProperty(Queue.prototype, 'length', {
get: function() {
return this.pending + this.jobs.length;
}
});
Queue.prototype._run = function() {
if (this.pending === this.concurrency) {
return;
}
if (this.jobs.length) {
var job = this.jobs.shift();
this.pending++;
job(this._done);
this._run();
}
if (this.pending === 0) {
while (this.cbs.length !== 0) {
var cb = this.cbs.pop();
process.nextTick(cb);
}
}
};
Queue.prototype.onDone = function(cb) {
if (typeof cb === 'function') {
this.cbs.push(cb);
this._run();
}
};
function done() {
this.pending--;
this._run();
}
module.exports = Queue;


@ -1,35 +0,0 @@
{
"name": "async-limiter",
"version": "1.0.1",
"description": "asynchronous function queue with adjustable concurrency",
"keywords": [
"throttle",
"async",
"limiter",
"asynchronous",
"job",
"task",
"concurrency",
"concurrent"
],
"dependencies": {},
"devDependencies": {
"coveralls": "^3.0.3",
"eslint": "^5.16.0",
"eslint-plugin-mocha": "^5.3.0",
"intelli-espower-loader": "^1.0.1",
"mocha": "^6.1.4",
"nyc": "^14.1.1",
"power-assert": "^1.6.1"
},
"scripts": {
"test": "mocha --require intelli-espower-loader test/",
"travis": "npm run lint && npm run test",
"coverage": "nyc npm test && nyc report --reporter=text-lcov | coveralls",
"example": "node example",
"lint": "eslint ."
},
"repository": "https://github.com/strml/async-limiter.git",
"author": "Samuel Reed <samuel.trace.reed@gmail.com",
"license": "MIT"
}

132
node_modules/async-limiter/readme.md generated vendored

@ -1,132 +0,0 @@
# Async-Limiter
A module for limiting concurrent asynchronous actions in flight. Forked from [queue](https://github.com/jessetane/queue).
[![npm](http://img.shields.io/npm/v/async-limiter.svg?style=flat-square)](http://www.npmjs.org/async-limiter)
[![tests](https://img.shields.io/travis/STRML/async-limiter.svg?style=flat-square&branch=master)](https://travis-ci.org/STRML/async-limiter)
[![coverage](https://img.shields.io/coveralls/STRML/async-limiter.svg?style=flat-square&branch=master)](https://coveralls.io/r/STRML/async-limiter)
This module exports a class `Limiter` that implements some of the `Array` API.
Pass async functions (ones that accept a callback or return a promise) to an instance's additive array methods.
## Motivation
Certain functions, like `zlib`, have [undesirable behavior](https://github.com/nodejs/node/issues/8871#issuecomment-250915913) when
run at infinite concurrency.
In this case, it is actually faster, and takes far less memory, to limit concurrency.
This module should do the absolute minimum work necessary to queue up functions. PRs are welcome that would
make this module faster or lighter, but new functionality is not desired.
Style should conform to nodejs/node style.
## Example
``` javascript
var Limiter = require('async-limiter')
var t = new Limiter({concurrency: 2});
var results = []
// add jobs using the familiar Array API
t.push(function (cb) {
results.push('two')
cb()
})
t.push(
function (cb) {
results.push('four')
cb()
},
function (cb) {
results.push('five')
cb()
}
)
t.unshift(function (cb) {
results.push('one')
cb()
})
t.splice(2, 0, function (cb) {
results.push('three')
cb()
})
// Jobs run automatically. If you want a callback when all are done,
// call 'onDone()'.
t.onDone(function () {
console.log('all done:', results)
})
```
## Zlib Example
```js
const zlib = require('zlib');
const Limiter = require('async-limiter');
const message = {some: "data"};
const payload = new Buffer(JSON.stringify(message));
// Try with different concurrency values to see how this actually
// slows significantly with higher concurrency!
//
// 5: 1398.607ms
// 10: 1375.668ms
// Infinity: 4423.300ms
//
const t = new Limiter({concurrency: 5});
function deflate(payload, cb) {
t.push(function(done) {
zlib.deflate(payload, function(err, buffer) {
done();
cb(err, buffer);
});
});
}
console.time('deflate');
for(let i = 0; i < 30000; ++i) {
deflate(payload, function (err, buffer) {});
}
t.onDone(function() {
console.timeEnd('deflate');
});
```
## Install
`npm install async-limiter`
## Test
`npm test`
## API
### `var t = new Limiter([opts])`
Constructor. `opts` may contain initial values for:
* `t.concurrency`
## Instance methods
### `t.onDone(fn)`
`fn` will be called once and only once, when the queue is empty.
## Instance methods mixed in from `Array`
Mozilla has docs on how these methods work [here](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array).
### `t.push(element1, ..., elementN)`
### `t.unshift(element1, ..., elementN)`
### `t.splice(index , howMany[, element1[, ...[, elementN]]])`
## Properties
### `t.concurrency`
Max number of jobs the queue should process concurrently, defaults to `Infinity`.
### `t.length`
Jobs pending + jobs to process (readonly).


@ -1,6 +1,6 @@
 The MIT License (MIT)
-Copyright (c) 2016 David Frank
+Copyright (c) 2016 Alex Indigo
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@ -19,4 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.

233
node_modules/asynckit/README.md generated vendored Normal file

@ -0,0 +1,233 @@
# asynckit [![NPM Module](https://img.shields.io/npm/v/asynckit.svg?style=flat)](https://www.npmjs.com/package/asynckit)
Minimal async jobs utility library, with streams support.
[![PhantomJS Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=browser&style=flat)](https://travis-ci.org/alexindigo/asynckit)
[![Linux Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=linux:0.12-6.x&style=flat)](https://travis-ci.org/alexindigo/asynckit)
[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/asynckit/v0.4.0.svg?label=windows:0.12-6.x&style=flat)](https://ci.appveyor.com/project/alexindigo/asynckit)
[![Coverage Status](https://img.shields.io/coveralls/alexindigo/asynckit/v0.4.0.svg?label=code+coverage&style=flat)](https://coveralls.io/github/alexindigo/asynckit?branch=master)
[![Dependency Status](https://img.shields.io/david/alexindigo/asynckit/v0.4.0.svg?style=flat)](https://david-dm.org/alexindigo/asynckit)
[![bitHound Overall Score](https://www.bithound.io/github/alexindigo/asynckit/badges/score.svg)](https://www.bithound.io/github/alexindigo/asynckit)
<!-- [![Readme](https://img.shields.io/badge/readme-tested-brightgreen.svg?style=flat)](https://www.npmjs.com/package/reamde) -->
AsyncKit provides a harness for `parallel` and `serial` iterators over a list of items represented by an array or an object.
Optionally it accepts an abort function (which should be returned synchronously by the iterator for each item) and terminates leftover jobs upon an error event. For a specific iteration order, built-in (`ascending` and `descending`) and custom sort helpers are also supported via the `asynckit.serialOrdered` method.
It ensures operations run asynchronously, which keeps behavior more stable and prevents `Maximum call stack size exceeded` errors from sync iterators.
| compression | size |
| :----------------- | -------: |
| asynckit.js | 12.34 kB |
| asynckit.min.js | 4.11 kB |
| asynckit.min.js.gz | 1.47 kB |
## Install
```sh
$ npm install --save asynckit
```
## Examples
### Parallel Jobs
Runs the iterator over the provided array in parallel. Stores output in the `result` array
at the matching positions. In the unlikely event of an error from one of the jobs,
it will terminate the rest of the active jobs (if an abort function is provided)
and return the error along with the salvaged data to the main callback function.
#### Input Array
```javascript
var parallel = require('asynckit').parallel
, assert = require('assert')
;
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
, target = []
;
parallel(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
});
// async job accepts one element from the array
// and a callback function
function asyncJob(item, cb)
{
// different delays (in ms) per item
var delay = item * 25;
// pretend different jobs take different time to finish
// and not in consequential order
var timeoutId = setTimeout(function() {
target.push(item);
cb(null, item * 2);
}, delay);
// allow to cancel "leftover" jobs upon error
// return function, invoking of which will abort this job
return clearTimeout.bind(null, timeoutId);
}
```
More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js).
#### Input Object
It also supports named jobs, listed via an object.
```javascript
var parallel = require('asynckit/parallel')
, assert = require('assert')
;
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
, expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ]
, target = []
, keys = []
;
parallel(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
assert.deepEqual(keys, expectedKeys);
});
// supports full value, key, callback (shortcut) interface
function asyncJob(item, key, cb)
{
// different delays (in ms) per item
var delay = item * 25;
// pretend different jobs take different time to finish
// and not in consequential order
var timeoutId = setTimeout(function() {
keys.push(key);
target.push(item);
cb(null, item * 2);
}, delay);
// allow to cancel "leftover" jobs upon error
// return function, invoking of which will abort this job
return clearTimeout.bind(null, timeoutId);
}
```
More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js).
### Serial Jobs
Runs the iterator over the provided array sequentially. Stores output in the `result` array
at the matching positions. In the unlikely event of an error from one of the jobs,
it will not proceed to the rest of the items in the list
and will return the error along with the salvaged data to the main callback function.
#### Input Array
```javascript
var serial = require('asynckit/serial')
, assert = require('assert')
;
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
, target = []
;
serial(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
});
// extended interface (item, key, callback)
// also supported for arrays
function asyncJob(item, key, cb)
{
target.push(key);
// it will be automatically made async
// even if the iterator "returns" in the same event loop
cb(null, item * 2);
}
```
More examples could be found in [test/test-serial-array.js](test/test-serial-array.js).
#### Input Object
It also supports named jobs, listed via an object.
```javascript
var serial = require('asynckit').serial
, assert = require('assert')
;
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
, expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, target = []
;
serial(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
});
// shortcut interface (item, callback)
// works for object as well as for the arrays
function asyncJob(item, cb)
{
target.push(item);
// it will be automatically made async
// even if the iterator "returns" in the same event loop
cb(null, item * 2);
}
```
More examples could be found in [test/test-serial-object.js](test/test-serial-object.js).
_Note: Since _object_ is an _unordered_ collection of properties,
it may produce unexpected results with sequential iterations.
Whenever order of the jobs' execution is important please use `serialOrdered` method._
### Ordered Serial Iterations
TBD
For an example, see the [compare-property](compare-property) package.
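Until that section is filled in, a minimal sketch of how `serialOrdered` and the exposed sort helpers are typically called, based only on the signature and helpers referenced elsewhere in this package (data values are illustrative):
```js
var asynckit = require('asynckit')

var source = { b: 2, a: 1, c: 3 }

// Iterate the object's keys in ascending order: a, b, c
asynckit.serialOrdered(source, function (item, key, cb) {
  cb(null, item * 2)
}, asynckit.serialOrdered.ascending, function (err, result) {
  // result -> { a: 2, b: 4, c: 6 }
})
```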
### Streaming interface
TBD
## Want to Know More?
More examples can be found in [test folder](test/).
Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions.
## License
AsyncKit is licensed under the MIT license.

76
node_modules/asynckit/bench.js generated vendored Normal file

@ -0,0 +1,76 @@
/* eslint no-console: "off" */
var asynckit = require('./')
, async = require('async')
, assert = require('assert')
, expected = 0
;
var Benchmark = require('benchmark');
var suite = new Benchmark.Suite;
var source = [];
for (var z = 1; z < 100; z++)
{
source.push(z);
expected += z;
}
suite
// add tests
.add('async.map', function(deferred)
{
var total = 0;
async.map(source,
function(i, cb)
{
setImmediate(function()
{
total += i;
cb(null, total);
});
},
function(err, result)
{
assert.ifError(err);
assert.equal(result[result.length - 1], expected);
deferred.resolve();
});
}, {'defer': true})
.add('asynckit.parallel', function(deferred)
{
var total = 0;
asynckit.parallel(source,
function(i, cb)
{
setImmediate(function()
{
total += i;
cb(null, total);
});
},
function(err, result)
{
assert.ifError(err);
assert.equal(result[result.length - 1], expected);
deferred.resolve();
});
}, {'defer': true})
// add listeners
.on('cycle', function(ev)
{
console.log(String(ev.target));
})
.on('complete', function()
{
console.log('Fastest is ' + this.filter('fastest').map('name'));
})
// run async
.run({ 'async': true });

6
node_modules/asynckit/index.js generated vendored Normal file

@ -0,0 +1,6 @@
module.exports =
{
parallel : require('./parallel.js'),
serial : require('./serial.js'),
serialOrdered : require('./serialOrdered.js')
};

29
node_modules/asynckit/lib/abort.js generated vendored Normal file

@ -0,0 +1,29 @@
// API
module.exports = abort;
/**
* Aborts leftover active jobs
*
* @param {object} state - current state object
*/
function abort(state)
{
Object.keys(state.jobs).forEach(clean.bind(state));
// reset leftover jobs
state.jobs = {};
}
/**
* Cleans up leftover job by invoking abort function for the provided job id
*
* @this state
* @param {string|number} key - job id to abort
*/
function clean(key)
{
if (typeof this.jobs[key] == 'function')
{
this.jobs[key]();
}
}

34
node_modules/asynckit/lib/async.js generated vendored Normal file

@ -0,0 +1,34 @@
var defer = require('./defer.js');
// API
module.exports = async;
/**
* Runs provided callback asynchronously
* even if callback itself is not
*
* @param {function} callback - callback to invoke
* @returns {function} - augmented callback
*/
function async(callback)
{
var isAsync = false;
// check if async happened
defer(function() { isAsync = true; });
return function async_callback(err, result)
{
if (isAsync)
{
callback(err, result);
}
else
{
defer(function nextTick_callback()
{
callback(err, result);
});
}
};
}
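A short usage sketch of the wrapper above, assuming it is required directly from this file (the `getCached` function is made up for illustration):
```js
var async = require('./async.js')

function getCached (key, callback) {
  var wrapped = async(callback)
  // Even though we call back synchronously here, the wrapper defers the
  // call, so callers always observe asynchronous completion.
  wrapped(null, 'cached:' + key)
}

getCached('a', function (err, value) {
  console.log('done', value) // logs second
})
console.log('sync code runs first')
```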

26
node_modules/asynckit/lib/defer.js generated vendored Normal file

@ -0,0 +1,26 @@
module.exports = defer;
/**
* Runs provided function on next iteration of the event loop
*
* @param {function} fn - function to run
*/
function defer(fn)
{
var nextTick = typeof setImmediate == 'function'
? setImmediate
: (
typeof process == 'object' && typeof process.nextTick == 'function'
? process.nextTick
: null
);
if (nextTick)
{
nextTick(fn);
}
else
{
setTimeout(fn, 0);
}
}

75
node_modules/asynckit/lib/iterate.js generated vendored Normal file

@ -0,0 +1,75 @@
var async = require('./async.js')
, abort = require('./abort.js')
;
// API
module.exports = iterate;
/**
* Iterates over each job object
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {object} state - current job status
* @param {function} callback - invoked when all elements processed
*/
function iterate(list, iterator, state, callback)
{
// store current index
var key = state['keyedList'] ? state['keyedList'][state.index] : state.index;
state.jobs[key] = runJob(iterator, key, list[key], function(error, output)
{
// don't repeat yourself
// skip secondary callbacks
if (!(key in state.jobs))
{
return;
}
// clean up jobs
delete state.jobs[key];
if (error)
{
// don't process rest of the results
// stop still active jobs
// and reset the list
abort(state);
}
else
{
state.results[key] = output;
}
// return salvaged results
callback(error, state.results);
});
}
/**
* Runs iterator over provided job element
*
* @param {function} iterator - iterator to invoke
* @param {string|number} key - key/index of the element in the list of jobs
* @param {mixed} item - job description
* @param {function} callback - invoked after iterator is done with the job
* @returns {function|mixed} - job abort function or something else
*/
function runJob(iterator, key, item, callback)
{
var aborter;
// allow shortcut if iterator expects only two arguments
if (iterator.length == 2)
{
aborter = iterator(item, async(callback));
}
// otherwise go with full three arguments
else
{
aborter = iterator(item, key, async(callback));
}
return aborter;
}

91
node_modules/asynckit/lib/readable_asynckit.js generated vendored Normal file

@ -0,0 +1,91 @@
var streamify = require('./streamify.js')
, defer = require('./defer.js')
;
// API
module.exports = ReadableAsyncKit;
/**
* Base constructor for all streams
* used to hold properties/methods
*/
function ReadableAsyncKit()
{
ReadableAsyncKit.super_.apply(this, arguments);
// list of active jobs
this.jobs = {};
// add stream methods
this.destroy = destroy;
this._start = _start;
this._read = _read;
}
/**
* Destroys readable stream,
* by aborting outstanding jobs
*
* @returns {void}
*/
function destroy()
{
if (this.destroyed)
{
return;
}
this.destroyed = true;
if (typeof this.terminator == 'function')
{
this.terminator();
}
}
/**
* Starts provided jobs in async manner
*
* @private
*/
function _start()
{
// first argument runner function
var runner = arguments[0]
// take away first argument
, args = Array.prototype.slice.call(arguments, 1)
// second argument - input data
, input = args[0]
// last argument - result callback
, endCb = streamify.callback.call(this, args[args.length - 1])
;
args[args.length - 1] = endCb;
// third argument - iterator
args[1] = streamify.iterator.call(this, args[1]);
// allow time for proper setup
defer(function()
{
if (!this.destroyed)
{
this.terminator = runner.apply(null, args);
}
else
{
endCb(null, Array.isArray(input) ? [] : {});
}
}.bind(this));
}
/**
* Implement _read to comply with Readable streams
* Doesn't really make sense for flowing object mode
*
* @private
*/
function _read()
{
}

25
node_modules/asynckit/lib/readable_parallel.js generated vendored Normal file
View File

@ -0,0 +1,25 @@
var parallel = require('../parallel.js');
// API
module.exports = ReadableParallel;
/**
* Streaming wrapper to `asynckit.parallel`
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} callback - invoked when all elements processed
* @returns {stream.Readable#}
*/
function ReadableParallel(list, iterator, callback)
{
if (!(this instanceof ReadableParallel))
{
return new ReadableParallel(list, iterator, callback);
}
// turn on object mode
ReadableParallel.super_.call(this, {objectMode: true});
this._start(parallel, list, iterator, callback);
}

25
node_modules/asynckit/lib/readable_serial.js generated vendored Normal file
View File

@ -0,0 +1,25 @@
var serial = require('../serial.js');
// API
module.exports = ReadableSerial;
/**
* Streaming wrapper to `asynckit.serial`
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} callback - invoked when all elements processed
* @returns {stream.Readable#}
*/
function ReadableSerial(list, iterator, callback)
{
if (!(this instanceof ReadableSerial))
{
return new ReadableSerial(list, iterator, callback);
}
// turn on object mode
ReadableSerial.super_.call(this, {objectMode: true});
this._start(serial, list, iterator, callback);
}

29
node_modules/asynckit/lib/readable_serial_ordered.js generated vendored Normal file
View File

@ -0,0 +1,29 @@
var serialOrdered = require('../serialOrdered.js');
// API
module.exports = ReadableSerialOrdered;
// expose sort helpers
module.exports.ascending = serialOrdered.ascending;
module.exports.descending = serialOrdered.descending;
/**
* Streaming wrapper to `asynckit.serialOrdered`
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} sortMethod - custom sort function
* @param {function} callback - invoked when all elements processed
* @returns {stream.Readable#}
*/
function ReadableSerialOrdered(list, iterator, sortMethod, callback)
{
if (!(this instanceof ReadableSerialOrdered))
{
return new ReadableSerialOrdered(list, iterator, sortMethod, callback);
}
// turn on object mode
ReadableSerialOrdered.super_.call(this, {objectMode: true});
this._start(serialOrdered, list, iterator, sortMethod, callback);
}

37
node_modules/asynckit/lib/state.js generated vendored Normal file
View File

@ -0,0 +1,37 @@
// API
module.exports = state;
/**
* Creates initial state object
* for iteration over list
*
* @param {array|object} list - list to iterate over
* @param {function|null} sortMethod - function to use for keys sort,
* or `null` to keep them as is
* @returns {object} - initial state object
*/
function state(list, sortMethod)
{
var isNamedList = !Array.isArray(list)
, initState =
{
index : 0,
keyedList: isNamedList || sortMethod ? Object.keys(list) : null,
jobs : {},
results : isNamedList ? {} : [],
size : isNamedList ? Object.keys(list).length : list.length
}
;
if (sortMethod)
{
    // sort array keys based on their values
    // sort an object's keys just on their own merit
initState.keyedList.sort(isNamedList ? sortMethod : function(a, b)
{
return sortMethod(list[a], list[b]);
});
}
return initState;
}

141
node_modules/asynckit/lib/streamify.js generated vendored Normal file
View File

@ -0,0 +1,141 @@
var async = require('./async.js');
// API
module.exports = {
iterator: wrapIterator,
callback: wrapCallback
};
/**
* Wraps iterators with long signature
*
* @this ReadableAsyncKit#
* @param {function} iterator - function to wrap
* @returns {function} - wrapped function
*/
function wrapIterator(iterator)
{
var stream = this;
return function(item, key, cb)
{
var aborter
, wrappedCb = async(wrapIteratorCallback.call(stream, cb, key))
;
stream.jobs[key] = wrappedCb;
// it's either shortcut (item, cb)
if (iterator.length == 2)
{
aborter = iterator(item, wrappedCb);
}
// or long format (item, key, cb)
else
{
aborter = iterator(item, key, wrappedCb);
}
return aborter;
};
}
/**
* Wraps provided callback function
 * allowing a snitch function to execute before
 * the real callback
*
* @this ReadableAsyncKit#
* @param {function} callback - function to wrap
* @returns {function} - wrapped function
*/
function wrapCallback(callback)
{
var stream = this;
var wrapped = function(error, result)
{
return finisher.call(stream, error, result, callback);
};
return wrapped;
}
/**
* Wraps provided iterator callback function
 * makes sure the snitch is only called once,
* but passes secondary calls to the original callback
*
* @this ReadableAsyncKit#
* @param {function} callback - callback to wrap
* @param {number|string} key - iteration key
* @returns {function} wrapped callback
*/
function wrapIteratorCallback(callback, key)
{
var stream = this;
return function(error, output)
{
// don't repeat yourself
if (!(key in stream.jobs))
{
callback(error, output);
return;
}
// clean up jobs
delete stream.jobs[key];
return streamer.call(stream, error, {key: key, value: output}, callback);
};
}
/**
* Stream wrapper for iterator callback
*
* @this ReadableAsyncKit#
* @param {mixed} error - error response
* @param {mixed} output - iterator output
* @param {function} callback - callback that expects iterator results
*/
function streamer(error, output, callback)
{
if (error && !this.error)
{
this.error = error;
this.pause();
this.emit('error', error);
// send back value only, as expected
callback(error, output && output.value);
return;
}
// stream stuff
this.push(output);
// back to original track
// send back value only, as expected
callback(error, output && output.value);
}
/**
* Stream wrapper for finishing callback
*
* @this ReadableAsyncKit#
* @param {mixed} error - error response
* @param {mixed} output - iterator output
* @param {function} callback - callback that expects final results
*/
function finisher(error, output, callback)
{
// signal end of the stream
// only for successfully finished streams
if (!error)
{
this.push(null);
}
// back to original track
callback(error, output);
}

29
node_modules/asynckit/lib/terminator.js generated vendored Normal file
View File

@ -0,0 +1,29 @@
var abort = require('./abort.js')
, async = require('./async.js')
;
// API
module.exports = terminator;
/**
* Terminates jobs in the attached state context
*
* @this AsyncKitState#
* @param {function} callback - final callback to invoke after termination
*/
function terminator(callback)
{
if (!Object.keys(this.jobs).length)
{
return;
}
// fast forward iteration index
this.index = this.size;
// abort jobs
abort(this);
// send back results we have so far
async(callback)(null, this.results);
}

63
node_modules/asynckit/package.json generated vendored Normal file
View File

@ -0,0 +1,63 @@
{
"name": "asynckit",
"version": "0.4.0",
"description": "Minimal async jobs utility library, with streams support",
"main": "index.js",
"scripts": {
"clean": "rimraf coverage",
"lint": "eslint *.js lib/*.js test/*.js",
"test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec",
"win-test": "tape test/test-*.js",
"browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec",
"report": "istanbul report",
"size": "browserify index.js | size-table asynckit",
"debug": "tape test/test-*.js"
},
"pre-commit": [
"clean",
"lint",
"test",
"browser",
"report",
"size"
],
"repository": {
"type": "git",
"url": "git+https://github.com/alexindigo/asynckit.git"
},
"keywords": [
"async",
"jobs",
"parallel",
"serial",
"iterator",
"array",
"object",
"stream",
"destroy",
"terminate",
"abort"
],
"author": "Alex Indigo <iam@alexindigo.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/alexindigo/asynckit/issues"
},
"homepage": "https://github.com/alexindigo/asynckit#readme",
"devDependencies": {
"browserify": "^13.0.0",
"browserify-istanbul": "^2.0.0",
"coveralls": "^2.11.9",
"eslint": "^2.9.0",
"istanbul": "^0.4.3",
"obake": "^0.1.2",
"phantomjs-prebuilt": "^2.1.7",
"pre-commit": "^1.1.3",
"reamde": "^1.1.0",
"rimraf": "^2.5.2",
"size-table": "^0.2.0",
"tap-spec": "^4.1.1",
"tape": "^4.5.1"
},
"dependencies": {}
}

43
node_modules/asynckit/parallel.js generated vendored Normal file
View File

@ -0,0 +1,43 @@
var iterate = require('./lib/iterate.js')
, initState = require('./lib/state.js')
, terminator = require('./lib/terminator.js')
;
// Public API
module.exports = parallel;
/**
* Runs iterator over provided array elements in parallel
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} callback - invoked when all elements processed
* @returns {function} - jobs terminator
*/
function parallel(list, iterator, callback)
{
var state = initState(list);
while (state.index < (state['keyedList'] || list).length)
{
iterate(list, iterator, state, function(error, result)
{
if (error)
{
callback(error, result);
return;
}
// looks like it's the last one
if (Object.keys(state.jobs).length === 0)
{
callback(null, state.results);
return;
}
});
state.index++;
}
return terminator.bind(state, callback);
}
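A small usage sketch of the `parallel` export above; the sample input and the doubling iterator are hypothetical.

```js
// Runs the iterator for every element at once; results keep the input's shape (an array here)
var parallel = require('asynckit/parallel.js');

parallel([1, 2, 3], function (item, cb) {
  // simulate an async job per element
  setTimeout(function () { cb(null, item * 2); }, 10);
}, function (err, results) {
  console.log(err, results); // expected: null [ 2, 4, 6 ]
});
```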

17
node_modules/asynckit/serial.js generated vendored Normal file
View File

@ -0,0 +1,17 @@
var serialOrdered = require('./serialOrdered.js');
// Public API
module.exports = serial;
/**
* Runs iterator over provided array elements in series
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} callback - invoked when all elements processed
* @returns {function} - jobs terminator
*/
function serial(list, iterator, callback)
{
return serialOrdered(list, iterator, null, callback);
}

75
node_modules/asynckit/serialOrdered.js generated vendored Normal file
View File

@ -0,0 +1,75 @@
var iterate = require('./lib/iterate.js')
, initState = require('./lib/state.js')
, terminator = require('./lib/terminator.js')
;
// Public API
module.exports = serialOrdered;
// sorting helpers
module.exports.ascending = ascending;
module.exports.descending = descending;
/**
* Runs iterator over provided sorted array elements in series
*
* @param {array|object} list - array or object (named list) to iterate over
* @param {function} iterator - iterator to run
* @param {function} sortMethod - custom sort function
* @param {function} callback - invoked when all elements processed
* @returns {function} - jobs terminator
*/
function serialOrdered(list, iterator, sortMethod, callback)
{
var state = initState(list, sortMethod);
iterate(list, iterator, state, function iteratorHandler(error, result)
{
if (error)
{
callback(error, result);
return;
}
state.index++;
// are we there yet?
if (state.index < (state['keyedList'] || list).length)
{
iterate(list, iterator, state, iteratorHandler);
return;
}
// done here
callback(null, state.results);
});
return terminator.bind(state, callback);
}
/*
* -- Sort methods
*/
/**
* sort helper to sort array elements in ascending order
*
* @param {mixed} a - an item to compare
* @param {mixed} b - an item to compare
* @returns {number} - comparison result
*/
function ascending(a, b)
{
return a < b ? -1 : a > b ? 1 : 0;
}
/**
* sort helper to sort array elements in descending order
*
* @param {mixed} a - an item to compare
* @param {mixed} b - an item to compare
* @returns {number} - comparison result
*/
function descending(a, b)
{
return -1 * ascending(a, b);
}
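A brief sketch of `serialOrdered` with the `ascending` helper exported above; the input values are hypothetical.

```js
var serialOrdered = require('asynckit/serialOrdered.js');

// Elements are visited in ascending value order ('a', 'b', 'c'),
// while results stay at their original positions in the output array
serialOrdered(['b', 'a', 'c'], function (item, key, cb) {
  cb(null, item.toUpperCase());
}, serialOrdered.ascending, function (err, results) {
  console.log(err, results); // expected: null [ 'B', 'A', 'C' ]
});
```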

21
node_modules/asynckit/stream.js generated vendored Normal file
View File

@ -0,0 +1,21 @@
var inherits = require('util').inherits
, Readable = require('stream').Readable
, ReadableAsyncKit = require('./lib/readable_asynckit.js')
, ReadableParallel = require('./lib/readable_parallel.js')
, ReadableSerial = require('./lib/readable_serial.js')
, ReadableSerialOrdered = require('./lib/readable_serial_ordered.js')
;
// API
module.exports =
{
parallel : ReadableParallel,
serial : ReadableSerial,
serialOrdered : ReadableSerialOrdered,
};
inherits(ReadableAsyncKit, Readable);
inherits(ReadableParallel, ReadableAsyncKit);
inherits(ReadableSerial, ReadableAsyncKit);
inherits(ReadableSerialOrdered, ReadableAsyncKit);
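A rough sketch of the streaming variants wired up above. It assumes the object-mode `data` events carry `{key, value}` pairs as produced by `lib/streamify.js`; the inputs are hypothetical.

```js
var parallelStream = require('asynckit/stream.js').parallel;

parallelStream([10, 20, 30], function (item, cb) {
  setTimeout(function () { cb(null, item + 1); }, 5);
}, function (err, results) {
  console.log('done:', err, results); // expected: done: null [ 11, 21, 31 ]
})
.on('data', function (chunk) {
  console.log(chunk); // e.g. { key: 0, value: 11 }
});
```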

1014
node_modules/axios/CHANGELOG.md generated vendored Normal file

File diff suppressed because it is too large

View File

@ -1,5 +1,4 @@
The MIT License (MIT)
Copyright (c) 2014-present Matt Zabriskie
Copyright (c) 2012-2014 Raynos.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

1182
node_modules/axios/README.md generated vendored Normal file

File diff suppressed because it is too large

5
node_modules/axios/SECURITY.md generated vendored Normal file
View File

@ -0,0 +1,5 @@
# Reporting a Vulnerability
If you discover a security vulnerability within axios, please submit a report via [huntr.dev](https://huntr.dev/bounties/?target=https%3A%2F%2Fgithub.com%2Faxios%2Faxios). Bounties and CVEs are automatically managed and allocated via the platform.
All security vulnerabilities will be promptly addressed.

169
node_modules/axios/UPGRADE_GUIDE.md generated vendored Normal file
View File

@ -0,0 +1,169 @@
# Upgrade Guide
## 0.18.x -> 0.19.0
### HTTPS Proxies
Routing through an https proxy now requires setting the `protocol` attribute of the proxy configuration to `https`
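For example (a minimal sketch; the host, port and credentials are placeholders):

```js
axios.get('https://example.org/', {
  proxy: {
    protocol: 'https',
    host: '127.0.0.1',
    port: 9000,
    // auth is optional
    auth: {
      username: 'user',
      password: 'pass'
    }
  }
});
```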
## 0.15.x -> 0.16.0
### `Promise` Type Declarations
The `Promise` type declarations have been removed from the axios typings in favor of the built-in type declarations. If you use axios in a TypeScript project that targets `ES5`, please make sure to include the `es2015.promise` lib. Please see [this post](https://blog.mariusschulz.com/2016/11/25/typescript-2-0-built-in-type-declarations) for details.
## 0.13.x -> 0.14.0
### TypeScript Definitions
The axios TypeScript definitions have been updated to match the axios API and use the ES2015 module syntax.
Please use the following `import` statement to import axios in TypeScript:
```typescript
import axios from 'axios';
axios.get('/foo')
.then(response => console.log(response))
.catch(error => console.log(error));
```
### `agent` Config Option
The `agent` config option has been replaced with two new options: `httpAgent` and `httpsAgent`. Please use them instead.
```js
{
// Define a custom agent for HTTP
httpAgent: new http.Agent({ keepAlive: true }),
// Define a custom agent for HTTPS
httpsAgent: new https.Agent({ keepAlive: true })
}
```
### `progress` Config Option
The `progress` config option has been replaced with the `onUploadProgress` and `onDownloadProgress` options.
```js
{
// Define a handler for upload progress events
onUploadProgress: function (progressEvent) {
// ...
},
// Define a handler for download progress events
onDownloadProgress: function (progressEvent) {
// ...
}
}
```
## 0.12.x -> 0.13.0
The `0.13.0` release contains several changes to custom adapters and error handling.
### Error Handling
Prior to this release, an error could be either a server response with a bad status code or an actual `Error`. With this release, the Promise will always reject with an `Error`. In the case that a response was received, the `Error` will also include the response.
```js
axios.get('/user/12345')
.catch((error) => {
console.log(error.message);
console.log(error.code); // Not always specified
console.log(error.config); // The config that was used to make the request
console.log(error.response); // Only available if response was received from the server
});
```
### Request Adapters
This release changes a few things about how request adapters work. Please take note if you are using your own custom adapter.
1. Response transformer is now called outside of adapter.
2. Request adapter returns a `Promise`.
This means that you no longer need to invoke `transformData` on response data. You will also no longer receive `resolve` and `reject` as arguments in your adapter.
Previous code:
```js
function myAdapter(resolve, reject, config) {
var response = {
data: transformData(
responseData,
responseHeaders,
config.transformResponse
),
status: request.status,
statusText: request.statusText,
headers: responseHeaders
};
settle(resolve, reject, response);
}
```
New code:
```js
function myAdapter(config) {
return new Promise(function (resolve, reject) {
var response = {
data: responseData,
status: request.status,
statusText: request.statusText,
headers: responseHeaders
};
settle(resolve, reject, response);
});
}
```
See the related commits for more details:
- [Response transformers](https://github.com/axios/axios/commit/10eb23865101f9347570552c04e9d6211376e25e)
- [Request adapter Promise](https://github.com/axios/axios/commit/157efd5615890301824e3121cc6c9d2f9b21f94a)
## 0.5.x -> 0.6.0
The `0.6.0` release contains mostly bug fixes, but there are a couple things to be aware of when upgrading.
### ES6 Promise Polyfill
Up until the `0.6.0` release ES6 `Promise` was being polyfilled using [es6-promise](https://github.com/jakearchibald/es6-promise). With this release, the polyfill has been removed, and you will need to supply it yourself if your environment needs it.
```js
require('es6-promise').polyfill();
var axios = require('axios');
```
This will polyfill the global environment, and only needs to be done once.
### `axios.success`/`axios.error`
The `success` and `error` aliases were deprecated in [0.4.0](https://github.com/axios/axios/blob/master/CHANGELOG.md#040-oct-03-2014). As of this release they have been removed entirely. Please use `axios.then` and `axios.catch` instead.
```js
axios.get('some/url')
.then(function (res) {
/* ... */
})
.catch(function (err) {
/* ... */
});
```
### UMD
Previous versions of axios shipped with an AMD, CommonJS, and Global build. This has all been rolled into a single UMD build.
```js
// AMD
require(['bower_components/axios/dist/axios'], function (axios) {
/* ... */
});
// CommonJS
var axios = require('axios/dist/axios');
```

19
node_modules/axios/bin/check-build-version.js generated vendored Normal file
View File

@ -0,0 +1,19 @@
const fs = require('fs');
const assert = require('assert');
const axios = require('../index.js');
const {version} = JSON.parse(fs.readFileSync('./package.json'));
console.log('Checking versions...\n----------------------------')
console.log(`Package version: v${version}`);
console.log(`Axios version: v${axios.VERSION}`);
console.log(`----------------------------`);
assert.strictEqual(
version,
axios.VERSION,
`Version mismatch between package and Axios ${version} != ${axios.VERSION}`
);
console.log('✔️ PASSED\n');

22
node_modules/axios/bin/ssl_hotfix.js generated vendored Normal file
View File

@ -0,0 +1,22 @@
const {spawn} = require('child_process');
const args = process.argv.slice(2);
console.log(`Running ${args.join(' ')} on ${process.version}\n`);
const match = /v(\d+)/.exec(process.version);
const isHotfixNeeded = match && match[1] > 16;
isHotfixNeeded && console.warn('Setting --openssl-legacy-provider as ssl hotfix');
const test = spawn('cross-env',
isHotfixNeeded ? ['NODE_OPTIONS=--openssl-legacy-provider', ...args] : args, {
shell: true,
stdio: 'inherit'
}
);
test.on('exit', function (code) {
process.exit(code)
})

2377
node_modules/axios/dist/axios.js generated vendored Normal file

File diff suppressed because it is too large

1
node_modules/axios/dist/axios.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

2
node_modules/axios/dist/axios.min.js generated vendored Normal file

File diff suppressed because one or more lines are too long

1
node_modules/axios/dist/axios.min.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

2369
node_modules/axios/dist/esm/axios.js generated vendored Normal file

File diff suppressed because it is too large

1
node_modules/axios/dist/esm/axios.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

2
node_modules/axios/dist/esm/axios.min.js generated vendored Normal file

File diff suppressed because one or more lines are too long

1
node_modules/axios/dist/esm/axios.min.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

340
node_modules/axios/index.d.ts generated vendored Normal file
View File

@ -0,0 +1,340 @@
// TypeScript Version: 4.1
type HeaderValue = string | string[] | number | boolean;
type AxiosHeaders = Record<string, HeaderValue | Record<Method & CommonHeaders, HeaderValue>>;
type MethodsHeaders = {
[Key in Method as Lowercase<Key>]: AxiosHeaders;
};
interface CommonHeaders {
common: AxiosHeaders;
}
export type AxiosRequestHeaders = Partial<AxiosHeaders & MethodsHeaders & CommonHeaders>;
export type AxiosResponseHeaders = Partial<Record<string, string>> & {
"set-cookie"?: string[]
};
export interface AxiosRequestTransformer {
(data: any, headers: AxiosRequestHeaders): any;
}
export interface AxiosResponseTransformer {
(data: any, headers?: AxiosResponseHeaders, status?: number): any;
}
export interface AxiosAdapter {
(config: AxiosRequestConfig): AxiosPromise;
}
export interface AxiosBasicCredentials {
username: string;
password: string;
}
export interface AxiosProxyConfig {
host: string;
port: number;
auth?: {
username: string;
password: string;
};
protocol?: string;
}
export type Method =
| 'get' | 'GET'
| 'delete' | 'DELETE'
| 'head' | 'HEAD'
| 'options' | 'OPTIONS'
| 'post' | 'POST'
| 'put' | 'PUT'
| 'patch' | 'PATCH'
| 'purge' | 'PURGE'
| 'link' | 'LINK'
| 'unlink' | 'UNLINK';
export type ResponseType =
| 'arraybuffer'
| 'blob'
| 'document'
| 'json'
| 'text'
| 'stream';
export type responseEncoding =
| 'ascii' | 'ASCII'
| 'ansi' | 'ANSI'
| 'binary' | 'BINARY'
| 'base64' | 'BASE64'
| 'base64url' | 'BASE64URL'
| 'hex' | 'HEX'
| 'latin1' | 'LATIN1'
| 'ucs-2' | 'UCS-2'
| 'ucs2' | 'UCS2'
| 'utf-8' | 'UTF-8'
| 'utf8' | 'UTF8'
| 'utf16le' | 'UTF16LE';
export interface TransitionalOptions {
silentJSONParsing?: boolean;
forcedJSONParsing?: boolean;
clarifyTimeoutError?: boolean;
}
export interface GenericAbortSignal {
aborted: boolean;
onabort: ((...args: any) => any) | null;
addEventListener: (...args: any) => any;
removeEventListener: (...args: any) => any;
}
export interface FormDataVisitorHelpers {
defaultVisitor: SerializerVisitor;
convertValue: (value: any) => any;
isVisitable: (value: any) => boolean;
}
export interface SerializerVisitor {
(
this: GenericFormData,
value: any,
key: string | number,
path: null | Array<string | number>,
helpers: FormDataVisitorHelpers
): boolean;
}
export interface SerializerOptions {
visitor?: SerializerVisitor;
dots?: boolean;
metaTokens?: boolean;
indexes?: boolean | null;
}
// tslint:disable-next-line
export interface FormSerializerOptions extends SerializerOptions {
}
export interface ParamEncoder {
(value: any, defaultEncoder: (value: any) => any): any;
}
export interface CustomParamsSerializer {
(params: Record<string, any>, options?: ParamsSerializerOptions): string;
}
export interface ParamsSerializerOptions extends SerializerOptions {
encode?: ParamEncoder;
serialize?: CustomParamsSerializer;
}
export interface AxiosRequestConfig<D = any> {
url?: string;
method?: Method | string;
baseURL?: string;
transformRequest?: AxiosRequestTransformer | AxiosRequestTransformer[];
transformResponse?: AxiosResponseTransformer | AxiosResponseTransformer[];
headers?: AxiosRequestHeaders;
params?: any;
paramsSerializer?: ParamsSerializerOptions | CustomParamsSerializer;
data?: D;
timeout?: number;
timeoutErrorMessage?: string;
withCredentials?: boolean;
adapter?: AxiosAdapter;
auth?: AxiosBasicCredentials;
responseType?: ResponseType;
responseEncoding?: responseEncoding | string;
xsrfCookieName?: string;
xsrfHeaderName?: string;
onUploadProgress?: (progressEvent: ProgressEvent) => void;
onDownloadProgress?: (progressEvent: ProgressEvent) => void;
maxContentLength?: number;
validateStatus?: ((status: number) => boolean) | null;
maxBodyLength?: number;
maxRedirects?: number;
beforeRedirect?: (options: Record<string, any>, responseDetails: {headers: Record<string, string>}) => void;
socketPath?: string | null;
httpAgent?: any;
httpsAgent?: any;
proxy?: AxiosProxyConfig | false;
cancelToken?: CancelToken;
decompress?: boolean;
transitional?: TransitionalOptions;
signal?: GenericAbortSignal;
insecureHTTPParser?: boolean;
env?: {
FormData?: new (...args: any[]) => object;
};
formSerializer?: FormSerializerOptions;
withXSRFToken?: boolean | ((config: AxiosRequestConfig) => boolean | undefined);
}
export interface HeadersDefaults {
common: AxiosRequestHeaders;
delete: AxiosRequestHeaders;
get: AxiosRequestHeaders;
head: AxiosRequestHeaders;
post: AxiosRequestHeaders;
put: AxiosRequestHeaders;
patch: AxiosRequestHeaders;
options?: AxiosRequestHeaders;
purge?: AxiosRequestHeaders;
link?: AxiosRequestHeaders;
unlink?: AxiosRequestHeaders;
}
export interface AxiosDefaults<D = any> extends Omit<AxiosRequestConfig<D>, 'headers'> {
headers: HeadersDefaults;
}
export interface CreateAxiosDefaults<D = any> extends Omit<AxiosRequestConfig<D>, 'headers'> {
headers?: AxiosRequestHeaders | Partial<HeadersDefaults>;
}
export interface AxiosResponse<T = any, D = any> {
data: T;
status: number;
statusText: string;
headers: AxiosResponseHeaders;
config: AxiosRequestConfig<D>;
request?: any;
}
export class AxiosError<T = unknown, D = any> extends Error {
constructor(
message?: string,
code?: string,
config?: AxiosRequestConfig<D>,
request?: any,
response?: AxiosResponse<T, D>
);
config?: AxiosRequestConfig<D>;
code?: string;
request?: any;
response?: AxiosResponse<T, D>;
isAxiosError: boolean;
status?: number;
toJSON: () => object;
cause?: Error;
static readonly ERR_FR_TOO_MANY_REDIRECTS = "ERR_FR_TOO_MANY_REDIRECTS";
static readonly ERR_BAD_OPTION_VALUE = "ERR_BAD_OPTION_VALUE";
static readonly ERR_BAD_OPTION = "ERR_BAD_OPTION";
static readonly ERR_NETWORK = "ERR_NETWORK";
static readonly ERR_DEPRECATED = "ERR_DEPRECATED";
static readonly ERR_BAD_RESPONSE = "ERR_BAD_RESPONSE";
static readonly ERR_BAD_REQUEST = "ERR_BAD_REQUEST";
static readonly ERR_NOT_SUPPORT = "ERR_NOT_SUPPORT";
static readonly ERR_INVALID_URL = "ERR_INVALID_URL";
static readonly ERR_CANCELED = "ERR_CANCELED";
static readonly ECONNABORTED = "ECONNABORTED";
static readonly ETIMEDOUT = "ETIMEDOUT";
}
export class CanceledError<T> extends AxiosError<T> {
}
export type AxiosPromise<T = any> = Promise<AxiosResponse<T>>;
export interface CancelStatic {
new (message?: string): Cancel;
}
export interface Cancel {
message: string | undefined;
}
export interface Canceler {
(message?: string, config?: AxiosRequestConfig, request?: any): void;
}
export interface CancelTokenStatic {
new (executor: (cancel: Canceler) => void): CancelToken;
source(): CancelTokenSource;
}
export interface CancelToken {
promise: Promise<Cancel>;
reason?: Cancel;
throwIfRequested(): void;
}
export interface CancelTokenSource {
token: CancelToken;
cancel: Canceler;
}
export interface AxiosInterceptorOptions {
synchronous?: boolean;
runWhen?: (config: AxiosRequestConfig) => boolean;
}
export interface AxiosInterceptorManager<V> {
use<T = V>(onFulfilled?: (value: V) => T | Promise<T>, onRejected?: (error: any) => any, options?: AxiosInterceptorOptions): number;
eject(id: number): void;
}
export class Axios {
constructor(config?: AxiosRequestConfig);
defaults: AxiosDefaults;
interceptors: {
request: AxiosInterceptorManager<AxiosRequestConfig>;
response: AxiosInterceptorManager<AxiosResponse>;
};
getUri(config?: AxiosRequestConfig): string;
request<T = any, R = AxiosResponse<T>, D = any>(config: AxiosRequestConfig<D>): Promise<R>;
get<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
delete<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
head<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
options<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): Promise<R>;
post<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
put<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
patch<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
postForm<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
putForm<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
patchForm<T = any, R = AxiosResponse<T>, D = any>(url: string, data?: D, config?: AxiosRequestConfig<D>): Promise<R>;
}
export interface AxiosInstance extends Axios {
<T = any, R = AxiosResponse<T>, D = any>(config: AxiosRequestConfig<D>): AxiosPromise<R>;
<T = any, R = AxiosResponse<T>, D = any>(url: string, config?: AxiosRequestConfig<D>): AxiosPromise<R>;
defaults: Omit<AxiosDefaults, 'headers'> & {
headers: HeadersDefaults & {
[key: string]: string | number | boolean | undefined
}
};
}
export interface GenericFormData {
append(name: string, value: any, options?: any): any;
}
export interface GenericHTMLFormElement {
name: string;
method: string;
submit(): void;
}
export interface AxiosStatic extends AxiosInstance {
create(config?: CreateAxiosDefaults): AxiosInstance;
Cancel: CancelStatic;
CancelToken: CancelTokenStatic;
Axios: typeof Axios;
AxiosError: typeof AxiosError;
readonly VERSION: string;
isCancel(value: any): value is Cancel;
all<T>(values: Array<T | Promise<T>>): Promise<T[]>;
spread<T, R>(callback: (...args: T[]) => R): (array: T[]) => R;
isAxiosError<T = any, D = any>(payload: any): payload is AxiosError<T, D>;
toFormData(sourceObj: object, targetFormData?: GenericFormData, options?: FormSerializerOptions): GenericFormData;
formToJSON(form: GenericFormData|GenericHTMLFormElement): object;
}
declare const axios: AxiosStatic;
export default axios;

1
node_modules/axios/index.js generated vendored Normal file
View File

@ -0,0 +1 @@
module.exports = require('./lib/axios');

37
node_modules/axios/lib/adapters/README.md generated vendored Normal file
View File

@ -0,0 +1,37 @@
# axios // adapters
The modules under `adapters/` handle dispatching a request and settling a returned `Promise` once a response is received.
## Example
```js
var settle = require('./../core/settle');
module.exports = function myAdapter(config) {
// At this point:
// - config has been merged with defaults
// - request transformers have already run
// - request interceptors have already run
// Make the request using config provided
// Upon response settle the Promise
return new Promise(function(resolve, reject) {
var response = {
data: responseData,
status: request.status,
statusText: request.statusText,
headers: responseHeaders,
config: config,
request: request
};
settle(resolve, reject, response);
// From here:
// - response transformers will run
// - response interceptors will run
});
}
```
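A custom adapter such as `myAdapter` above can then be supplied per request through the `adapter` config option (a sketch; the URL is a placeholder):

```js
var axios = require('axios');

axios.get('/user/12345', { adapter: myAdapter })
  .then(function (response) {
    console.log(response.data);
  });
```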

463
node_modules/axios/lib/adapters/http.js generated vendored Normal file
View File

@ -0,0 +1,463 @@
'use strict';
var utils = require('./../utils');
var settle = require('./../core/settle');
var buildFullPath = require('../core/buildFullPath');
var buildURL = require('./../helpers/buildURL');
var getProxyForUrl = require('proxy-from-env').getProxyForUrl;
var http = require('http');
var https = require('https');
var httpFollow = require('follow-redirects/http');
var httpsFollow = require('follow-redirects/https');
var url = require('url');
var zlib = require('zlib');
var VERSION = require('./../env/data').version;
var transitionalDefaults = require('../defaults/transitional');
var AxiosError = require('../core/AxiosError');
var CanceledError = require('../cancel/CanceledError');
var platform = require('../platform');
var fromDataURI = require('../helpers/fromDataURI');
var stream = require('stream');
var isHttps = /https:?/;
var supportedProtocols = platform.protocols.map(function(protocol) {
return protocol + ':';
});
function dispatchBeforeRedirect(options) {
if (options.beforeRedirects.proxy) {
options.beforeRedirects.proxy(options);
}
if (options.beforeRedirects.config) {
options.beforeRedirects.config(options);
}
}
/**
*
* @param {http.ClientRequestArgs} options
* @param {AxiosProxyConfig} configProxy
* @param {string} location
*/
function setProxy(options, configProxy, location) {
var proxy = configProxy;
if (!proxy && proxy !== false) {
var proxyUrl = getProxyForUrl(location);
if (proxyUrl) {
proxy = url.parse(proxyUrl);
// replace 'host' since the proxy object is not a URL object
proxy.host = proxy.hostname;
}
}
if (proxy) {
// Basic proxy authorization
if (proxy.auth) {
// Support proxy auth object form
if (proxy.auth.username || proxy.auth.password) {
proxy.auth = (proxy.auth.username || '') + ':' + (proxy.auth.password || '');
}
var base64 = Buffer
.from(proxy.auth, 'utf8')
.toString('base64');
options.headers['Proxy-Authorization'] = 'Basic ' + base64;
}
options.headers.host = options.hostname + (options.port ? ':' + options.port : '');
options.hostname = proxy.host;
options.host = proxy.host;
options.port = proxy.port;
options.path = location;
if (proxy.protocol) {
options.protocol = proxy.protocol;
}
}
options.beforeRedirects.proxy = function beforeRedirect(redirectOptions) {
// Configure proxy for redirected request, passing the original config proxy to apply
// the exact same logic as if the redirected request was performed by axios directly.
setProxy(redirectOptions, configProxy, redirectOptions.href);
};
}
/*eslint consistent-return:0*/
module.exports = function httpAdapter(config) {
return new Promise(function dispatchHttpRequest(resolvePromise, rejectPromise) {
var onCanceled;
function done() {
if (config.cancelToken) {
config.cancelToken.unsubscribe(onCanceled);
}
if (config.signal) {
config.signal.removeEventListener('abort', onCanceled);
}
}
var resolve = function resolve(value) {
done();
resolvePromise(value);
};
var rejected = false;
var reject = function reject(value) {
done();
rejected = true;
rejectPromise(value);
};
var data = config.data;
var responseType = config.responseType;
var responseEncoding = config.responseEncoding;
var method = config.method.toUpperCase();
// Parse url
var fullPath = buildFullPath(config.baseURL, config.url);
var parsed = url.parse(fullPath);
var protocol = parsed.protocol || supportedProtocols[0];
if (protocol === 'data:') {
var convertedData;
if (method !== 'GET') {
return settle(resolve, reject, {
status: 405,
statusText: 'method not allowed',
headers: {},
config: config
});
}
try {
convertedData = fromDataURI(config.url, responseType === 'blob', {
Blob: config.env && config.env.Blob
});
} catch (err) {
throw AxiosError.from(err, AxiosError.ERR_BAD_REQUEST, config);
}
if (responseType === 'text') {
convertedData = convertedData.toString(responseEncoding);
if (!responseEncoding || responseEncoding === 'utf8') {
data = utils.stripBOM(convertedData);
}
} else if (responseType === 'stream') {
convertedData = stream.Readable.from(convertedData);
}
return settle(resolve, reject, {
data: convertedData,
status: 200,
statusText: 'OK',
headers: {},
config: config
});
}
if (supportedProtocols.indexOf(protocol) === -1) {
return reject(new AxiosError(
'Unsupported protocol ' + protocol,
AxiosError.ERR_BAD_REQUEST,
config
));
}
var headers = config.headers;
var headerNames = {};
Object.keys(headers).forEach(function storeLowerName(name) {
headerNames[name.toLowerCase()] = name;
});
// Set User-Agent (required by some servers)
// See https://github.com/axios/axios/issues/69
if ('user-agent' in headerNames) {
// User-Agent is specified; handle case where no UA header is desired
if (!headers[headerNames['user-agent']]) {
delete headers[headerNames['user-agent']];
}
// Otherwise, use specified value
} else {
// Only set header if it hasn't been set in config
headers['User-Agent'] = 'axios/' + VERSION;
}
// support for https://www.npmjs.com/package/form-data api
if (utils.isFormData(data) && utils.isFunction(data.getHeaders)) {
Object.assign(headers, data.getHeaders());
} else if (data && !utils.isStream(data)) {
if (Buffer.isBuffer(data)) {
// Nothing to do...
} else if (utils.isArrayBuffer(data)) {
data = Buffer.from(new Uint8Array(data));
} else if (utils.isString(data)) {
data = Buffer.from(data, 'utf-8');
} else {
return reject(new AxiosError(
'Data after transformation must be a string, an ArrayBuffer, a Buffer, or a Stream',
AxiosError.ERR_BAD_REQUEST,
config
));
}
if (config.maxBodyLength > -1 && data.length > config.maxBodyLength) {
return reject(new AxiosError(
'Request body larger than maxBodyLength limit',
AxiosError.ERR_BAD_REQUEST,
config
));
}
// Add Content-Length header if data exists
if (!headerNames['content-length']) {
headers['Content-Length'] = data.length;
}
}
// HTTP basic authentication
var auth = undefined;
if (config.auth) {
var username = config.auth.username || '';
var password = config.auth.password || '';
auth = username + ':' + password;
}
if (!auth && parsed.auth) {
var urlAuth = parsed.auth.split(':');
var urlUsername = urlAuth[0] || '';
var urlPassword = urlAuth[1] || '';
auth = urlUsername + ':' + urlPassword;
}
if (auth && headerNames.authorization) {
delete headers[headerNames.authorization];
}
try {
buildURL(parsed.path, config.params, config.paramsSerializer).replace(/^\?/, '');
} catch (err) {
var customErr = new Error(err.message);
customErr.config = config;
customErr.url = config.url;
customErr.exists = true;
reject(customErr);
}
var options = {
path: buildURL(parsed.path, config.params, config.paramsSerializer).replace(/^\?/, ''),
method: method,
headers: headers,
agents: { http: config.httpAgent, https: config.httpsAgent },
auth: auth,
protocol: protocol,
beforeRedirect: dispatchBeforeRedirect,
beforeRedirects: {}
};
if (config.socketPath) {
options.socketPath = config.socketPath;
} else {
options.hostname = parsed.hostname;
options.port = parsed.port;
setProxy(options, config.proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path);
}
var transport;
var isHttpsRequest = isHttps.test(options.protocol);
options.agent = isHttpsRequest ? config.httpsAgent : config.httpAgent;
if (config.transport) {
transport = config.transport;
} else if (config.maxRedirects === 0) {
transport = isHttpsRequest ? https : http;
} else {
if (config.maxRedirects) {
options.maxRedirects = config.maxRedirects;
}
if (config.beforeRedirect) {
options.beforeRedirects.config = config.beforeRedirect;
}
transport = isHttpsRequest ? httpsFollow : httpFollow;
}
if (config.maxBodyLength > -1) {
options.maxBodyLength = config.maxBodyLength;
} else {
      // follow-redirects does not skip the comparison, so it should always succeed for axios' default of -1 (unlimited)
options.maxBodyLength = Infinity;
}
if (config.insecureHTTPParser) {
options.insecureHTTPParser = config.insecureHTTPParser;
}
// Create the request
var req = transport.request(options, function handleResponse(res) {
if (req.aborted) return;
// uncompress the response body transparently if required
var responseStream = res;
// return the last request in case of redirects
var lastRequest = res.req || req;
      // if decompress is disabled we should not decompress
if (config.decompress !== false) {
        // if there is no content, but the headers still say it is encoded,
        // remove the header so as not to confuse downstream operations
if (data && data.length === 0 && res.headers['content-encoding']) {
delete res.headers['content-encoding'];
}
switch (res.headers['content-encoding']) {
/*eslint default-case:0*/
case 'gzip':
case 'compress':
case 'deflate':
// add the unzipper to the body stream processing pipeline
responseStream = responseStream.pipe(zlib.createUnzip());
// remove the content-encoding in order to not confuse downstream operations
delete res.headers['content-encoding'];
break;
}
}
var response = {
status: res.statusCode,
statusText: res.statusMessage,
headers: res.headers,
config: config,
request: lastRequest
};
if (responseType === 'stream') {
response.data = responseStream;
settle(resolve, reject, response);
} else {
var responseBuffer = [];
var totalResponseBytes = 0;
responseStream.on('data', function handleStreamData(chunk) {
responseBuffer.push(chunk);
totalResponseBytes += chunk.length;
// make sure the content length is not over the maxContentLength if specified
if (config.maxContentLength > -1 && totalResponseBytes > config.maxContentLength) {
            // stream.destroy() emits the 'aborted' event before reject() is called on Node.js v16
rejected = true;
responseStream.destroy();
reject(new AxiosError('maxContentLength size of ' + config.maxContentLength + ' exceeded',
AxiosError.ERR_BAD_RESPONSE, config, lastRequest));
}
});
responseStream.on('aborted', function handlerStreamAborted() {
if (rejected) {
return;
}
responseStream.destroy();
reject(new AxiosError(
'maxContentLength size of ' + config.maxContentLength + ' exceeded',
AxiosError.ERR_BAD_RESPONSE,
config,
lastRequest
));
});
responseStream.on('error', function handleStreamError(err) {
if (req.aborted) return;
reject(AxiosError.from(err, null, config, lastRequest));
});
responseStream.on('end', function handleStreamEnd() {
try {
var responseData = responseBuffer.length === 1 ? responseBuffer[0] : Buffer.concat(responseBuffer);
if (responseType !== 'arraybuffer') {
responseData = responseData.toString(responseEncoding);
if (!responseEncoding || responseEncoding === 'utf8') {
responseData = utils.stripBOM(responseData);
}
}
response.data = responseData;
} catch (err) {
reject(AxiosError.from(err, null, config, response.request, response));
}
settle(resolve, reject, response);
});
}
});
// Handle errors
req.on('error', function handleRequestError(err) {
// @todo remove
// if (req.aborted && err.code !== AxiosError.ERR_FR_TOO_MANY_REDIRECTS) return;
reject(AxiosError.from(err, null, config, req));
});
    // set TCP keep-alive to prevent the peer from dropping the connection
req.on('socket', function handleRequestSocket(socket) {
      // the default interval for sending a keep-alive probe is 1 minute
socket.setKeepAlive(true, 1000 * 60);
});
// Handle request timeout
if (config.timeout) {
      // This forces an int timeout to avoid problems if the `req` interface doesn't handle other types.
var timeout = parseInt(config.timeout, 10);
if (isNaN(timeout)) {
reject(new AxiosError(
'error trying to parse `config.timeout` to int',
AxiosError.ERR_BAD_OPTION_VALUE,
config,
req
));
return;
}
      // Sometimes the response is very slow and never arrives, and the connect event is blocked by the event loop.
      // The timer callback then fires and abort() is invoked before the connection is made, producing "socket hang up" with code ECONNRESET.
      // With a large number of requests, Node.js keeps more and more hung sockets in the background,
      // and those hung sockets slowly devour CPU.
      // ClientRequest.setTimeout fires after the specified milliseconds and ensures abort() is only invoked after the connection is established.
req.setTimeout(timeout, function handleRequestTimeout() {
req.abort();
var timeoutErrorMessage = config.timeout ? 'timeout of ' + config.timeout + 'ms exceeded' : 'timeout exceeded';
var transitional = config.transitional || transitionalDefaults;
if (config.timeoutErrorMessage) {
timeoutErrorMessage = config.timeoutErrorMessage;
}
reject(new AxiosError(
timeoutErrorMessage,
transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED,
config,
req
));
});
}
if (config.cancelToken || config.signal) {
// Handle cancellation
// eslint-disable-next-line func-names
onCanceled = function(cancel) {
if (req.aborted) return;
req.abort();
reject(!cancel || cancel.type ? new CanceledError(null, config, req) : cancel);
};
config.cancelToken && config.cancelToken.subscribe(onCanceled);
if (config.signal) {
config.signal.aborted ? onCanceled() : config.signal.addEventListener('abort', onCanceled);
}
}
// Send the request
if (utils.isStream(data)) {
data.on('error', function handleStreamError(err) {
reject(AxiosError.from(err, config, null, req));
}).pipe(req);
} else {
req.end(data);
}
});
};

226
node_modules/axios/lib/adapters/xhr.js generated vendored Normal file
View File

@ -0,0 +1,226 @@
'use strict';
var utils = require('./../utils');
var settle = require('./../core/settle');
var cookies = require('./../helpers/cookies');
var buildURL = require('./../helpers/buildURL');
var buildFullPath = require('../core/buildFullPath');
var parseHeaders = require('./../helpers/parseHeaders');
var isURLSameOrigin = require('./../helpers/isURLSameOrigin');
var transitionalDefaults = require('../defaults/transitional');
var AxiosError = require('../core/AxiosError');
var CanceledError = require('../cancel/CanceledError');
var parseProtocol = require('../helpers/parseProtocol');
var platform = require('../platform');
module.exports = function xhrAdapter(config) {
return new Promise(function dispatchXhrRequest(resolve, reject) {
var requestData = config.data;
var requestHeaders = config.headers;
var responseType = config.responseType;
var withXSRFToken = config.withXSRFToken;
var onCanceled;
function done() {
if (config.cancelToken) {
config.cancelToken.unsubscribe(onCanceled);
}
if (config.signal) {
config.signal.removeEventListener('abort', onCanceled);
}
}
if (utils.isFormData(requestData) && utils.isStandardBrowserEnv()) {
delete requestHeaders['Content-Type']; // Let the browser set it
}
var request = new XMLHttpRequest();
// HTTP basic authentication
if (config.auth) {
var username = config.auth.username || '';
var password = config.auth.password ? unescape(encodeURIComponent(config.auth.password)) : '';
requestHeaders.Authorization = 'Basic ' + btoa(username + ':' + password);
}
var fullPath = buildFullPath(config.baseURL, config.url);
request.open(config.method.toUpperCase(), buildURL(fullPath, config.params, config.paramsSerializer), true);
// Set the request timeout in MS
request.timeout = config.timeout;
function onloadend() {
if (!request) {
return;
}
// Prepare the response
var responseHeaders = 'getAllResponseHeaders' in request ? parseHeaders(request.getAllResponseHeaders()) : null;
var responseData = !responseType || responseType === 'text' || responseType === 'json' ?
request.responseText : request.response;
var response = {
data: responseData,
status: request.status,
statusText: request.statusText,
headers: responseHeaders,
config: config,
request: request
};
settle(function _resolve(value) {
resolve(value);
done();
}, function _reject(err) {
reject(err);
done();
}, response);
// Clean up request
request = null;
}
if ('onloadend' in request) {
// Use onloadend if available
request.onloadend = onloadend;
} else {
// Listen for ready state to emulate onloadend
request.onreadystatechange = function handleLoad() {
if (!request || request.readyState !== 4) {
return;
}
        // The request errored out and we didn't get a response; this will be
        // handled by onerror instead.
        // With one exception: for requests using the file: protocol, most browsers
        // will return a status of 0 even though the request was successful
if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) {
return;
}
        // the readystate handler is called before the onerror or ontimeout handlers,
        // so we should call onloadend on the next 'tick'
setTimeout(onloadend);
};
}
// Handle browser request cancellation (as opposed to a manual cancellation)
request.onabort = function handleAbort() {
if (!request) {
return;
}
reject(new AxiosError('Request aborted', AxiosError.ECONNABORTED, config, request));
// Clean up request
request = null;
};
// Handle low level network errors
request.onerror = function handleError() {
// Real errors are hidden from us by the browser
// onerror should only fire if it's a network error
reject(new AxiosError('Network Error', AxiosError.ERR_NETWORK, config, request));
// Clean up request
request = null;
};
// Handle timeout
request.ontimeout = function handleTimeout() {
var timeoutErrorMessage = config.timeout ? 'timeout of ' + config.timeout + 'ms exceeded' : 'timeout exceeded';
var transitional = config.transitional || transitionalDefaults;
if (config.timeoutErrorMessage) {
timeoutErrorMessage = config.timeoutErrorMessage;
}
reject(new AxiosError(
timeoutErrorMessage,
transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED,
config,
request));
// Clean up request
request = null;
};
// Add xsrf header
// This is only done if running in a standard browser environment.
// Specifically not if we're in a web worker, or react-native.
if (utils.isStandardBrowserEnv()) {
// Add xsrf header
withXSRFToken && utils.isFunction(withXSRFToken) && (withXSRFToken = withXSRFToken(config));
if (withXSRFToken || (withXSRFToken !== false && isURLSameOrigin(fullPath))) {
// Add xsrf header
var xsrfValue = config.xsrfHeaderName && config.xsrfCookieName && cookies.read(config.xsrfCookieName);
if (xsrfValue) {
requestHeaders[config.xsrfHeaderName] = xsrfValue;
}
}
}
// Add headers to the request
if ('setRequestHeader' in request) {
utils.forEach(requestHeaders, function setRequestHeader(val, key) {
if (typeof requestData === 'undefined' && key.toLowerCase() === 'content-type') {
// Remove Content-Type if data is undefined
delete requestHeaders[key];
} else {
// Otherwise add header to the request
request.setRequestHeader(key, val);
}
});
}
// Add withCredentials to request if needed
if (!utils.isUndefined(config.withCredentials)) {
request.withCredentials = !!config.withCredentials;
}
// Add responseType to request if needed
if (responseType && responseType !== 'json') {
request.responseType = config.responseType;
}
// Handle progress if needed
if (typeof config.onDownloadProgress === 'function') {
request.addEventListener('progress', config.onDownloadProgress);
}
// Not all browsers support upload events
if (typeof config.onUploadProgress === 'function' && request.upload) {
request.upload.addEventListener('progress', config.onUploadProgress);
}
if (config.cancelToken || config.signal) {
// Handle cancellation
// eslint-disable-next-line func-names
onCanceled = function(cancel) {
if (!request) {
return;
}
reject(!cancel || cancel.type ? new CanceledError(null, config, request) : cancel);
request.abort();
request = null;
};
config.cancelToken && config.cancelToken.subscribe(onCanceled);
if (config.signal) {
config.signal.aborted ? onCanceled() : config.signal.addEventListener('abort', onCanceled);
}
}
// false, 0 (zero number), and '' (empty string) are valid JSON values
if (!requestData && requestData !== false && requestData !== 0 && requestData !== '') {
requestData = null;
}
var protocol = parseProtocol(fullPath);
if (protocol && platform.protocols.indexOf(protocol) === -1) {
reject(new AxiosError('Unsupported protocol ' + protocol + ':', AxiosError.ERR_BAD_REQUEST, config));
return;
}
// Send the request
request.send(requestData);
});
};

68
node_modules/axios/lib/axios.js generated vendored Normal file
View File

@ -0,0 +1,68 @@
'use strict';
var utils = require('./utils');
var bind = require('./helpers/bind');
var Axios = require('./core/Axios');
var mergeConfig = require('./core/mergeConfig');
var defaults = require('./defaults');
var formDataToJSON = require('./helpers/formDataToJSON');
/**
* Create an instance of Axios
*
* @param {Object} defaultConfig The default config for the instance
* @return {Axios} A new instance of Axios
*/
function createInstance(defaultConfig) {
var context = new Axios(defaultConfig);
var instance = bind(Axios.prototype.request, context);
// Copy axios.prototype to instance
utils.extend(instance, Axios.prototype, context);
// Copy context to instance
utils.extend(instance, context);
// Factory for creating new instances
instance.create = function create(instanceConfig) {
return createInstance(mergeConfig(defaultConfig, instanceConfig));
};
return instance;
}
// Create the default instance to be exported
var axios = createInstance(defaults);
// Expose Axios class to allow class inheritance
axios.Axios = Axios;
// Expose Cancel & CancelToken
axios.CanceledError = require('./cancel/CanceledError');
axios.CancelToken = require('./cancel/CancelToken');
axios.isCancel = require('./cancel/isCancel');
axios.VERSION = require('./env/data').version;
axios.toFormData = require('./helpers/toFormData');
// Expose AxiosError class
axios.AxiosError = require('../lib/core/AxiosError');
// alias for CanceledError for backward compatibility
axios.Cancel = axios.CanceledError;
// Expose all/spread
axios.all = function all(promises) {
return Promise.all(promises);
};
axios.spread = require('./helpers/spread');
// Expose isAxiosError
axios.isAxiosError = require('./helpers/isAxiosError');
axios.formToJSON = function(thing) {
return formDataToJSON(utils.isHTMLForm(thing) ? new FormData(thing) : thing);
};
module.exports = axios;
// Allow use of default import syntax in TypeScript
module.exports.default = axios;
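A short sketch using the `create` factory attached in `createInstance` above; the base URL and timeout values are placeholders.

```js
var axios = require('axios');

// build a pre-configured instance that shares the same request methods
var client = axios.create({
  baseURL: 'https://api.example.org',
  timeout: 2000
});

client.get('/status').then(function (res) {
  console.log(res.status);
});
```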

118
node_modules/axios/lib/cancel/CancelToken.js generated vendored Normal file
View File

@ -0,0 +1,118 @@
'use strict';
var CanceledError = require('./CanceledError');
/**
* A `CancelToken` is an object that can be used to request cancellation of an operation.
*
* @class
* @param {Function} executor The executor function.
*/
function CancelToken(executor) {
if (typeof executor !== 'function') {
throw new TypeError('executor must be a function.');
}
var resolvePromise;
this.promise = new Promise(function promiseExecutor(resolve) {
resolvePromise = resolve;
});
var token = this;
// eslint-disable-next-line func-names
this.promise.then(function(cancel) {
if (!token._listeners) return;
var i = token._listeners.length;
while (i-- > 0) {
token._listeners[i](cancel);
}
token._listeners = null;
});
// eslint-disable-next-line func-names
this.promise.then = function(onfulfilled) {
var _resolve;
// eslint-disable-next-line func-names
var promise = new Promise(function(resolve) {
token.subscribe(resolve);
_resolve = resolve;
}).then(onfulfilled);
promise.cancel = function reject() {
token.unsubscribe(_resolve);
};
return promise;
};
executor(function cancel(message, config, request) {
if (token.reason) {
// Cancellation has already been requested
return;
}
token.reason = new CanceledError(message, config, request);
resolvePromise(token.reason);
});
}
/**
* Throws a `CanceledError` if cancellation has been requested.
*/
CancelToken.prototype.throwIfRequested = function throwIfRequested() {
if (this.reason) {
throw this.reason;
}
};
/**
* Subscribe to the cancel signal
*/
CancelToken.prototype.subscribe = function subscribe(listener) {
if (this.reason) {
listener(this.reason);
return;
}
if (this._listeners) {
this._listeners.push(listener);
} else {
this._listeners = [listener];
}
};
/**
* Unsubscribe from the cancel signal
*/
CancelToken.prototype.unsubscribe = function unsubscribe(listener) {
if (!this._listeners) {
return;
}
var index = this._listeners.indexOf(listener);
if (index !== -1) {
this._listeners.splice(index, 1);
}
};
/**
* Returns an object that contains a new `CancelToken` and a function that, when called,
* cancels the `CancelToken`.
*/
CancelToken.source = function source() {
var cancel;
var token = new CancelToken(function executor(c) {
cancel = c;
});
return {
token: token,
cancel: cancel
};
};
module.exports = CancelToken;
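A short usage sketch of the `source()` factory defined above, using the `axios.CancelToken` and `axios.isCancel` exports from `lib/axios.js`; the URL and message are placeholders.

```js
var axios = require('axios');

var source = axios.CancelToken.source();

axios.get('/user/12345', { cancelToken: source.token })
  .catch(function (thrown) {
    if (axios.isCancel(thrown)) {
      console.log('Request canceled:', thrown.message);
    }
  });

// cancel the in-flight request; the message is optional
source.cancel('Operation canceled by the user.');
```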

24
node_modules/axios/lib/cancel/CanceledError.js generated vendored Normal file
View File

@ -0,0 +1,24 @@
'use strict';
var AxiosError = require('../core/AxiosError');
var utils = require('../utils');
/**
* A `CanceledError` is an object that is thrown when an operation is canceled.
*
* @class
* @param {string=} message The message.
* @param {Object=} config The config.
* @param {Object=} request The request.
*/
function CanceledError(message, config, request) {
// eslint-disable-next-line no-eq-null,eqeqeq
AxiosError.call(this, message == null ? 'canceled' : message, AxiosError.ERR_CANCELED, config, request);
this.name = 'CanceledError';
}
utils.inherits(CanceledError, AxiosError, {
__CANCEL__: true
});
module.exports = CanceledError;

5
node_modules/axios/lib/cancel/isCancel.js generated vendored Normal file
View File

@ -0,0 +1,5 @@
'use strict';
module.exports = function isCancel(value) {
return !!(value && value.__CANCEL__);
};

172
node_modules/axios/lib/core/Axios.js generated vendored Normal file
View File

@ -0,0 +1,172 @@
'use strict';
var utils = require('./../utils');
var buildURL = require('../helpers/buildURL');
var InterceptorManager = require('./InterceptorManager');
var dispatchRequest = require('./dispatchRequest');
var mergeConfig = require('./mergeConfig');
var buildFullPath = require('./buildFullPath');
var validator = require('../helpers/validator');
var validators = validator.validators;
/**
* Create a new instance of Axios
*
* @param {Object} instanceConfig The default config for the instance
*/
function Axios(instanceConfig) {
this.defaults = instanceConfig;
this.interceptors = {
request: new InterceptorManager(),
response: new InterceptorManager()
};
}
/**
* Dispatch a request
*
* @param {String|Object} configOrUrl The config specific for this request (merged with this.defaults)
* @param {?Object} config
*/
Axios.prototype.request = function request(configOrUrl, config) {
/*eslint no-param-reassign:0*/
// Allow for axios('example/url'[, config]) a la fetch API
if (typeof configOrUrl === 'string') {
config = config || {};
config.url = configOrUrl;
} else {
config = configOrUrl || {};
}
config = mergeConfig(this.defaults, config);
// Set config.method
if (config.method) {
config.method = config.method.toLowerCase();
} else if (this.defaults.method) {
config.method = this.defaults.method.toLowerCase();
} else {
config.method = 'get';
}
var transitional = config.transitional;
if (transitional !== undefined) {
validator.assertOptions(transitional, {
silentJSONParsing: validators.transitional(validators.boolean),
forcedJSONParsing: validators.transitional(validators.boolean),
clarifyTimeoutError: validators.transitional(validators.boolean)
}, false);
}
var paramsSerializer = config.paramsSerializer;
if (paramsSerializer !== undefined) {
validator.assertOptions(paramsSerializer, {
encode: validators.function,
serialize: validators.function
}, true);
}
utils.isFunction(paramsSerializer) && (config.paramsSerializer = {serialize: paramsSerializer});
// filter out skipped interceptors
var requestInterceptorChain = [];
var synchronousRequestInterceptors = true;
this.interceptors.request.forEach(function unshiftRequestInterceptors(interceptor) {
if (typeof interceptor.runWhen === 'function' && interceptor.runWhen(config) === false) {
return;
}
synchronousRequestInterceptors = synchronousRequestInterceptors && interceptor.synchronous;
requestInterceptorChain.unshift(interceptor.fulfilled, interceptor.rejected);
});
var responseInterceptorChain = [];
this.interceptors.response.forEach(function pushResponseInterceptors(interceptor) {
responseInterceptorChain.push(interceptor.fulfilled, interceptor.rejected);
});
var promise;
if (!synchronousRequestInterceptors) {
var chain = [dispatchRequest, undefined];
Array.prototype.unshift.apply(chain, requestInterceptorChain);
chain = chain.concat(responseInterceptorChain);
promise = Promise.resolve(config);
while (chain.length) {
promise = promise.then(chain.shift(), chain.shift());
}
return promise;
}
var newConfig = config;
while (requestInterceptorChain.length) {
var onFulfilled = requestInterceptorChain.shift();
var onRejected = requestInterceptorChain.shift();
try {
newConfig = onFulfilled(newConfig);
} catch (error) {
onRejected(error);
break;
}
}
try {
promise = dispatchRequest(newConfig);
} catch (error) {
return Promise.reject(error);
}
while (responseInterceptorChain.length) {
promise = promise.then(responseInterceptorChain.shift(), responseInterceptorChain.shift());
}
return promise;
};
Axios.prototype.getUri = function getUri(config) {
config = mergeConfig(this.defaults, config);
var fullPath = buildFullPath(config.baseURL, config.url);
return buildURL(fullPath, config.params, config.paramsSerializer);
};
// Provide aliases for supported request methods
utils.forEach(['delete', 'get', 'head', 'options'], function forEachMethodNoData(method) {
/*eslint func-names:0*/
Axios.prototype[method] = function(url, config) {
return this.request(mergeConfig(config || {}, {
method: method,
url: url,
data: (config || {}).data
}));
};
});
utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) {
/*eslint func-names:0*/
function generateHTTPMethod(isForm) {
return function httpMethod(url, data, config) {
return this.request(mergeConfig(config || {}, {
method: method,
headers: isForm ? {
'Content-Type': 'multipart/form-data'
} : {},
url: url,
data: data
}));
};
}
Axios.prototype[method] = generateHTTPMethod();
Axios.prototype[method + 'Form'] = generateHTTPMethod(true);
});
module.exports = Axios;
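
`request` above only stays on the synchronous interceptor path while every registered request interceptor declares `synchronous: true`, and it skips interceptors whose `runWhen` predicate returns false. A sketch of those two options; the base URL and header name are placeholders.

var axios = require('axios');

var instance = axios.create({ baseURL: 'https://api.example.com' });

// Opting into the synchronous chain; the request is dispatched without the extra
// promise tick as long as every registered request interceptor does the same
instance.interceptors.request.use(function attachTraceId(config) {
  config.headers['X-Trace-Id'] = 'placeholder-id';
  return config;
}, null, {
  synchronous: true,
  runWhen: function onlyForGet(config) {
    return config.method === 'get'; // interceptors with a false runWhen are skipped above
  }
});

instance.interceptors.response.use(function unwrap(response) {
  return response.data;
});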

97
node_modules/axios/lib/core/AxiosError.js generated vendored Normal file
View File

@ -0,0 +1,97 @@
'use strict';
var utils = require('../utils');
/**
* Create an Error with the specified message, code, config, request and response.
*
* @param {string} message The error message.
* @param {string} [code] The error code (for example, 'ECONNABORTED').
* @param {Object} [config] The config.
* @param {Object} [request] The request.
* @param {Object} [response] The response.
* @returns {Error} The created error.
*/
function AxiosError(message, code, config, request, response) {
Error.call(this);
if (Error.captureStackTrace) {
Error.captureStackTrace(this, this.constructor);
} else {
this.stack = (new Error()).stack;
}
this.message = message;
this.name = 'AxiosError';
code && (this.code = code);
config && (this.config = config);
request && (this.request = request);
response && (this.response = response);
}
utils.inherits(AxiosError, Error, {
toJSON: function toJSON() {
return {
// Standard
message: this.message,
name: this.name,
// Microsoft
description: this.description,
number: this.number,
// Mozilla
fileName: this.fileName,
lineNumber: this.lineNumber,
columnNumber: this.columnNumber,
stack: this.stack,
// Axios
config: this.config,
code: this.code,
status: this.response && this.response.status ? this.response.status : null
};
}
});
var prototype = AxiosError.prototype;
var descriptors = {};
[
'ERR_BAD_OPTION_VALUE',
'ERR_BAD_OPTION',
'ECONNABORTED',
'ETIMEDOUT',
'ERR_NETWORK',
'ERR_FR_TOO_MANY_REDIRECTS',
'ERR_DEPRECATED',
'ERR_BAD_RESPONSE',
'ERR_BAD_REQUEST',
'ERR_CANCELED',
'ERR_NOT_SUPPORT',
'ERR_INVALID_URL'
// eslint-disable-next-line func-names
].forEach(function(code) {
descriptors[code] = {value: code};
});
Object.defineProperties(AxiosError, descriptors);
Object.defineProperty(prototype, 'isAxiosError', {value: true});
// eslint-disable-next-line func-names
AxiosError.from = function(error, code, config, request, response, customProps) {
var axiosError = Object.create(prototype);
utils.toFlatObject(error, axiosError, function filter(obj) {
return obj !== Error.prototype;
});
AxiosError.call(axiosError, error.message, code, config, request, response);
axiosError.cause = error;
axiosError.name = error.name;
customProps && Object.assign(axiosError, customProps);
return axiosError;
};
module.exports = AxiosError;
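
A sketch of how callers typically branch on the marker and codes defined above; it assumes the `AxiosError` class is re-exported from the package entry point in this release line, and the URL is a placeholder.

var axios = require('axios');
var AxiosError = axios.AxiosError; // assumed to be re-exported by the axios entry point

axios.get('https://api.example.com/resource')
  .catch(function (err) {
    if (err.isAxiosError) {
      if (err.response) {
        // The server answered; settle() rejected because validateStatus said no
        console.error('HTTP', err.response.status, err.code); // e.g. AxiosError.ERR_BAD_REQUEST
      } else if (err.code === AxiosError.ERR_CANCELED) {
        console.error('request was canceled');
      } else {
        console.error('no response received:', err.code);
      }
      // Serializable snapshot produced by the toJSON defined above
      console.error(JSON.stringify(err.toJSON()));
    }
  });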

63
node_modules/axios/lib/core/InterceptorManager.js generated vendored Normal file
View File

@ -0,0 +1,63 @@
'use strict';
var utils = require('./../utils');
function InterceptorManager() {
this.handlers = [];
}
/**
* Add a new interceptor to the stack
*
* @param {Function} fulfilled The function to handle `then` for a `Promise`
* @param {Function} rejected The function to handle `reject` for a `Promise`
*
* @return {Number} An ID used to remove interceptor later
*/
InterceptorManager.prototype.use = function use(fulfilled, rejected, options) {
this.handlers.push({
fulfilled: fulfilled,
rejected: rejected,
synchronous: options ? options.synchronous : false,
runWhen: options ? options.runWhen : null
});
return this.handlers.length - 1;
};
/**
* Remove an interceptor from the stack
*
* @param {Number} id The ID that was returned by `use`
*/
InterceptorManager.prototype.eject = function eject(id) {
if (this.handlers[id]) {
this.handlers[id] = null;
}
};
/**
* Clear all interceptors from the stack
*/
InterceptorManager.prototype.clear = function clear() {
if (this.handlers) {
this.handlers = [];
}
};
/**
* Iterate over all the registered interceptors
*
* This method is particularly useful for skipping over any
* interceptors that may have become `null` after calling `eject`.
*
* @param {Function} fn The function to call for each interceptor
*/
InterceptorManager.prototype.forEach = function forEach(fn) {
utils.forEach(this.handlers, function forEachHandler(h) {
if (h !== null) {
fn(h);
}
});
};
module.exports = InterceptorManager;
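
A short sketch of the lifecycle: `use` returns the handler's index, `eject` nulls that slot (so `forEach` skips it), and `clear` resets the whole stack.

var axios = require('axios');

var id = axios.interceptors.request.use(function logRequest(config) {
  console.log(config.method, config.url);
  return config;
});

// Remove that single interceptor again
axios.interceptors.request.eject(id);

// Drop every registered request interceptor at once
axios.interceptors.request.clear();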

8
node_modules/axios/lib/core/README.md generated vendored Normal file
View File

@ -0,0 +1,8 @@
# axios // core
The modules found in `core/` should be specific to the domain logic of axios. They would most likely not make sense consumed outside of the axios module, as their logic is too specific. Some examples of core modules are:
- Dispatching requests
- Requests sent via `adapters/` (see lib/adapters/README.md)
- Managing interceptors
- Handling config

20
node_modules/axios/lib/core/buildFullPath.js generated vendored Normal file
View File

@ -0,0 +1,20 @@
'use strict';
var isAbsoluteURL = require('../helpers/isAbsoluteURL');
var combineURLs = require('../helpers/combineURLs');
/**
* Creates a new URL by combining the baseURL with the requestedURL,
* only when the requestedURL is not already an absolute URL.
* If the requestedURL is already absolute, this function returns it untouched.
*
* @param {string} baseURL The base URL
* @param {string} requestedURL Absolute or relative URL to combine
* @returns {string} The combined full path
*/
module.exports = function buildFullPath(baseURL, requestedURL) {
if (baseURL && !isAbsoluteURL(requestedURL)) {
return combineURLs(baseURL, requestedURL);
}
return requestedURL;
};
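
Illustrative calls showing both branches; the deep require path assumes the package still allows subpath requires into `lib/`, and the URLs are placeholders.

var buildFullPath = require('axios/lib/core/buildFullPath');

buildFullPath('https://api.example.com/v1', '/users');
// => 'https://api.example.com/v1/users'

buildFullPath('https://api.example.com/v1', 'https://status.example.com/health');
// => 'https://status.example.com/health' (absolute URLs pass through untouched)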

94
node_modules/axios/lib/core/dispatchRequest.js generated vendored Normal file
View File

@ -0,0 +1,94 @@
'use strict';
var utils = require('./../utils');
var transformData = require('./transformData');
var isCancel = require('../cancel/isCancel');
var defaults = require('../defaults');
var CanceledError = require('../cancel/CanceledError');
var normalizeHeaderName = require('../helpers/normalizeHeaderName');
/**
* Throws a `CanceledError` if cancellation has been requested.
*/
function throwIfCancellationRequested(config) {
if (config.cancelToken) {
config.cancelToken.throwIfRequested();
}
if (config.signal && config.signal.aborted) {
throw new CanceledError();
}
}
/**
* Dispatch a request to the server using the configured adapter.
*
* @param {object} config The config that is to be used for the request
* @returns {Promise} The Promise to be fulfilled
*/
module.exports = function dispatchRequest(config) {
throwIfCancellationRequested(config);
// Ensure headers exist
config.headers = config.headers || {};
// Transform request data
config.data = transformData.call(
config,
config.data,
config.headers,
null,
config.transformRequest
);
normalizeHeaderName(config.headers, 'Accept');
normalizeHeaderName(config.headers, 'Content-Type');
// Flatten headers
config.headers = utils.merge(
config.headers.common || {},
config.headers[config.method] || {},
config.headers
);
utils.forEach(
['delete', 'get', 'head', 'post', 'put', 'patch', 'common'],
function cleanHeaderConfig(method) {
delete config.headers[method];
}
);
var adapter = config.adapter || defaults.adapter;
return adapter(config).then(function onAdapterResolution(response) {
throwIfCancellationRequested(config);
// Transform response data
response.data = transformData.call(
config,
response.data,
response.headers,
response.status,
config.transformResponse
);
return response;
}, function onAdapterRejection(reason) {
if (!isCancel(reason)) {
throwIfCancellationRequested(config);
// Transform response data
if (reason && reason.response) {
reason.response.data = transformData.call(
config,
reason.response.data,
reason.response.headers,
reason.response.status,
config.transformResponse
);
}
}
return Promise.reject(reason);
});
};
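
Because the adapter is taken from `config.adapter || defaults.adapter`, a per-request stub adapter is an easy way to observe the surrounding transform pipeline; a test-style sketch with placeholder data.

var axios = require('axios');

axios.get('/ping', {
  adapter: function stubAdapter(config) {
    // Resolve with a hand-built response; dispatchRequest still applies
    // transformResponse and the cancellation checks around it
    return Promise.resolve({
      data: '{"ok":true}',
      status: 200,
      statusText: 'OK',
      headers: { 'content-type': 'application/json' },
      config: config,
      request: {}
    });
  }
}).then(function (response) {
  console.log(response.data.ok); // true, the default transformResponse parsed the JSON string
});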

103
node_modules/axios/lib/core/mergeConfig.js generated vendored Normal file
View File

@ -0,0 +1,103 @@
'use strict';
var utils = require('../utils');
/**
* Config-specific merge-function which creates a new config-object
* by merging two configuration objects together.
*
* @param {Object} config1
* @param {Object} config2
* @returns {Object} New object resulting from merging config2 to config1
*/
module.exports = function mergeConfig(config1, config2) {
// eslint-disable-next-line no-param-reassign
config2 = config2 || {};
var config = {};
function getMergedValue(target, source) {
if (utils.isPlainObject(target) && utils.isPlainObject(source)) {
return utils.merge(target, source);
} else if (utils.isEmptyObject(source)) {
return utils.merge({}, target);
} else if (utils.isPlainObject(source)) {
return utils.merge({}, source);
} else if (utils.isArray(source)) {
return source.slice();
}
return source;
}
// eslint-disable-next-line consistent-return
function mergeDeepProperties(prop) {
if (!utils.isUndefined(config2[prop])) {
return getMergedValue(config1[prop], config2[prop]);
} else if (!utils.isUndefined(config1[prop])) {
return getMergedValue(undefined, config1[prop]);
}
}
// eslint-disable-next-line consistent-return
function valueFromConfig2(prop) {
if (!utils.isUndefined(config2[prop])) {
return getMergedValue(undefined, config2[prop]);
}
}
// eslint-disable-next-line consistent-return
function defaultToConfig2(prop) {
if (!utils.isUndefined(config2[prop])) {
return getMergedValue(undefined, config2[prop]);
} else if (!utils.isUndefined(config1[prop])) {
return getMergedValue(undefined, config1[prop]);
}
}
// eslint-disable-next-line consistent-return
function mergeDirectKeys(prop) {
if (prop in config2) {
return getMergedValue(config1[prop], config2[prop]);
} else if (prop in config1) {
return getMergedValue(undefined, config1[prop]);
}
}
var mergeMap = {
'url': valueFromConfig2,
'method': valueFromConfig2,
'data': valueFromConfig2,
'baseURL': defaultToConfig2,
'transformRequest': defaultToConfig2,
'transformResponse': defaultToConfig2,
'paramsSerializer': defaultToConfig2,
'timeout': defaultToConfig2,
'timeoutMessage': defaultToConfig2,
'withCredentials': defaultToConfig2,
'withXSRFToken': defaultToConfig2,
'adapter': defaultToConfig2,
'responseType': defaultToConfig2,
'xsrfCookieName': defaultToConfig2,
'xsrfHeaderName': defaultToConfig2,
'onUploadProgress': defaultToConfig2,
'onDownloadProgress': defaultToConfig2,
'decompress': defaultToConfig2,
'maxContentLength': defaultToConfig2,
'maxBodyLength': defaultToConfig2,
'beforeRedirect': defaultToConfig2,
'transport': defaultToConfig2,
'httpAgent': defaultToConfig2,
'httpsAgent': defaultToConfig2,
'cancelToken': defaultToConfig2,
'socketPath': defaultToConfig2,
'responseEncoding': defaultToConfig2,
'validateStatus': mergeDirectKeys
};
utils.forEach(Object.keys(config1).concat(Object.keys(config2)), function computeConfigValue(prop) {
var merge = mergeMap[prop] || mergeDeepProperties;
var configValue = merge(prop);
(utils.isUndefined(configValue) && merge !== mergeDirectKeys) || (config[prop] = configValue);
});
return config;
};
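
A sketch of the resulting behaviour: request-specific keys (`url`, `method`, `data`) come only from the per-request config, most options fall back to the instance defaults, and unmapped keys such as `headers` are deep-merged. All values are placeholders.

var axios = require('axios');

var instance = axios.create({
  baseURL: 'https://api.example.com',            // defaultToConfig2: used unless the request overrides it
  headers: { common: { 'X-Client': 'example' } } // mergeDeepProperties: deep-merged with request headers
});

// url/method/data use valueFromConfig2, so they never leak in from the defaults
instance.get('/users', { headers: { Accept: 'application/json' } });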

25
node_modules/axios/lib/core/settle.js generated vendored Normal file
View File

@ -0,0 +1,25 @@
'use strict';
var AxiosError = require('./AxiosError');
/**
* Resolve or reject a Promise based on response status.
*
* @param {Function} resolve A function that resolves the promise.
* @param {Function} reject A function that rejects the promise.
* @param {object} response The response.
*/
module.exports = function settle(resolve, reject, response) {
var validateStatus = response.config.validateStatus;
if (!response.status || !validateStatus || validateStatus(response.status)) {
resolve(response);
} else {
reject(new AxiosError(
'Request failed with status code ' + response.status,
[AxiosError.ERR_BAD_REQUEST, AxiosError.ERR_BAD_RESPONSE][Math.floor(response.status / 100) - 4],
response.config,
response.request,
response
));
}
};
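
`validateStatus` is entirely caller-controlled, so a request can opt out of rejecting on 4xx responses; a sketch with a placeholder endpoint.

var axios = require('axios');

axios.get('/maybe-missing', {
  validateStatus: function (status) {
    return status < 500; // only 5xx responses reject
  }
}).then(function (response) {
  if (response.status === 404) {
    // Handled as a normal resolution because validateStatus accepted it
  }
});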

23
node_modules/axios/lib/core/transformData.js generated vendored Normal file
View File

@ -0,0 +1,23 @@
'use strict';
var utils = require('./../utils');
var defaults = require('../defaults');
/**
* Transform the data for a request or a response
*
* @param {Object|String} data The data to be transformed
* @param {Object} headers The headers for the request or response
* @param {Number} status HTTP status code
* @param {Array|Function} fns A single function or Array of functions
* @returns {*} The resulting transformed data
*/
module.exports = function transformData(data, headers, status, fns) {
var context = this || defaults;
/*eslint no-param-reassign:0*/
utils.forEach(fns, function transform(fn) {
data = fn.call(context, data, headers, status);
});
return data;
};
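
Because `fns` is simply an array applied in order with the config as `this`, extra transforms can be appended after the defaults; a Node-style sketch, with a placeholder header name.

var axios = require('axios');

axios.post('/items', { name: 'widget' }, {
  transformRequest: (axios.defaults.transformRequest || []).concat(function (data, headers) {
    // By this point the default transform has already serialized the object to a JSON string
    headers['X-Payload-Bytes'] = String(Buffer.byteLength(data));
    return data;
  })
});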

175
node_modules/axios/lib/defaults/index.js generated vendored Normal file
View File

@ -0,0 +1,175 @@
'use strict';
var utils = require('../utils');
var normalizeHeaderName = require('../helpers/normalizeHeaderName');
var AxiosError = require('../core/AxiosError');
var transitionalDefaults = require('./transitional');
var toFormData = require('../helpers/toFormData');
var toURLEncodedForm = require('../helpers/toURLEncodedForm');
var platform = require('../platform');
var formDataToJSON = require('../helpers/formDataToJSON');
var DEFAULT_CONTENT_TYPE = {
'Content-Type': 'application/x-www-form-urlencoded'
};
function setContentTypeIfUnset(headers, value) {
if (!utils.isUndefined(headers) && utils.isUndefined(headers['Content-Type'])) {
headers['Content-Type'] = value;
}
}
function getDefaultAdapter() {
var adapter;
if (typeof XMLHttpRequest !== 'undefined') {
// For browsers use XHR adapter
adapter = require('../adapters/xhr');
} else if (typeof process !== 'undefined' && Object.prototype.toString.call(process) === '[object process]') {
// For node use HTTP adapter
adapter = require('../adapters/http');
}
return adapter;
}
function stringifySafely(rawValue, parser, encoder) {
if (utils.isString(rawValue)) {
try {
(parser || JSON.parse)(rawValue);
return utils.trim(rawValue);
} catch (e) {
if (e.name !== 'SyntaxError') {
throw e;
}
}
}
return (encoder || JSON.stringify)(rawValue);
}
var defaults = {
transitional: transitionalDefaults,
adapter: getDefaultAdapter(),
transformRequest: [function transformRequest(data, headers) {
normalizeHeaderName(headers, 'Accept');
normalizeHeaderName(headers, 'Content-Type');
var contentType = headers && headers['Content-Type'] || '';
var hasJSONContentType = contentType.indexOf('application/json') > -1;
var isObjectPayload = utils.isObject(data);
if (isObjectPayload && utils.isHTMLForm(data)) {
data = new FormData(data);
}
var isFormData = utils.isFormData(data);
if (isFormData) {
return hasJSONContentType ? JSON.stringify(formDataToJSON(data)) : data;
}
if (utils.isArrayBuffer(data) ||
utils.isBuffer(data) ||
utils.isStream(data) ||
utils.isFile(data) ||
utils.isBlob(data)
) {
return data;
}
if (utils.isArrayBufferView(data)) {
return data.buffer;
}
if (utils.isURLSearchParams(data)) {
setContentTypeIfUnset(headers, 'application/x-www-form-urlencoded;charset=utf-8');
return data.toString();
}
var isFileList;
if (isObjectPayload) {
if (contentType.indexOf('application/x-www-form-urlencoded') !== -1) {
return toURLEncodedForm(data, this.formSerializer).toString();
}
if ((isFileList = utils.isFileList(data)) || contentType.indexOf('multipart/form-data') > -1) {
var _FormData = this.env && this.env.FormData;
return toFormData(
isFileList ? {'files[]': data} : data,
_FormData && new _FormData(),
this.formSerializer
);
}
}
if (isObjectPayload || hasJSONContentType ) {
setContentTypeIfUnset(headers, 'application/json');
return stringifySafely(data);
}
return data;
}],
transformResponse: [function transformResponse(data) {
var transitional = this.transitional || defaults.transitional;
var forcedJSONParsing = transitional && transitional.forcedJSONParsing;
var JSONRequested = this.responseType === 'json';
if (data && utils.isString(data) && ((forcedJSONParsing && !this.responseType) || JSONRequested)) {
var silentJSONParsing = transitional && transitional.silentJSONParsing;
var strictJSONParsing = !silentJSONParsing && JSONRequested;
try {
return JSON.parse(data);
} catch (e) {
if (strictJSONParsing) {
if (e.name === 'SyntaxError') {
throw AxiosError.from(e, AxiosError.ERR_BAD_RESPONSE, this, null, this.response);
}
throw e;
}
}
}
return data;
}],
/**
* A timeout in milliseconds to abort a request. If set to 0 (default) a
* timeout is not created.
*/
timeout: 0,
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
maxBodyLength: -1,
env: {
FormData: platform.classes.FormData,
Blob: platform.classes.Blob
},
validateStatus: function validateStatus(status) {
return status >= 200 && status < 300;
},
headers: {
common: {
'Accept': 'application/json, text/plain, */*'
}
}
};
utils.forEach(['delete', 'get', 'head'], function forEachMethodNoData(method) {
defaults.headers[method] = {};
});
utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) {
defaults.headers[method] = utils.merge(DEFAULT_CONTENT_TYPE);
});
module.exports = defaults;
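
Typical application-level overrides of these defaults; all values are placeholders.

var axios = require('axios');

axios.defaults.baseURL = 'https://api.example.com';
axios.defaults.timeout = 5000; // ms; 0 (the shipped default) means no timeout
axios.defaults.headers.common['Authorization'] = 'Bearer <token>';
axios.defaults.headers.post['Content-Type'] = 'application/json';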

7
node_modules/axios/lib/defaults/transitional.js generated vendored Normal file
View File

@ -0,0 +1,7 @@
'use strict';
module.exports = {
silentJSONParsing: true,
forcedJSONParsing: true,
clarifyTimeoutError: false
};
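
These flags can also be set per request; `Axios.prototype.request` (earlier in this diff) validates exactly these three keys. A sketch opting into the stricter behaviour, with a placeholder endpoint.

var axios = require('axios');

axios.get('/report', {
  responseType: 'json',
  transitional: {
    silentJSONParsing: false, // malformed JSON now rejects with ERR_BAD_RESPONSE
    forcedJSONParsing: false, // only parse JSON when responseType is 'json'
    clarifyTimeoutError: true // timeouts report ETIMEDOUT instead of ECONNABORTED
  }
});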

3
node_modules/axios/lib/env/README.md generated vendored Normal file
View File

@ -0,0 +1,3 @@
# axios // env
The `data.js` file is updated automatically when the package version is bumped. Please do not edit it manually.

2
node_modules/axios/lib/env/classes/FormData.js generated vendored Normal file
View File

@ -0,0 +1,2 @@
// eslint-disable-next-line strict
module.exports = require('form-data');

3
node_modules/axios/lib/env/data.js generated vendored Normal file
View File

@ -0,0 +1,3 @@
module.exports = {
"version": "0.28.1"
};

42
node_modules/axios/lib/helpers/AxiosURLSearchParams.js generated vendored Normal file
View File

@ -0,0 +1,42 @@
'use strict';
var toFormData = require('./toFormData');
function encode(str) {
var charMap = {
'!': '%21',
"'": '%27',
'(': '%28',
')': '%29',
'~': '%7E',
'%20': '+',
'%00': '\x00'
};
return encodeURIComponent(str).replace(/[!'\(\)~]|%20|%00/g, function replacer(match) {
return charMap[match];
});
}
function AxiosURLSearchParams(params, options) {
this._pairs = [];
params && toFormData(params, this, options);
}
var prototype = AxiosURLSearchParams.prototype;
prototype.append = function append(name, value) {
this._pairs.push([name, value]);
};
prototype.toString = function toString(encoder) {
var _encode = encoder ? function(value) {
return encoder.call(this, value, encode);
} : encode;
return this._pairs.map(function each(pair) {
return _encode(pair[0]) + '=' + _encode(pair[1]);
}, '').join('&');
};
module.exports = AxiosURLSearchParams;
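
The `encode` hook accepted by `toString` above is also reachable from request config via `paramsSerializer.encode` (validated in `Axios.prototype.request`), assuming `buildURL` in this release forwards it the way the 1.x helper does; the params and post-processing are placeholders.

var axios = require('axios');

axios.get('/search', {
  params: { q: 'coffee & tea', date: '2024-01-01' },
  paramsSerializer: {
    encode: function (value, defaultEncoder) {
      // Start from the default encoder defined above, then post-process the result
      return defaultEncoder(value).replace(/%3A/gi, ':');
    }
  }
});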

7
node_modules/axios/lib/helpers/README.md generated vendored Normal file
View File

@ -0,0 +1,7 @@
# axios // helpers
The modules found in `helpers/` should be generic modules that are _not_ specific to the domain logic of axios. These modules could theoretically be published to npm on their own and consumed by other modules or apps. Some examples of generic modules are:
- Browser polyfills
- Managing cookies
- Parsing HTTP headers

Some files were not shown because too many files have changed in this diff.