Adding the rest back in.
Some checks failed
Auto Maintenance Cycle / pre-commit Autoupdate (push) Failing after 34s
parent 6e037d6837 · commit ff32b18cc4
node_modules/@actions/core/lib/command.d.ts (15 lines, generated, vendored, normal file)
@@ -0,0 +1,15 @@
export interface CommandProperties {
    [key: string]: any;
}
/**
 * Commands
 *
 * Command Format:
 * ::name key=value,key=value::message
 *
 * Examples:
 * ::warning::This is the message
 * ::set-env name=MY_VAR::some value
 */
export declare function issueCommand(command: string, properties: CommandProperties, message: any): void;
export declare function issue(name: string, message?: string): void;
node_modules/@actions/core/lib/command.js (92 lines, generated, vendored, normal file)
@@ -0,0 +1,92 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.issue = exports.issueCommand = void 0;
const os = __importStar(require("os"));
const utils_1 = require("./utils");
/**
 * Commands
 *
 * Command Format:
 * ::name key=value,key=value::message
 *
 * Examples:
 * ::warning::This is the message
 * ::set-env name=MY_VAR::some value
 */
function issueCommand(command, properties, message) {
    const cmd = new Command(command, properties, message);
    process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
function issue(name, message = '') {
    issueCommand(name, {}, message);
}
exports.issue = issue;
const CMD_STRING = '::';
class Command {
    constructor(command, properties, message) {
        if (!command) {
            command = 'missing.command';
        }
        this.command = command;
        this.properties = properties;
        this.message = message;
    }
    toString() {
        let cmdStr = CMD_STRING + this.command;
        if (this.properties && Object.keys(this.properties).length > 0) {
            cmdStr += ' ';
            let first = true;
            for (const key in this.properties) {
                if (this.properties.hasOwnProperty(key)) {
                    const val = this.properties[key];
                    if (val) {
                        if (first) {
                            first = false;
                        }
                        else {
                            cmdStr += ',';
                        }
                        cmdStr += `${key}=${escapeProperty(val)}`;
                    }
                }
            }
        }
        cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
        return cmdStr;
    }
}
function escapeData(s) {
    return utils_1.toCommandValue(s)
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A');
}
function escapeProperty(s) {
    return utils_1.toCommandValue(s)
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A')
        .replace(/:/g, '%3A')
        .replace(/,/g, '%2C');
}
//# sourceMappingURL=command.js.map
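For context, a minimal sketch of how the vendored command module above is typically driven; this snippet is illustrative only, is not part of the commit, and assumes the CommonJS layout shown here:

    // Sketch only: exercises the vendored build of command.js shown above.
    import { issueCommand, issue } from '@actions/core/lib/command';

    // Writes "::warning file=app.js,line=10::Something looks off" to stdout.
    issueCommand('warning', { file: 'app.js', line: 10 }, 'Something looks off');

    // Writes "::group::Build" to stdout.
    issue('group', 'Build');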
node_modules/@actions/core/lib/command.js.map (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"file":"command.js","sourceRoot":"","sources":["../src/command.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,mCAAsC;AAWtC;;;;;;;;;GASG;AACH,SAAgB,YAAY,CAC1B,OAAe,EACf,UAA6B,EAC7B,OAAY;IAEZ,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;IACrD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AAC/C,CAAC;AAPD,oCAOC;AAED,SAAgB,KAAK,CAAC,IAAY,EAAE,OAAO,GAAG,EAAE;IAC9C,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACjC,CAAC;AAFD,sBAEC;AAED,MAAM,UAAU,GAAG,IAAI,CAAA;AAEvB,MAAM,OAAO;IAKX,YAAY,OAAe,EAAE,UAA6B,EAAE,OAAe;QACzE,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,iBAAiB,CAAA;SAC5B;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAED,QAAQ;QACN,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,OAAO,CAAA;QAEtC,IAAI,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YAC9D,MAAM,IAAI,GAAG,CAAA;YACb,IAAI,KAAK,GAAG,IAAI,CAAA;YAChB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,UAAU,EAAE;gBACjC,IAAI,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;oBACvC,MAAM,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;oBAChC,IAAI,GAAG,EAAE;wBACP,IAAI,KAAK,EAAE;4BACT,KAAK,GAAG,KAAK,CAAA;yBACd;6BAAM;4BACL,MAAM,IAAI,GAAG,CAAA;yBACd;wBAED,MAAM,IAAI,GAAG,GAAG,IAAI,cAAc,CAAC,GAAG,CAAC,EAAE,CAAA;qBAC1C;iBACF;aACF;SACF;QAED,MAAM,IAAI,GAAG,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAA;QACpD,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAED,SAAS,UAAU,CAAC,CAAM;IACxB,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;AAC1B,CAAC;AAED,SAAS,cAAc,CAAC,CAAM;IAC5B,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;AACzB,CAAC"}
node_modules/@actions/core/lib/core.d.ts (198 lines, generated, vendored, normal file)
@@ -0,0 +1,198 @@
/**
 * Interface for getInput options
 */
export interface InputOptions {
    /** Optional. Whether the input is required. If required and not present, will throw. Defaults to false */
    required?: boolean;
    /** Optional. Whether leading/trailing whitespace will be trimmed for the input. Defaults to true */
    trimWhitespace?: boolean;
}
/**
 * The code to exit an action
 */
export declare enum ExitCode {
    /**
     * A code indicating that the action was successful
     */
    Success = 0,
    /**
     * A code indicating that the action was a failure
     */
    Failure = 1
}
/**
 * Optional properties that can be sent with annotation commands (notice, error, and warning)
 * See: https://docs.github.com/en/rest/reference/checks#create-a-check-run for more information about annotations.
 */
export interface AnnotationProperties {
    /**
     * A title for the annotation.
     */
    title?: string;
    /**
     * The path of the file for which the annotation should be created.
     */
    file?: string;
    /**
     * The start line for the annotation.
     */
    startLine?: number;
    /**
     * The end line for the annotation. Defaults to `startLine` when `startLine` is provided.
     */
    endLine?: number;
    /**
     * The start column for the annotation. Cannot be sent when `startLine` and `endLine` are different values.
     */
    startColumn?: number;
    /**
     * The end column for the annotation. Cannot be sent when `startLine` and `endLine` are different values.
     * Defaults to `startColumn` when `startColumn` is provided.
     */
    endColumn?: number;
}
/**
 * Sets env variable for this action and future actions in the job
 * @param name the name of the variable to set
 * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
 */
export declare function exportVariable(name: string, val: any): void;
/**
 * Registers a secret which will get masked from logs
 * @param secret value of the secret
 */
export declare function setSecret(secret: string): void;
/**
 * Prepends inputPath to the PATH (for this action and future actions)
 * @param inputPath
 */
export declare function addPath(inputPath: string): void;
/**
 * Gets the value of an input.
 * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
 * Returns an empty string if the value is not defined.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string
 */
export declare function getInput(name: string, options?: InputOptions): string;
/**
 * Gets the values of an multiline input. Each value is also trimmed.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string[]
 *
 */
export declare function getMultilineInput(name: string, options?: InputOptions): string[];
/**
 * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
 * Support boolean input list: `true | True | TRUE | false | False | FALSE` .
 * The return value is also in boolean type.
 * ref: https://yaml.org/spec/1.2/spec.html#id2804923
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns boolean
 */
export declare function getBooleanInput(name: string, options?: InputOptions): boolean;
/**
 * Sets the value of an output.
 *
 * @param name name of the output to set
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
export declare function setOutput(name: string, value: any): void;
/**
 * Enables or disables the echoing of commands into stdout for the rest of the step.
 * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
 *
 */
export declare function setCommandEcho(enabled: boolean): void;
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
export declare function setFailed(message: string | Error): void;
/**
 * Gets whether Actions Step Debug is on or not
 */
export declare function isDebug(): boolean;
/**
 * Writes debug message to user log
 * @param message debug message
 */
export declare function debug(message: string): void;
/**
 * Adds an error issue
 * @param message error issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
export declare function error(message: string | Error, properties?: AnnotationProperties): void;
/**
 * Adds a warning issue
 * @param message warning issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
export declare function warning(message: string | Error, properties?: AnnotationProperties): void;
/**
 * Adds a notice issue
 * @param message notice issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
export declare function notice(message: string | Error, properties?: AnnotationProperties): void;
/**
 * Writes info to log with console.log.
 * @param message info message
 */
export declare function info(message: string): void;
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group
 *
 * @param name The name of the output group
 */
export declare function startGroup(name: string): void;
/**
 * End an output group.
 */
export declare function endGroup(): void;
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
export declare function group<T>(name: string, fn: () => Promise<T>): Promise<T>;
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param name name of the state to store
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
export declare function saveState(name: string, value: any): void;
/**
 * Gets the value of an state set by this action's main execution.
 *
 * @param name name of the state to get
 * @returns string
 */
export declare function getState(name: string): string;
export declare function getIDToken(aud?: string): Promise<string>;
/**
 * Summary exports
 */
export { summary } from './summary';
/**
 * @deprecated use core.summary
 */
export { markdownSummary } from './summary';
/**
 * Path exports
 */
export { toPosixPath, toWin32Path, toPlatformPath } from './path-utils';
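A minimal usage sketch of the public API declared above, for orientation; the input and output names are hypothetical, and this snippet is not part of the commit:

    import * as core from '@actions/core';

    async function run(): Promise<void> {
      try {
        // 'environment' and 'dry-run' are hypothetical input names used only for illustration.
        const env = core.getInput('environment', { required: true });
        // getBooleanInput throws unless the input is one of true/True/TRUE/false/False/FALSE.
        const dryRun = core.getBooleanInput('dry-run');
        core.info(`Deploying to ${env} (dry run: ${dryRun})`);
        core.setOutput('deployed-at', new Date().toISOString());
      } catch (err) {
        core.setFailed(err instanceof Error ? err : String(err));
      }
    }

    run();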
node_modules/@actions/core/lib/core.js (336 lines, generated, vendored, normal file)
@@ -0,0 +1,336 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
const command_1 = require("./command");
const file_command_1 = require("./file-command");
const utils_1 = require("./utils");
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const oidc_utils_1 = require("./oidc-utils");
/**
 * The code to exit an action
 */
var ExitCode;
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
 * Sets env variable for this action and future actions in the job
 * @param name the name of the variable to set
 * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
    const convertedVal = utils_1.toCommandValue(val);
    process.env[name] = convertedVal;
    const filePath = process.env['GITHUB_ENV'] || '';
    if (filePath) {
        return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
    }
    command_1.issueCommand('set-env', { name }, convertedVal);
}
exports.exportVariable = exportVariable;
/**
 * Registers a secret which will get masked from logs
 * @param secret value of the secret
 */
function setSecret(secret) {
    command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
/**
 * Prepends inputPath to the PATH (for this action and future actions)
 * @param inputPath
 */
function addPath(inputPath) {
    const filePath = process.env['GITHUB_PATH'] || '';
    if (filePath) {
        file_command_1.issueFileCommand('PATH', inputPath);
    }
    else {
        command_1.issueCommand('add-path', {}, inputPath);
    }
    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
/**
 * Gets the value of an input.
 * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
 * Returns an empty string if the value is not defined.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string
 */
function getInput(name, options) {
    const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
    if (options && options.required && !val) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    if (options && options.trimWhitespace === false) {
        return val;
    }
    return val.trim();
}
exports.getInput = getInput;
/**
 * Gets the values of an multiline input. Each value is also trimmed.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string[]
 *
 */
function getMultilineInput(name, options) {
    const inputs = getInput(name, options)
        .split('\n')
        .filter(x => x !== '');
    if (options && options.trimWhitespace === false) {
        return inputs;
    }
    return inputs.map(input => input.trim());
}
exports.getMultilineInput = getMultilineInput;
/**
 * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
 * Support boolean input list: `true | True | TRUE | false | False | FALSE` .
 * The return value is also in boolean type.
 * ref: https://yaml.org/spec/1.2/spec.html#id2804923
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns boolean
 */
function getBooleanInput(name, options) {
    const trueValue = ['true', 'True', 'TRUE'];
    const falseValue = ['false', 'False', 'FALSE'];
    const val = getInput(name, options);
    if (trueValue.includes(val))
        return true;
    if (falseValue.includes(val))
        return false;
    throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
        `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
}
exports.getBooleanInput = getBooleanInput;
/**
 * Sets the value of an output.
 *
 * @param name name of the output to set
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
    const filePath = process.env['GITHUB_OUTPUT'] || '';
    if (filePath) {
        return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
    }
    process.stdout.write(os.EOL);
    command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
}
exports.setOutput = setOutput;
/**
 * Enables or disables the echoing of commands into stdout for the rest of the step.
 * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
 *
 */
function setCommandEcho(enabled) {
    command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
function setFailed(message) {
    process.exitCode = ExitCode.Failure;
    error(message);
}
exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
 * Gets whether Actions Step Debug is on or not
 */
function isDebug() {
    return process.env['RUNNER_DEBUG'] === '1';
}
exports.isDebug = isDebug;
/**
 * Writes debug message to user log
 * @param message debug message
 */
function debug(message) {
    command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
/**
 * Adds an error issue
 * @param message error issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
function error(message, properties = {}) {
    command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.error = error;
/**
 * Adds a warning issue
 * @param message warning issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
function warning(message, properties = {}) {
    command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.warning = warning;
/**
 * Adds a notice issue
 * @param message notice issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
function notice(message, properties = {}) {
    command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.notice = notice;
/**
 * Writes info to log with console.log.
 * @param message info message
 */
function info(message) {
    process.stdout.write(message + os.EOL);
}
exports.info = info;
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group
 *
 * @param name The name of the output group
 */
function startGroup(name) {
    command_1.issue('group', name);
}
exports.startGroup = startGroup;
/**
 * End an output group.
 */
function endGroup() {
    command_1.issue('endgroup');
}
exports.endGroup = endGroup;
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
function group(name, fn) {
    return __awaiter(this, void 0, void 0, function* () {
        startGroup(name);
        let result;
        try {
            result = yield fn();
        }
        finally {
            endGroup();
        }
        return result;
    });
}
exports.group = group;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param name name of the state to store
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
    const filePath = process.env['GITHUB_STATE'] || '';
    if (filePath) {
        return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
    }
    command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
}
exports.saveState = saveState;
/**
 * Gets the value of an state set by this action's main execution.
 *
 * @param name name of the state to get
 * @returns string
 */
function getState(name) {
    return process.env[`STATE_${name}`] || '';
}
exports.getState = getState;
function getIDToken(aud) {
    return __awaiter(this, void 0, void 0, function* () {
        return yield oidc_utils_1.OidcClient.getIDToken(aud);
    });
}
exports.getIDToken = getIDToken;
/**
 * Summary exports
 */
var summary_1 = require("./summary");
Object.defineProperty(exports, "summary", { enumerable: true, get: function () { return summary_1.summary; } });
/**
 * @deprecated use core.summary
 */
var summary_2 = require("./summary");
Object.defineProperty(exports, "markdownSummary", { enumerable: true, get: function () { return summary_2.markdownSummary; } });
/**
 * Path exports
 */
var path_utils_1 = require("./path-utils");
Object.defineProperty(exports, "toPosixPath", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });
Object.defineProperty(exports, "toWin32Path", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });
Object.defineProperty(exports, "toPlatformPath", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });
//# sourceMappingURL=core.js.map
node_modules/@actions/core/lib/core.js.map (1 line, generated, vendored, normal file)
File diff suppressed because one or more lines are too long
node_modules/@actions/core/lib/file-command.d.ts (2 lines, generated, vendored, normal file)
@@ -0,0 +1,2 @@
export declare function issueFileCommand(command: string, message: any): void;
export declare function prepareKeyValueMessage(key: string, value: any): string;
node_modules/@actions/core/lib/file-command.js (58 lines, generated, vendored, normal file)
@@ -0,0 +1,58 @@
"use strict";
// For internal use, subject to change.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const uuid_1 = require("uuid");
const utils_1 = require("./utils");
function issueFileCommand(command, message) {
    const filePath = process.env[`GITHUB_${command}`];
    if (!filePath) {
        throw new Error(`Unable to find environment variable for file command ${command}`);
    }
    if (!fs.existsSync(filePath)) {
        throw new Error(`Missing file at path: ${filePath}`);
    }
    fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
        encoding: 'utf8'
    });
}
exports.issueFileCommand = issueFileCommand;
function prepareKeyValueMessage(key, value) {
    const delimiter = `ghadelimiter_${uuid_1.v4()}`;
    const convertedValue = utils_1.toCommandValue(value);
    // These should realistically never happen, but just in case someone finds a
    // way to exploit uuid generation let's not allow keys or values that contain
    // the delimiter.
    if (key.includes(delimiter)) {
        throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
    }
    if (convertedValue.includes(delimiter)) {
        throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
    }
    return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
}
exports.prepareKeyValueMessage = prepareKeyValueMessage;
//# sourceMappingURL=file-command.js.map
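For reference, prepareKeyValueMessage above is what gives file commands their heredoc-style shape; the sketch below shows roughly what one core.setOutput call appends to the file named by GITHUB_OUTPUT (the delimiter value is illustrative, not a real UUID, and this note is not part of the commit):

    // Sketch only: approximate content appended to $GITHUB_OUTPUT by setOutput('release-tag', 'v1.2.3').
    // release-tag<<ghadelimiter_<random-uuid>
    // v1.2.3
    // ghadelimiter_<random-uuid>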
node_modules/@actions/core/lib/file-command.js.map (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"file":"file-command.js","sourceRoot":"","sources":["../src/file-command.ts"],"names":[],"mappings":";AAAA,uCAAuC;;;;;;;;;;;;;;;;;;;;;;AAEvC,mCAAmC;AACnC,uDAAuD;AAEvD,uCAAwB;AACxB,uCAAwB;AACxB,+BAAiC;AACjC,mCAAsC;AAEtC,SAAgB,gBAAgB,CAAC,OAAe,EAAE,OAAY;IAC5D,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,OAAO,EAAE,CAAC,CAAA;IACjD,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,IAAI,KAAK,CACb,wDAAwD,OAAO,EAAE,CAClE,CAAA;KACF;IACD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;QAC5B,MAAM,IAAI,KAAK,CAAC,yBAAyB,QAAQ,EAAE,CAAC,CAAA;KACrD;IAED,EAAE,CAAC,cAAc,CAAC,QAAQ,EAAE,GAAG,sBAAc,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,EAAE;QACjE,QAAQ,EAAE,MAAM;KACjB,CAAC,CAAA;AACJ,CAAC;AAdD,4CAcC;AAED,SAAgB,sBAAsB,CAAC,GAAW,EAAE,KAAU;IAC5D,MAAM,SAAS,GAAG,gBAAgB,SAAM,EAAE,EAAE,CAAA;IAC5C,MAAM,cAAc,GAAG,sBAAc,CAAC,KAAK,CAAC,CAAA;IAE5C,4EAA4E;IAC5E,6EAA6E;IAC7E,iBAAiB;IACjB,IAAI,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QAC3B,MAAM,IAAI,KAAK,CACb,4DAA4D,SAAS,GAAG,CACzE,CAAA;KACF;IAED,IAAI,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CACb,6DAA6D,SAAS,GAAG,CAC1E,CAAA;KACF;IAED,OAAO,GAAG,GAAG,KAAK,SAAS,GAAG,EAAE,CAAC,GAAG,GAAG,cAAc,GAAG,EAAE,CAAC,GAAG,GAAG,SAAS,EAAE,CAAA;AAC9E,CAAC;AApBD,wDAoBC"}
node_modules/@actions/core/lib/oidc-utils.d.ts (7 lines, generated, vendored, normal file)
@@ -0,0 +1,7 @@
export declare class OidcClient {
    private static createHttpClient;
    private static getRequestToken;
    private static getIDTokenUrl;
    private static getCall;
    static getIDToken(audience?: string): Promise<string>;
}
node_modules/@actions/core/lib/oidc-utils.js (77 lines, generated, vendored, normal file)
@@ -0,0 +1,77 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.OidcClient = void 0;
const http_client_1 = require("@actions/http-client");
const auth_1 = require("@actions/http-client/lib/auth");
const core_1 = require("./core");
class OidcClient {
    static createHttpClient(allowRetry = true, maxRetry = 10) {
        const requestOptions = {
            allowRetries: allowRetry,
            maxRetries: maxRetry
        };
        return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
    }
    static getRequestToken() {
        const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
        if (!token) {
            throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
        }
        return token;
    }
    static getIDTokenUrl() {
        const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
        if (!runtimeUrl) {
            throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
        }
        return runtimeUrl;
    }
    static getCall(id_token_url) {
        var _a;
        return __awaiter(this, void 0, void 0, function* () {
            const httpclient = OidcClient.createHttpClient();
            const res = yield httpclient
                .getJson(id_token_url)
                .catch(error => {
                throw new Error(`Failed to get ID Token. \n
        Error Code : ${error.statusCode}\n
        Error Message: ${error.message}`);
            });
            const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
            if (!id_token) {
                throw new Error('Response json body do not have ID Token field');
            }
            return id_token;
        });
    }
    static getIDToken(audience) {
        return __awaiter(this, void 0, void 0, function* () {
            try {
                // New ID Token is requested from action service
                let id_token_url = OidcClient.getIDTokenUrl();
                if (audience) {
                    const encodedAudience = encodeURIComponent(audience);
                    id_token_url = `${id_token_url}&audience=${encodedAudience}`;
                }
                core_1.debug(`ID token url is ${id_token_url}`);
                const id_token = yield OidcClient.getCall(id_token_url);
                core_1.setSecret(id_token);
                return id_token;
            }
            catch (error) {
                throw new Error(`Error message: ${error.message}`);
            }
        });
    }
}
exports.OidcClient = OidcClient;
//# sourceMappingURL=oidc-utils.js.map
node_modules/@actions/core/lib/oidc-utils.js.map (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"file":"oidc-utils.js","sourceRoot":"","sources":["../src/oidc-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AAGA,sDAA+C;AAC/C,wDAAqE;AACrE,iCAAuC;AAKvC,MAAa,UAAU;IACb,MAAM,CAAC,gBAAgB,CAC7B,UAAU,GAAG,IAAI,EACjB,QAAQ,GAAG,EAAE;QAEb,MAAM,cAAc,GAAmB;YACrC,YAAY,EAAE,UAAU;YACxB,UAAU,EAAE,QAAQ;SACrB,CAAA;QAED,OAAO,IAAI,wBAAU,CACnB,qBAAqB,EACrB,CAAC,IAAI,8BAAuB,CAAC,UAAU,CAAC,eAAe,EAAE,CAAC,CAAC,EAC3D,cAAc,CACf,CAAA;IACH,CAAC;IAEO,MAAM,CAAC,eAAe;QAC5B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAA;QAC3D,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CACb,2DAA2D,CAC5D,CAAA;SACF;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,MAAM,CAAC,aAAa;QAC1B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,8BAA8B,CAAC,CAAA;QAC9D,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;SAC3E;QACD,OAAO,UAAU,CAAA;IACnB,CAAC;IAEO,MAAM,CAAO,OAAO,CAAC,YAAoB;;;YAC/C,MAAM,UAAU,GAAG,UAAU,CAAC,gBAAgB,EAAE,CAAA;YAEhD,MAAM,GAAG,GAAG,MAAM,UAAU;iBACzB,OAAO,CAAgB,YAAY,CAAC;iBACpC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACb,MAAM,IAAI,KAAK,CACb;uBACa,KAAK,CAAC,UAAU;yBACd,KAAK,CAAC,OAAO,EAAE,CAC/B,CAAA;YACH,CAAC,CAAC,CAAA;YAEJ,MAAM,QAAQ,SAAG,GAAG,CAAC,MAAM,0CAAE,KAAK,CAAA;YAClC,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;aACjE;YACD,OAAO,QAAQ,CAAA;;KAChB;IAED,MAAM,CAAO,UAAU,CAAC,QAAiB;;YACvC,IAAI;gBACF,gDAAgD;gBAChD,IAAI,YAAY,GAAW,UAAU,CAAC,aAAa,EAAE,CAAA;gBACrD,IAAI,QAAQ,EAAE;oBACZ,MAAM,eAAe,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAA;oBACpD,YAAY,GAAG,GAAG,YAAY,aAAa,eAAe,EAAE,CAAA;iBAC7D;gBAED,YAAK,CAAC,mBAAmB,YAAY,EAAE,CAAC,CAAA;gBAExC,MAAM,QAAQ,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;gBACvD,gBAAS,CAAC,QAAQ,CAAC,CAAA;gBACnB,OAAO,QAAQ,CAAA;aAChB;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,kBAAkB,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACnD;QACH,CAAC;KAAA;CACF;AAzED,gCAyEC"}
node_modules/@actions/core/lib/path-utils.d.ts (25 lines, generated, vendored, normal file)
@@ -0,0 +1,25 @@
/**
 * toPosixPath converts the given path to the posix form. On Windows, \\ will be
 * replaced with /.
 *
 * @param pth. Path to transform.
 * @return string Posix path.
 */
export declare function toPosixPath(pth: string): string;
/**
 * toWin32Path converts the given path to the win32 form. On Linux, / will be
 * replaced with \\.
 *
 * @param pth. Path to transform.
 * @return string Win32 path.
 */
export declare function toWin32Path(pth: string): string;
/**
 * toPlatformPath converts the given path to a platform-specific path. It does
 * this by replacing instances of / and \ with the platform-specific path
 * separator.
 *
 * @param pth The path to platformize.
 * @return string The platform-specific path.
 */
export declare function toPlatformPath(pth: string): string;
node_modules/@actions/core/lib/path-utils.js (58 lines, generated, vendored, normal file)
@@ -0,0 +1,58 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
const path = __importStar(require("path"));
/**
 * toPosixPath converts the given path to the posix form. On Windows, \\ will be
 * replaced with /.
 *
 * @param pth. Path to transform.
 * @return string Posix path.
 */
function toPosixPath(pth) {
    return pth.replace(/[\\]/g, '/');
}
exports.toPosixPath = toPosixPath;
/**
 * toWin32Path converts the given path to the win32 form. On Linux, / will be
 * replaced with \\.
 *
 * @param pth. Path to transform.
 * @return string Win32 path.
 */
function toWin32Path(pth) {
    return pth.replace(/[/]/g, '\\');
}
exports.toWin32Path = toWin32Path;
/**
 * toPlatformPath converts the given path to a platform-specific path. It does
 * this by replacing instances of / and \ with the platform-specific path
 * separator.
 *
 * @param pth The path to platformize.
 * @return string The platform-specific path.
 */
function toPlatformPath(pth) {
    return pth.replace(/[/\\]/g, path.sep);
}
exports.toPlatformPath = toPlatformPath;
//# sourceMappingURL=path-utils.js.map
node_modules/@actions/core/lib/path-utils.js.map (1 line, generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"file":"path-utils.js","sourceRoot":"","sources":["../src/path-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAE5B;;;;;;GAMG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,OAAO,CAAC,OAAO,EAAE,GAAG,CAAC,CAAA;AAClC,CAAC;AAFD,kCAEC;AAED;;;;;;GAMG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,OAAO,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;AAClC,CAAC;AAFD,kCAEC;AAED;;;;;;;GAOG;AACH,SAAgB,cAAc,CAAC,GAAW;IACxC,OAAO,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,wCAEC"}
node_modules/@actions/core/lib/summary.d.ts (202 lines, generated, vendored, normal file)
@@ -0,0 +1,202 @@
export declare const SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
export declare const SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
export declare type SummaryTableRow = (SummaryTableCell | string)[];
export interface SummaryTableCell {
    /**
     * Cell content
     */
    data: string;
    /**
     * Render cell as header
     * (optional) default: false
     */
    header?: boolean;
    /**
     * Number of columns the cell extends
     * (optional) default: '1'
     */
    colspan?: string;
    /**
     * Number of rows the cell extends
     * (optional) default: '1'
     */
    rowspan?: string;
}
export interface SummaryImageOptions {
    /**
     * The width of the image in pixels. Must be an integer without a unit.
     * (optional)
     */
    width?: string;
    /**
     * The height of the image in pixels. Must be an integer without a unit.
     * (optional)
     */
    height?: string;
}
export interface SummaryWriteOptions {
    /**
     * Replace all existing content in summary file with buffer contents
     * (optional) default: false
     */
    overwrite?: boolean;
}
declare class Summary {
    private _buffer;
    private _filePath?;
    constructor();
    /**
     * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
     * Also checks r/w permissions.
     *
     * @returns step summary file path
     */
    private filePath;
    /**
     * Wraps content in an HTML tag, adding any HTML attributes
     *
     * @param {string} tag HTML tag to wrap
     * @param {string | null} content content within the tag
     * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
     *
     * @returns {string} content wrapped in HTML element
     */
    private wrap;
    /**
     * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
     *
     * @param {SummaryWriteOptions} [options] (optional) options for write operation
     *
     * @returns {Promise<Summary>} summary instance
     */
    write(options?: SummaryWriteOptions): Promise<Summary>;
    /**
     * Clears the summary buffer and wipes the summary file
     *
     * @returns {Summary} summary instance
     */
    clear(): Promise<Summary>;
    /**
     * Returns the current summary buffer as a string
     *
     * @returns {string} string of summary buffer
     */
    stringify(): string;
    /**
     * If the summary buffer is empty
     *
     * @returns {boolen} true if the buffer is empty
     */
    isEmptyBuffer(): boolean;
    /**
     * Resets the summary buffer without writing to summary file
     *
     * @returns {Summary} summary instance
     */
    emptyBuffer(): Summary;
    /**
     * Adds raw text to the summary buffer
     *
     * @param {string} text content to add
     * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
     *
     * @returns {Summary} summary instance
     */
    addRaw(text: string, addEOL?: boolean): Summary;
    /**
     * Adds the operating system-specific end-of-line marker to the buffer
     *
     * @returns {Summary} summary instance
     */
    addEOL(): Summary;
    /**
     * Adds an HTML codeblock to the summary buffer
     *
     * @param {string} code content to render within fenced code block
     * @param {string} lang (optional) language to syntax highlight code
     *
     * @returns {Summary} summary instance
     */
    addCodeBlock(code: string, lang?: string): Summary;
    /**
     * Adds an HTML list to the summary buffer
     *
     * @param {string[]} items list of items to render
     * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
     *
     * @returns {Summary} summary instance
     */
    addList(items: string[], ordered?: boolean): Summary;
    /**
     * Adds an HTML table to the summary buffer
     *
     * @param {SummaryTableCell[]} rows table rows
     *
     * @returns {Summary} summary instance
     */
    addTable(rows: SummaryTableRow[]): Summary;
    /**
     * Adds a collapsable HTML details element to the summary buffer
     *
     * @param {string} label text for the closed state
     * @param {string} content collapsable content
     *
     * @returns {Summary} summary instance
     */
    addDetails(label: string, content: string): Summary;
    /**
     * Adds an HTML image tag to the summary buffer
     *
     * @param {string} src path to the image you to embed
     * @param {string} alt text description of the image
     * @param {SummaryImageOptions} options (optional) addition image attributes
     *
     * @returns {Summary} summary instance
     */
    addImage(src: string, alt: string, options?: SummaryImageOptions): Summary;
    /**
     * Adds an HTML section heading element
     *
     * @param {string} text heading text
     * @param {number | string} [level=1] (optional) the heading level, default: 1
     *
     * @returns {Summary} summary instance
     */
    addHeading(text: string, level?: number | string): Summary;
    /**
     * Adds an HTML thematic break (<hr>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addSeparator(): Summary;
    /**
     * Adds an HTML line break (<br>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addBreak(): Summary;
    /**
     * Adds an HTML blockquote to the summary buffer
     *
     * @param {string} text quote text
     * @param {string} cite (optional) citation url
     *
     * @returns {Summary} summary instance
     */
    addQuote(text: string, cite?: string): Summary;
    /**
     * Adds an HTML anchor tag to the summary buffer
     *
     * @param {string} text link text/content
     * @param {string} href hyperlink
     *
     * @returns {Summary} summary instance
     */
    addLink(text: string, href: string): Summary;
}
/**
 * @deprecated use `core.summary`
 */
export declare const markdownSummary: Summary;
export declare const summary: Summary;
export {};
283
node_modules/@actions/core/lib/summary.js
generated
vendored
Normal file
283
node_modules/@actions/core/lib/summary.js
generated
vendored
Normal file
@ -0,0 +1,283 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
const os_1 = require("os");
const fs_1 = require("fs");
const { access, appendFile, writeFile } = fs_1.promises;
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
class Summary {
    constructor() {
        this._buffer = '';
    }
    /**
     * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
     * Also checks r/w permissions.
     *
     * @returns step summary file path
     */
    filePath() {
        return __awaiter(this, void 0, void 0, function* () {
            if (this._filePath) {
                return this._filePath;
            }
            const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
            if (!pathFromEnv) {
                throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
            }
            try {
                yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
            }
            catch (_a) {
                throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
            }
            this._filePath = pathFromEnv;
            return this._filePath;
        });
    }
    /**
     * Wraps content in an HTML tag, adding any HTML attributes
     *
     * @param {string} tag HTML tag to wrap
     * @param {string | null} content content within the tag
     * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
     *
     * @returns {string} content wrapped in HTML element
     */
    wrap(tag, content, attrs = {}) {
        const htmlAttrs = Object.entries(attrs)
            .map(([key, value]) => ` ${key}="${value}"`)
            .join('');
        if (!content) {
            return `<${tag}${htmlAttrs}>`;
        }
        return `<${tag}${htmlAttrs}>${content}</${tag}>`;
    }
    /**
     * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
     *
     * @param {SummaryWriteOptions} [options] (optional) options for write operation
     *
     * @returns {Promise<Summary>} summary instance
     */
    write(options) {
        return __awaiter(this, void 0, void 0, function* () {
            const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
            const filePath = yield this.filePath();
            const writeFunc = overwrite ? writeFile : appendFile;
            yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
            return this.emptyBuffer();
        });
    }
    /**
     * Clears the summary buffer and wipes the summary file
     *
     * @returns {Summary} summary instance
     */
    clear() {
        return __awaiter(this, void 0, void 0, function* () {
            return this.emptyBuffer().write({ overwrite: true });
        });
    }
    /**
     * Returns the current summary buffer as a string
     *
     * @returns {string} string of summary buffer
     */
    stringify() {
        return this._buffer;
    }
    /**
     * If the summary buffer is empty
     *
     * @returns {boolen} true if the buffer is empty
     */
    isEmptyBuffer() {
        return this._buffer.length === 0;
    }
    /**
     * Resets the summary buffer without writing to summary file
     *
     * @returns {Summary} summary instance
     */
    emptyBuffer() {
        this._buffer = '';
        return this;
    }
    /**
     * Adds raw text to the summary buffer
     *
     * @param {string} text content to add
     * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
     *
     * @returns {Summary} summary instance
     */
    addRaw(text, addEOL = false) {
        this._buffer += text;
        return addEOL ? this.addEOL() : this;
    }
    /**
     * Adds the operating system-specific end-of-line marker to the buffer
     *
     * @returns {Summary} summary instance
     */
    addEOL() {
        return this.addRaw(os_1.EOL);
    }
    /**
     * Adds an HTML codeblock to the summary buffer
     *
     * @param {string} code content to render within fenced code block
     * @param {string} lang (optional) language to syntax highlight code
     *
     * @returns {Summary} summary instance
     */
    addCodeBlock(code, lang) {
        const attrs = Object.assign({}, (lang && { lang }));
        const element = this.wrap('pre', this.wrap('code', code), attrs);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML list to the summary buffer
     *
     * @param {string[]} items list of items to render
     * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
     *
     * @returns {Summary} summary instance
     */
    addList(items, ordered = false) {
        const tag = ordered ? 'ol' : 'ul';
        const listItems = items.map(item => this.wrap('li', item)).join('');
        const element = this.wrap(tag, listItems);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML table to the summary buffer
     *
     * @param {SummaryTableCell[]} rows table rows
     *
     * @returns {Summary} summary instance
     */
    addTable(rows) {
        const tableBody = rows
            .map(row => {
            const cells = row
                .map(cell => {
                if (typeof cell === 'string') {
                    return this.wrap('td', cell);
                }
                const { header, data, colspan, rowspan } = cell;
                const tag = header ? 'th' : 'td';
                const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
                return this.wrap(tag, data, attrs);
            })
                .join('');
            return this.wrap('tr', cells);
        })
            .join('');
        const element = this.wrap('table', tableBody);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds a collapsable HTML details element to the summary buffer
     *
     * @param {string} label text for the closed state
     * @param {string} content collapsable content
     *
     * @returns {Summary} summary instance
     */
    addDetails(label, content) {
        const element = this.wrap('details', this.wrap('summary', label) + content);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML image tag to the summary buffer
     *
     * @param {string} src path to the image you to embed
     * @param {string} alt text description of the image
     * @param {SummaryImageOptions} options (optional) addition image attributes
     *
     * @returns {Summary} summary instance
     */
    addImage(src, alt, options) {
        const { width, height } = options || {};
        const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
        const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML section heading element
     *
     * @param {string} text heading text
     * @param {number | string} [level=1] (optional) the heading level, default: 1
     *
     * @returns {Summary} summary instance
     */
    addHeading(text, level) {
        const tag = `h${level}`;
        const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
            ? tag
            : 'h1';
        const element = this.wrap(allowedTag, text);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML thematic break (<hr>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addSeparator() {
        const element = this.wrap('hr', null);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML line break (<br>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addBreak() {
        const element = this.wrap('br', null);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML blockquote to the summary buffer
     *
     * @param {string} text quote text
     * @param {string} cite (optional) citation url
     *
     * @returns {Summary} summary instance
     */
    addQuote(text, cite) {
        const attrs = Object.assign({}, (cite && { cite }));
        const element = this.wrap('blockquote', text, attrs);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML anchor tag to the summary buffer
     *
     * @param {string} text link text/content
     * @param {string} href hyperlink
     *
     * @returns {Summary} summary instance
     */
    addLink(text, href) {
        const element = this.wrap('a', text, { href });
        return this.addRaw(element).addEOL();
    }
}
const _summary = new Summary();
/**
 * @deprecated use `core.summary`
 */
exports.markdownSummary = _summary;
exports.summary = _summary;
//# sourceMappingURL=summary.js.map
1
node_modules/@actions/core/lib/summary.js.map
generated
vendored
Normal file
1
node_modules/@actions/core/lib/summary.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
14
node_modules/@actions/core/lib/utils.d.ts
generated
vendored
Normal file
14
node_modules/@actions/core/lib/utils.d.ts
generated
vendored
Normal file
@ -0,0 +1,14 @@
import { AnnotationProperties } from './core';
import { CommandProperties } from './command';
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string
 */
export declare function toCommandValue(input: any): string;
/**
 *
 * @param annotationProperties
 * @returns The command properties to send with the actual annotation command
 * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
 */
export declare function toCommandProperties(annotationProperties: AnnotationProperties): CommandProperties;
40
node_modules/@actions/core/lib/utils.js
generated
vendored
Normal file
40
node_modules/@actions/core/lib/utils.js
generated
vendored
Normal file
@ -0,0 +1,40 @@
"use strict";
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", { value: true });
exports.toCommandProperties = exports.toCommandValue = void 0;
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string
 */
function toCommandValue(input) {
    if (input === null || input === undefined) {
        return '';
    }
    else if (typeof input === 'string' || input instanceof String) {
        return input;
    }
    return JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
/**
 *
 * @param annotationProperties
 * @returns The command properties to send with the actual annotation command
 * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
 */
function toCommandProperties(annotationProperties) {
    if (!Object.keys(annotationProperties).length) {
        return {};
    }
    return {
        title: annotationProperties.title,
        file: annotationProperties.file,
        line: annotationProperties.startLine,
        endLine: annotationProperties.endLine,
        col: annotationProperties.startColumn,
        endColumn: annotationProperties.endColumn
    };
}
exports.toCommandProperties = toCommandProperties;
//# sourceMappingURL=utils.js.map
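A small sketch of how these helpers behave, based on the implementation above; the inline result comments are expectations rather than captured output, and importing from @actions/core/lib/utils directly is only for illustration.

// Strings pass through untouched; everything else is JSON-stringified.
import { toCommandValue, toCommandProperties } from '@actions/core/lib/utils';

toCommandValue('already a string'); // -> 'already a string'
toCommandValue({ retries: 3 });     // -> '{"retries":3}'
toCommandValue(undefined);          // -> ''

// Annotation properties are renamed to the keys the runner expects.
toCommandProperties({ title: 'Lint', file: 'src/main.ts', startLine: 10, startColumn: 2 });
// -> { title: 'Lint', file: 'src/main.ts', line: 10, endLine: undefined, col: 2, endColumn: undefined }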
1
node_modules/@actions/core/lib/utils.js.map
generated
vendored
Normal file
1
node_modules/@actions/core/lib/utils.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":";AAAA,mCAAmC;AACnC,uDAAuD;;;AAKvD;;;GAGG;AACH,SAAgB,cAAc,CAAC,KAAU;IACvC,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,OAAO,EAAE,CAAA;KACV;SAAM,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,YAAY,MAAM,EAAE;QAC/D,OAAO,KAAe,CAAA;KACvB;IACD,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAA;AAC9B,CAAC;AAPD,wCAOC;AAED;;;;;GAKG;AACH,SAAgB,mBAAmB,CACjC,oBAA0C;IAE1C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC,MAAM,EAAE;QAC7C,OAAO,EAAE,CAAA;KACV;IAED,OAAO;QACL,KAAK,EAAE,oBAAoB,CAAC,KAAK;QACjC,IAAI,EAAE,oBAAoB,CAAC,IAAI;QAC/B,IAAI,EAAE,oBAAoB,CAAC,SAAS;QACpC,OAAO,EAAE,oBAAoB,CAAC,OAAO;QACrC,GAAG,EAAE,oBAAoB,CAAC,WAAW;QACrC,SAAS,EAAE,oBAAoB,CAAC,SAAS;KAC1C,CAAA;AACH,CAAC;AAfD,kDAeC"}
29
node_modules/@actions/github/lib/context.d.ts
generated
vendored
Normal file
29
node_modules/@actions/github/lib/context.d.ts
generated
vendored
Normal file
@ -0,0 +1,29 @@
import { WebhookPayload } from './interfaces';
export declare class Context {
    /**
     * Webhook payload object that triggered the workflow
     */
    payload: WebhookPayload;
    eventName: string;
    sha: string;
    ref: string;
    workflow: string;
    action: string;
    actor: string;
    job: string;
    runNumber: number;
    runId: number;
    /**
     * Hydrate the context from the environment
     */
    constructor();
    get issue(): {
        owner: string;
        repo: string;
        number: number;
    };
    get repo(): {
        owner: string;
        repo: string;
    };
}
50
node_modules/@actions/github/lib/context.js
generated
vendored
Normal file
50
node_modules/@actions/github/lib/context.js
generated
vendored
Normal file
@ -0,0 +1,50 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Context = void 0;
const fs_1 = require("fs");
const os_1 = require("os");
class Context {
    /**
     * Hydrate the context from the environment
     */
    constructor() {
        this.payload = {};
        if (process.env.GITHUB_EVENT_PATH) {
            if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
                this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
            }
            else {
                const path = process.env.GITHUB_EVENT_PATH;
                process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
            }
        }
        this.eventName = process.env.GITHUB_EVENT_NAME;
        this.sha = process.env.GITHUB_SHA;
        this.ref = process.env.GITHUB_REF;
        this.workflow = process.env.GITHUB_WORKFLOW;
        this.action = process.env.GITHUB_ACTION;
        this.actor = process.env.GITHUB_ACTOR;
        this.job = process.env.GITHUB_JOB;
        this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
        this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
    }
    get issue() {
        const payload = this.payload;
        return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
    }
    get repo() {
        if (process.env.GITHUB_REPOSITORY) {
            const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
            return { owner, repo };
        }
        if (this.payload.repository) {
            return {
                owner: this.payload.repository.owner.login,
                repo: this.payload.repository.name
            };
        }
        throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
    }
}
exports.Context = Context;
//# sourceMappingURL=context.js.map
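A short sketch of reading the hydrated context from inside an action, assuming the usual GITHUB_* environment variables are present; the log text is illustrative.

// Illustrative only: repo coordinates come from GITHUB_REPOSITORY, issue/PR
// numbers from the event payload when one is available.
import { context } from '@actions/github';

const { owner, repo } = context.repo;
console.log(`Triggered by ${context.eventName} on ${owner}/${repo} (run #${context.runNumber})`);

if (context.payload.pull_request) {
  console.log(`PR number: ${context.issue.number}`);
}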
1
node_modules/@actions/github/lib/context.js.map
generated
vendored
Normal file
1
node_modules/@actions/github/lib/context.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"context.js","sourceRoot":"","sources":["../src/context.ts"],"names":[],"mappings":";;;AAEA,2BAA2C;AAC3C,2BAAsB;AAEtB,MAAa,OAAO;IAgBlB;;OAEG;IACH;QACE,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;QACjB,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE;YACjC,IAAI,eAAU,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;gBAC7C,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,KAAK,CACvB,iBAAY,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,EAAC,QAAQ,EAAE,MAAM,EAAC,CAAC,CAChE,CAAA;aACF;iBAAM;gBACL,MAAM,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAA;gBAC1C,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,qBAAqB,IAAI,kBAAkB,QAAG,EAAE,CAAC,CAAA;aACvE;SACF;QACD,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,iBAA2B,CAAA;QACxD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,eAAyB,CAAA;QACrD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,aAAuB,CAAA;QACjD,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,YAAsB,CAAA;QAC/C,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,iBAA2B,EAAE,EAAE,CAAC,CAAA;QACtE,IAAI,CAAC,KAAK,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,aAAuB,EAAE,EAAE,CAAC,CAAA;IAChE,CAAC;IAED,IAAI,KAAK;QACP,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAA;QAE5B,uCACK,IAAI,CAAC,IAAI,KACZ,MAAM,EAAE,CAAC,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,YAAY,IAAI,OAAO,CAAC,CAAC,MAAM,IAClE;IACH,CAAC;IAED,IAAI,IAAI;QACN,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE;YACjC,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;YAC9D,OAAO,EAAC,KAAK,EAAE,IAAI,EAAC,CAAA;SACrB;QAED,IAAI,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YAC3B,OAAO;gBACL,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK;gBAC1C,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI;aACnC,CAAA;SACF;QAED,MAAM,IAAI,KAAK,CACb,kFAAkF,CACnF,CAAA;IACH,CAAC;CACF;AApED,0BAoEC"}
11
node_modules/@actions/github/lib/github.d.ts
generated
vendored
Normal file
11
node_modules/@actions/github/lib/github.d.ts
generated
vendored
Normal file
@ -0,0 +1,11 @@
import * as Context from './context';
import { GitHub } from './utils';
import { OctokitOptions } from '@octokit/core/dist-types/types';
export declare const context: Context.Context;
/**
 * Returns a hydrated octokit ready to use for GitHub Actions
 *
 * @param token the repo PAT or GITHUB_TOKEN
 * @param options other options to set
 */
export declare function getOctokit(token: string, options?: OctokitOptions): InstanceType<typeof GitHub>;
36
node_modules/@actions/github/lib/github.js
generated
vendored
Normal file
36
node_modules/@actions/github/lib/github.js
generated
vendored
Normal file
@ -0,0 +1,36 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getOctokit = exports.context = void 0;
const Context = __importStar(require("./context"));
const utils_1 = require("./utils");
exports.context = new Context.Context();
/**
 * Returns a hydrated octokit ready to use for GitHub Actions
 *
 * @param token the repo PAT or GITHUB_TOKEN
 * @param options other options to set
 */
function getOctokit(token, options) {
    return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));
}
exports.getOctokit = getOctokit;
//# sourceMappingURL=github.js.map
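A minimal sketch of getOctokit in an action, assuming a github-token input (or the default GITHUB_TOKEN secret) is supplied and a toolkit version that exposes the REST methods under octokit.rest.

// Illustrative only: create an authenticated client and make one REST call.
import * as core from '@actions/core';
import * as github from '@actions/github';

async function run(): Promise<void> {
  const octokit = github.getOctokit(core.getInput('github-token'));
  const { owner, repo } = github.context.repo;
  const { data } = await octokit.rest.repos.get({ owner, repo });
  core.info(`Default branch: ${data.default_branch}`);
}

run();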
1
node_modules/@actions/github/lib/github.js.map
generated
vendored
Normal file
1
node_modules/@actions/github/lib/github.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"github.js","sourceRoot":"","sources":["../src/github.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,mDAAoC;AACpC,mCAAiD;AAKpC,QAAA,OAAO,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,CAAA;AAE5C;;;;;GAKG;AACH,SAAgB,UAAU,CACxB,KAAa,EACb,OAAwB;IAExB,OAAO,IAAI,cAAM,CAAC,yBAAiB,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC,CAAA;AACtD,CAAC;AALD,gCAKC"}
40
node_modules/@actions/github/lib/interfaces.d.ts
generated
vendored
Normal file
40
node_modules/@actions/github/lib/interfaces.d.ts
generated
vendored
Normal file
@ -0,0 +1,40 @@
export interface PayloadRepository {
    [key: string]: any;
    full_name?: string;
    name: string;
    owner: {
        [key: string]: any;
        login: string;
        name?: string;
    };
    html_url?: string;
}
export interface WebhookPayload {
    [key: string]: any;
    repository?: PayloadRepository;
    issue?: {
        [key: string]: any;
        number: number;
        html_url?: string;
        body?: string;
    };
    pull_request?: {
        [key: string]: any;
        number: number;
        html_url?: string;
        body?: string;
    };
    sender?: {
        [key: string]: any;
        type: string;
    };
    action?: string;
    installation?: {
        id: number;
        [key: string]: any;
    };
    comment?: {
        id: number;
        [key: string]: any;
    };
}
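A brief sketch of narrowing this loosely typed webhook payload before use, assuming an event such as issue_comment triggered the workflow.

// Illustrative only: guard on the optional payload fields before reading them.
import { context } from '@actions/github';
import { WebhookPayload } from '@actions/github/lib/interfaces';

const payload: WebhookPayload = context.payload;
if (payload.comment && payload.issue) {
  console.log(`Comment ${payload.comment.id} on issue #${payload.issue.number}`);
}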
4
node_modules/@actions/github/lib/interfaces.js
generated
vendored
Normal file
4
node_modules/@actions/github/lib/interfaces.js
generated
vendored
Normal file
@ -0,0 +1,4 @@
"use strict";
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=interfaces.js.map
1
node_modules/@actions/github/lib/interfaces.js.map
generated
vendored
Normal file
1
node_modules/@actions/github/lib/interfaces.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../src/interfaces.ts"],"names":[],"mappings":";AAAA,uDAAuD"}
6
node_modules/@actions/github/lib/internal/utils.d.ts
generated
vendored
Normal file
6
node_modules/@actions/github/lib/internal/utils.d.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
/// <reference types="node" />
import * as http from 'http';
import { OctokitOptions } from '@octokit/core/dist-types/types';
export declare function getAuthString(token: string, options: OctokitOptions): string | undefined;
export declare function getProxyAgent(destinationUrl: string): http.Agent;
export declare function getApiBaseUrl(): string;
43
node_modules/@actions/github/lib/internal/utils.js
generated
vendored
Normal file
43
node_modules/@actions/github/lib/internal/utils.js
generated
vendored
Normal file
@ -0,0 +1,43 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;
const httpClient = __importStar(require("@actions/http-client"));
function getAuthString(token, options) {
    if (!token && !options.auth) {
        throw new Error('Parameter token or opts.auth is required');
    }
    else if (token && options.auth) {
        throw new Error('Parameters token and opts.auth may not both be specified');
    }
    return typeof options.auth === 'string' ? options.auth : `token ${token}`;
}
exports.getAuthString = getAuthString;
function getProxyAgent(destinationUrl) {
    const hc = new httpClient.HttpClient();
    return hc.getAgent(destinationUrl);
}
exports.getProxyAgent = getProxyAgent;
function getApiBaseUrl() {
    return process.env['GITHUB_API_URL'] || 'https://api.github.com';
}
exports.getApiBaseUrl = getApiBaseUrl;
//# sourceMappingURL=utils.js.map
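A sketch of the auth-string rules enforced above; importing from the internal module path is an assumption for demonstration only, since normal callers go through getOctokit/getOctokitOptions, and the token values are placeholders.

// Illustrative only: expected behavior of the internal helpers.
import { getAuthString, getApiBaseUrl } from '@actions/github/lib/internal/utils';

getAuthString('ghp_example', {});                 // -> 'token ghp_example'
getAuthString('', { auth: 'token ghp_example' }); // -> 'token ghp_example' (string auth wins)
// getAuthString('', {})                          // throws: token or opts.auth is required
// getAuthString('ghp_example', { auth: '...' })  // throws: may not both be specified

getApiBaseUrl(); // GITHUB_API_URL if set, otherwise 'https://api.github.com'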
1
node_modules/@actions/github/lib/internal/utils.js.map
generated
vendored
Normal file
1
node_modules/@actions/github/lib/internal/utils.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../src/internal/utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,iEAAkD;AAGlD,SAAgB,aAAa,CAC3B,KAAa,EACb,OAAuB;IAEvB,IAAI,CAAC,KAAK,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE;QAC3B,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAA;KAC5D;SAAM,IAAI,KAAK,IAAI,OAAO,CAAC,IAAI,EAAE;QAChC,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAA;KAC5E;IAED,OAAO,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,KAAK,EAAE,CAAA;AAC3E,CAAC;AAXD,sCAWC;AAED,SAAgB,aAAa,CAAC,cAAsB;IAClD,MAAM,EAAE,GAAG,IAAI,UAAU,CAAC,UAAU,EAAE,CAAA;IACtC,OAAO,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAA;AACpC,CAAC;AAHD,sCAGC;AAED,SAAgB,aAAa;IAC3B,OAAO,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,IAAI,wBAAwB,CAAA;AAClE,CAAC;AAFD,sCAEC"}
21
node_modules/@actions/github/lib/utils.d.ts
generated
vendored
Normal file
21
node_modules/@actions/github/lib/utils.d.ts
generated
vendored
Normal file
@ -0,0 +1,21 @@
import * as Context from './context';
import { Octokit } from '@octokit/core';
import { OctokitOptions } from '@octokit/core/dist-types/types';
export declare const context: Context.Context;
export declare const GitHub: (new (...args: any[]) => {
    [x: string]: any;
}) & {
    new (...args: any[]): {
        [x: string]: any;
    };
    plugins: any[];
} & typeof Octokit & import("@octokit/core/dist-types/types").Constructor<import("@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types").RestEndpointMethods & {
    paginate: import("@octokit/plugin-paginate-rest").PaginateInterface;
}>;
/**
 * Convience function to correctly format Octokit Options to pass into the constructor.
 *
 * @param token the repo PAT or GITHUB_TOKEN
 * @param options other options to set
 */
export declare function getOctokitOptions(token: string, options?: OctokitOptions): OctokitOptions;
54
node_modules/@actions/github/lib/utils.js
generated
vendored
Normal file
54
node_modules/@actions/github/lib/utils.js
generated
vendored
Normal file
@ -0,0 +1,54 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getOctokitOptions = exports.GitHub = exports.context = void 0;
const Context = __importStar(require("./context"));
const Utils = __importStar(require("./internal/utils"));
// octokit + plugins
const core_1 = require("@octokit/core");
const plugin_rest_endpoint_methods_1 = require("@octokit/plugin-rest-endpoint-methods");
const plugin_paginate_rest_1 = require("@octokit/plugin-paginate-rest");
exports.context = new Context.Context();
const baseUrl = Utils.getApiBaseUrl();
const defaults = {
    baseUrl,
    request: {
        agent: Utils.getProxyAgent(baseUrl)
    }
};
exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);
/**
 * Convience function to correctly format Octokit Options to pass into the constructor.
 *
 * @param token the repo PAT or GITHUB_TOKEN
 * @param options other options to set
 */
function getOctokitOptions(token, options) {
    const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller
    // Auth
    const auth = Utils.getAuthString(token, opts);
    if (auth) {
        opts.auth = auth;
    }
    return opts;
}
exports.getOctokitOptions = getOctokitOptions;
//# sourceMappingURL=utils.js.map
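A sketch of constructing the plugin-extended GitHub client directly with getOctokitOptions; the user agent string is a placeholder and the token comes from the runner environment.

// Illustrative only: same client that getOctokit() builds, with an extra option.
import { GitHub, getOctokitOptions } from '@actions/github/lib/utils';

const octokit = new GitHub(
  getOctokitOptions(process.env.GITHUB_TOKEN ?? '', { userAgent: 'my-action/1.0.0' })
);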
1
node_modules/@actions/github/lib/utils.js.map
generated
vendored
Normal file
1
node_modules/@actions/github/lib/utils.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,mDAAoC;AACpC,wDAAyC;AAEzC,oBAAoB;AACpB,wCAAqC;AAErC,wFAAyE;AACzE,wEAA0D;AAE7C,QAAA,OAAO,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,CAAA;AAE5C,MAAM,OAAO,GAAG,KAAK,CAAC,aAAa,EAAE,CAAA;AACrC,MAAM,QAAQ,GAAG;IACf,OAAO;IACP,OAAO,EAAE;QACP,KAAK,EAAE,KAAK,CAAC,aAAa,CAAC,OAAO,CAAC;KACpC;CACF,CAAA;AAEY,QAAA,MAAM,GAAG,cAAO,CAAC,MAAM,CAClC,kDAAmB,EACnB,mCAAY,CACb,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAA;AAEpB;;;;;GAKG;AACH,SAAgB,iBAAiB,CAC/B,KAAa,EACb,OAAwB;IAExB,MAAM,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,IAAI,EAAE,CAAC,CAAA,CAAC,iEAAiE;IAE/G,OAAO;IACP,MAAM,IAAI,GAAG,KAAK,CAAC,aAAa,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IAC7C,IAAI,IAAI,EAAE;QACR,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;KACjB;IAED,OAAO,IAAI,CAAA;AACb,CAAC;AAbD,8CAaC"}
26
node_modules/@actions/http-client/lib/auth.d.ts
generated
vendored
Normal file
26
node_modules/@actions/http-client/lib/auth.d.ts
generated
vendored
Normal file
@ -0,0 +1,26 @@
/// <reference types="node" />
import * as http from 'http';
import * as ifm from './interfaces';
import { HttpClientResponse } from './index';
export declare class BasicCredentialHandler implements ifm.RequestHandler {
    username: string;
    password: string;
    constructor(username: string, password: string);
    prepareRequest(options: http.RequestOptions): void;
    canHandleAuthentication(): boolean;
    handleAuthentication(): Promise<HttpClientResponse>;
}
export declare class BearerCredentialHandler implements ifm.RequestHandler {
    token: string;
    constructor(token: string);
    prepareRequest(options: http.RequestOptions): void;
    canHandleAuthentication(): boolean;
    handleAuthentication(): Promise<HttpClientResponse>;
}
export declare class PersonalAccessTokenCredentialHandler implements ifm.RequestHandler {
    token: string;
    constructor(token: string);
    prepareRequest(options: http.RequestOptions): void;
    canHandleAuthentication(): boolean;
    handleAuthentication(): Promise<HttpClientResponse>;
}
81
node_modules/@actions/http-client/lib/auth.js
generated
vendored
Normal file
81
node_modules/@actions/http-client/lib/auth.js
generated
vendored
Normal file
@ -0,0 +1,81 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;
class BasicCredentialHandler {
    constructor(username, password) {
        this.username = username;
        this.password = password;
    }
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports.BasicCredentialHandler = BasicCredentialHandler;
class BearerCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        options.headers['Authorization'] = `Bearer ${this.token}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports.BearerCredentialHandler = BearerCredentialHandler;
class PersonalAccessTokenCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
//# sourceMappingURL=auth.js.map
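A sketch of attaching one of these credential handlers to an HttpClient so every request carries an Authorization header; the token value and endpoint are placeholders.

// Illustrative only: handlers run prepareRequest() before each request is sent.
import { HttpClient } from '@actions/http-client';
import { BearerCredentialHandler } from '@actions/http-client/lib/auth';

async function fetchUser(): Promise<void> {
  const client = new HttpClient('my-action', [new BearerCredentialHandler('<token>')]);
  const res = await client.getJson<{ login: string }>('https://api.github.com/user');
  console.log(res.result?.login);
}

fetchUser();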
1
node_modules/@actions/http-client/lib/auth.js.map
generated
vendored
Normal file
1
node_modules/@actions/http-client/lib/auth.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAK/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA5BD,oFA4BC"}
130
node_modules/@actions/http-client/lib/index.d.ts
generated
vendored
Normal file
130
node_modules/@actions/http-client/lib/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,130 @@
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
import * as http from 'http';
import * as ifm from './interfaces';
import { ProxyAgent } from 'undici';
export declare enum HttpCodes {
    OK = 200,
    MultipleChoices = 300,
    MovedPermanently = 301,
    ResourceMoved = 302,
    SeeOther = 303,
    NotModified = 304,
    UseProxy = 305,
    SwitchProxy = 306,
    TemporaryRedirect = 307,
    PermanentRedirect = 308,
    BadRequest = 400,
    Unauthorized = 401,
    PaymentRequired = 402,
    Forbidden = 403,
    NotFound = 404,
    MethodNotAllowed = 405,
    NotAcceptable = 406,
    ProxyAuthenticationRequired = 407,
    RequestTimeout = 408,
    Conflict = 409,
    Gone = 410,
    TooManyRequests = 429,
    InternalServerError = 500,
    NotImplemented = 501,
    BadGateway = 502,
    ServiceUnavailable = 503,
    GatewayTimeout = 504
}
export declare enum Headers {
    Accept = "accept",
    ContentType = "content-type"
}
export declare enum MediaTypes {
    ApplicationJson = "application/json"
}
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
export declare function getProxyUrl(serverUrl: string): string;
export declare class HttpClientError extends Error {
    constructor(message: string, statusCode: number);
    statusCode: number;
    result?: any;
}
export declare class HttpClientResponse {
    constructor(message: http.IncomingMessage);
    message: http.IncomingMessage;
    readBody(): Promise<string>;
    readBodyBuffer?(): Promise<Buffer>;
}
export declare function isHttps(requestUrl: string): boolean;
export declare class HttpClient {
    userAgent: string | undefined;
    handlers: ifm.RequestHandler[];
    requestOptions: ifm.RequestOptions | undefined;
    private _ignoreSslError;
    private _socketTimeout;
    private _allowRedirects;
    private _allowRedirectDowngrade;
    private _maxRedirects;
    private _allowRetries;
    private _maxRetries;
    private _agent;
    private _proxyAgent;
    private _proxyAgentDispatcher;
    private _keepAlive;
    private _disposed;
    constructor(userAgent?: string, handlers?: ifm.RequestHandler[], requestOptions?: ifm.RequestOptions);
    options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    head(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    /**
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    getJson<T>(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
    postJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
    putJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
    patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch
     */
    request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream | null, headers?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    /**
     * Needs to be called if keepAlive is set to true in request options.
     */
    dispose(): void;
    /**
     * Raw request.
     * @param info
     * @param data
     */
    requestRaw(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null): Promise<HttpClientResponse>;
    /**
     * Raw request with callback.
     * @param info
     * @param data
     * @param onResult
     */
    requestRawWithCallback(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null, onResult: (err?: Error, res?: HttpClientResponse) => void): void;
    /**
     * Gets an http agent. This function is useful when you need an http agent that handles
     * routing through a proxy server - depending upon the url and proxy environment variables.
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl: string): http.Agent;
    getAgentDispatcher(serverUrl: string): ProxyAgent | undefined;
    private _prepareRequest;
    private _mergeHeaders;
    private _getExistingOrDefaultHeader;
    private _getAgent;
    private _getProxyAgentDispatcher;
    private _performExponentialBackoff;
    private _processResponse;
}
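A sketch of a typed GET with retries against the HttpClient surface declared above; the endpoint and response shape are assumptions for illustration.

// Illustrative only: getJson() resolves a 404 to a null result, other 4xx/5xx reject.
import { HttpClient } from '@actions/http-client';

interface Release { tag_name: string; }

async function latestTag(): Promise<string | null> {
  const client = new HttpClient('my-action', [], { allowRetries: true, maxRetries: 3 });
  const res = await client.getJson<Release>(
    'https://api.github.com/repos/actions/toolkit/releases/latest'
  );
  return res.result ? res.result.tag_name : null;
}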
656
node_modules/@actions/http-client/lib/index.js
generated
vendored
Normal file
656
node_modules/@actions/http-client/lib/index.js
generated
vendored
Normal file
@ -0,0 +1,656 @@
"use strict";
/* eslint-disable @typescript-eslint/no-explicit-any */
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
const http = __importStar(require("http"));
const https = __importStar(require("https"));
const pm = __importStar(require("./proxy"));
const tunnel = __importStar(require("tunnel"));
const undici_1 = require("undici");
var HttpCodes;
(function (HttpCodes) {
    HttpCodes[HttpCodes["OK"] = 200] = "OK";
    HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
    HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
    HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
    HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
    HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
    HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
    HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
    HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
    HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
    HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
    HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
    HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
    HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
    HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
    HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
    HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
    HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
    HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes || (exports.HttpCodes = HttpCodes = {}));
var Headers;
(function (Headers) {
    Headers["Accept"] = "accept";
    Headers["ContentType"] = "content-type";
})(Headers || (exports.Headers = Headers = {}));
var MediaTypes;
(function (MediaTypes) {
    MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes || (exports.MediaTypes = MediaTypes = {}));
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
function getProxyUrl(serverUrl) {
    const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
    return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
class HttpClientError extends Error {
    constructor(message, statusCode) {
        super(message);
        this.name = 'HttpClientError';
        this.statusCode = statusCode;
        Object.setPrototypeOf(this, HttpClientError.prototype);
    }
}
exports.HttpClientError = HttpClientError;
class HttpClientResponse {
    constructor(message) {
        this.message = message;
    }
    readBody() {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
                let output = Buffer.alloc(0);
                this.message.on('data', (chunk) => {
                    output = Buffer.concat([output, chunk]);
                });
                this.message.on('end', () => {
                    resolve(output.toString());
                });
            }));
        });
    }
    readBodyBuffer() {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
                const chunks = [];
                this.message.on('data', (chunk) => {
                    chunks.push(chunk);
                });
                this.message.on('end', () => {
                    resolve(Buffer.concat(chunks));
                });
            }));
        });
    }
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
    const parsedUrl = new URL(requestUrl);
    return parsedUrl.protocol === 'https:';
}
exports.isHttps = isHttps;
class HttpClient {
    constructor(userAgent, handlers, requestOptions) {
        this._ignoreSslError = false;
        this._allowRedirects = true;
        this._allowRedirectDowngrade = false;
        this._maxRedirects = 50;
        this._allowRetries = false;
        this._maxRetries = 1;
        this._keepAlive = false;
        this._disposed = false;
        this.userAgent = userAgent;
        this.handlers = handlers || [];
        this.requestOptions = requestOptions;
        if (requestOptions) {
            if (requestOptions.ignoreSslError != null) {
                this._ignoreSslError = requestOptions.ignoreSslError;
            }
            this._socketTimeout = requestOptions.socketTimeout;
            if (requestOptions.allowRedirects != null) {
                this._allowRedirects = requestOptions.allowRedirects;
            }
            if (requestOptions.allowRedirectDowngrade != null) {
                this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
            }
            if (requestOptions.maxRedirects != null) {
                this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
            }
            if (requestOptions.keepAlive != null) {
                this._keepAlive = requestOptions.keepAlive;
            }
            if (requestOptions.allowRetries != null) {
                this._allowRetries = requestOptions.allowRetries;
            }
            if (requestOptions.maxRetries != null) {
                this._maxRetries = requestOptions.maxRetries;
            }
        }
    }
    options(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
        });
    }
    get(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('GET', requestUrl, null, additionalHeaders || {});
        });
    }
    del(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('DELETE', requestUrl, null, additionalHeaders || {});
        });
    }
    post(requestUrl, data, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('POST', requestUrl, data, additionalHeaders || {});
        });
    }
    patch(requestUrl, data, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('PATCH', requestUrl, data, additionalHeaders || {});
        });
    }
    put(requestUrl, data, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('PUT', requestUrl, data, additionalHeaders || {});
        });
    }
    head(requestUrl, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request('HEAD', requestUrl, null, additionalHeaders || {});
        });
    }
    sendStream(verb, requestUrl, stream, additionalHeaders) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.request(verb, requestUrl, stream, additionalHeaders);
        });
    }
    /**
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    getJson(requestUrl, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            const res = yield this.get(requestUrl, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    postJson(requestUrl, obj, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            const data = JSON.stringify(obj, null, 2);
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
            const res = yield this.post(requestUrl, data, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    putJson(requestUrl, obj, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            const data = JSON.stringify(obj, null, 2);
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
            const res = yield this.put(requestUrl, data, additionalHeaders);
            return this._processResponse(res, this.requestOptions);
        });
    }
    patchJson(requestUrl, obj, additionalHeaders = {}) {
        return __awaiter(this, void 0, void 0, function* () {
            const data = JSON.stringify(obj, null, 2);
            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
||||||
|
const res = yield this.patch(requestUrl, data, additionalHeaders);
|
||||||
|
return this._processResponse(res, this.requestOptions);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Makes a raw http request.
|
||||||
|
* All other methods such as get, post, patch, and request ultimately call this.
|
||||||
|
* Prefer get, del, post and patch
|
||||||
|
*/
|
||||||
|
request(verb, requestUrl, data, headers) {
|
||||||
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
if (this._disposed) {
|
||||||
|
throw new Error('Client has already been disposed.');
|
||||||
|
}
|
||||||
|
const parsedUrl = new URL(requestUrl);
|
||||||
|
let info = this._prepareRequest(verb, parsedUrl, headers);
|
||||||
|
// Only perform retries on reads since writes may not be idempotent.
|
||||||
|
const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
|
||||||
|
? this._maxRetries + 1
|
||||||
|
: 1;
|
||||||
|
let numTries = 0;
|
||||||
|
let response;
|
||||||
|
do {
|
||||||
|
response = yield this.requestRaw(info, data);
|
||||||
|
// Check if it's an authentication challenge
|
||||||
|
if (response &&
|
||||||
|
response.message &&
|
||||||
|
response.message.statusCode === HttpCodes.Unauthorized) {
|
||||||
|
let authenticationHandler;
|
||||||
|
for (const handler of this.handlers) {
|
||||||
|
if (handler.canHandleAuthentication(response)) {
|
||||||
|
authenticationHandler = handler;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (authenticationHandler) {
|
||||||
|
return authenticationHandler.handleAuthentication(this, info, data);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// We have received an unauthorized response but have no handlers to handle it.
|
||||||
|
// Let the response return to the caller.
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let redirectsRemaining = this._maxRedirects;
|
||||||
|
while (response.message.statusCode &&
|
||||||
|
HttpRedirectCodes.includes(response.message.statusCode) &&
|
||||||
|
this._allowRedirects &&
|
||||||
|
redirectsRemaining > 0) {
|
||||||
|
const redirectUrl = response.message.headers['location'];
|
||||||
|
if (!redirectUrl) {
|
||||||
|
// if there's no location to redirect to, we won't
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
const parsedRedirectUrl = new URL(redirectUrl);
|
||||||
|
if (parsedUrl.protocol === 'https:' &&
|
||||||
|
parsedUrl.protocol !== parsedRedirectUrl.protocol &&
|
||||||
|
!this._allowRedirectDowngrade) {
|
||||||
|
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
|
||||||
|
}
|
||||||
|
// we need to finish reading the response before reassigning response
|
||||||
|
// which will leak the open socket.
|
||||||
|
yield response.readBody();
|
||||||
|
// strip authorization header if redirected to a different hostname
|
||||||
|
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
|
||||||
|
for (const header in headers) {
|
||||||
|
// header names are case insensitive
|
||||||
|
if (header.toLowerCase() === 'authorization') {
|
||||||
|
delete headers[header];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// let's make the request with the new redirectUrl
|
||||||
|
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
|
||||||
|
response = yield this.requestRaw(info, data);
|
||||||
|
redirectsRemaining--;
|
||||||
|
}
|
||||||
|
if (!response.message.statusCode ||
|
||||||
|
!HttpResponseRetryCodes.includes(response.message.statusCode)) {
|
||||||
|
// If not a retry code, return immediately instead of retrying
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
numTries += 1;
|
||||||
|
if (numTries < maxTries) {
|
||||||
|
yield response.readBody();
|
||||||
|
yield this._performExponentialBackoff(numTries);
|
||||||
|
}
|
||||||
|
} while (numTries < maxTries);
|
||||||
|
return response;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Needs to be called if keepAlive is set to true in request options.
|
||||||
|
*/
|
||||||
|
dispose() {
|
||||||
|
if (this._agent) {
|
||||||
|
this._agent.destroy();
|
||||||
|
}
|
||||||
|
this._disposed = true;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Raw request.
|
||||||
|
* @param info
|
||||||
|
* @param data
|
||||||
|
*/
|
||||||
|
requestRaw(info, data) {
|
||||||
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
function callbackForResult(err, res) {
|
||||||
|
if (err) {
|
||||||
|
reject(err);
|
||||||
|
}
|
||||||
|
else if (!res) {
|
||||||
|
// If `err` is not passed, then `res` must be passed.
|
||||||
|
reject(new Error('Unknown error'));
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
resolve(res);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.requestRawWithCallback(info, data, callbackForResult);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Raw request with callback.
|
||||||
|
* @param info
|
||||||
|
* @param data
|
||||||
|
* @param onResult
|
||||||
|
*/
|
||||||
|
requestRawWithCallback(info, data, onResult) {
|
||||||
|
if (typeof data === 'string') {
|
||||||
|
if (!info.options.headers) {
|
||||||
|
info.options.headers = {};
|
||||||
|
}
|
||||||
|
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
|
||||||
|
}
|
||||||
|
let callbackCalled = false;
|
||||||
|
function handleResult(err, res) {
|
||||||
|
if (!callbackCalled) {
|
||||||
|
callbackCalled = true;
|
||||||
|
onResult(err, res);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const req = info.httpModule.request(info.options, (msg) => {
|
||||||
|
const res = new HttpClientResponse(msg);
|
||||||
|
handleResult(undefined, res);
|
||||||
|
});
|
||||||
|
let socket;
|
||||||
|
req.on('socket', sock => {
|
||||||
|
socket = sock;
|
||||||
|
});
|
||||||
|
// If we ever get disconnected, we want the socket to timeout eventually
|
||||||
|
req.setTimeout(this._socketTimeout || 3 * 60000, () => {
|
||||||
|
if (socket) {
|
||||||
|
socket.end();
|
||||||
|
}
|
||||||
|
handleResult(new Error(`Request timeout: ${info.options.path}`));
|
||||||
|
});
|
||||||
|
req.on('error', function (err) {
|
||||||
|
// err has statusCode property
|
||||||
|
// res should have headers
|
||||||
|
handleResult(err);
|
||||||
|
});
|
||||||
|
if (data && typeof data === 'string') {
|
||||||
|
req.write(data, 'utf8');
|
||||||
|
}
|
||||||
|
if (data && typeof data !== 'string') {
|
||||||
|
data.on('close', function () {
|
||||||
|
req.end();
|
||||||
|
});
|
||||||
|
data.pipe(req);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
req.end();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Gets an http agent. This function is useful when you need an http agent that handles
|
||||||
|
* routing through a proxy server - depending upon the url and proxy environment variables.
|
||||||
|
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
||||||
|
*/
|
||||||
|
getAgent(serverUrl) {
|
||||||
|
const parsedUrl = new URL(serverUrl);
|
||||||
|
return this._getAgent(parsedUrl);
|
||||||
|
}
|
||||||
|
getAgentDispatcher(serverUrl) {
|
||||||
|
const parsedUrl = new URL(serverUrl);
|
||||||
|
const proxyUrl = pm.getProxyUrl(parsedUrl);
|
||||||
|
const useProxy = proxyUrl && proxyUrl.hostname;
|
||||||
|
if (!useProxy) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
return this._getProxyAgentDispatcher(parsedUrl, proxyUrl);
|
||||||
|
}
|
||||||
|
_prepareRequest(method, requestUrl, headers) {
|
||||||
|
const info = {};
|
||||||
|
info.parsedUrl = requestUrl;
|
||||||
|
const usingSsl = info.parsedUrl.protocol === 'https:';
|
||||||
|
info.httpModule = usingSsl ? https : http;
|
||||||
|
const defaultPort = usingSsl ? 443 : 80;
|
||||||
|
info.options = {};
|
||||||
|
info.options.host = info.parsedUrl.hostname;
|
||||||
|
info.options.port = info.parsedUrl.port
|
||||||
|
? parseInt(info.parsedUrl.port)
|
||||||
|
: defaultPort;
|
||||||
|
info.options.path =
|
||||||
|
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
|
||||||
|
info.options.method = method;
|
||||||
|
info.options.headers = this._mergeHeaders(headers);
|
||||||
|
if (this.userAgent != null) {
|
||||||
|
info.options.headers['user-agent'] = this.userAgent;
|
||||||
|
}
|
||||||
|
info.options.agent = this._getAgent(info.parsedUrl);
|
||||||
|
// gives handlers an opportunity to participate
|
||||||
|
if (this.handlers) {
|
||||||
|
for (const handler of this.handlers) {
|
||||||
|
handler.prepareRequest(info.options);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return info;
|
||||||
|
}
|
||||||
|
_mergeHeaders(headers) {
|
||||||
|
if (this.requestOptions && this.requestOptions.headers) {
|
||||||
|
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
|
||||||
|
}
|
||||||
|
return lowercaseKeys(headers || {});
|
||||||
|
}
|
||||||
|
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
|
||||||
|
let clientHeader;
|
||||||
|
if (this.requestOptions && this.requestOptions.headers) {
|
||||||
|
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
|
||||||
|
}
|
||||||
|
return additionalHeaders[header] || clientHeader || _default;
|
||||||
|
}
|
||||||
|
_getAgent(parsedUrl) {
|
||||||
|
let agent;
|
||||||
|
const proxyUrl = pm.getProxyUrl(parsedUrl);
|
||||||
|
const useProxy = proxyUrl && proxyUrl.hostname;
|
||||||
|
if (this._keepAlive && useProxy) {
|
||||||
|
agent = this._proxyAgent;
|
||||||
|
}
|
||||||
|
if (this._keepAlive && !useProxy) {
|
||||||
|
agent = this._agent;
|
||||||
|
}
|
||||||
|
// if agent is already assigned use that agent.
|
||||||
|
if (agent) {
|
||||||
|
return agent;
|
||||||
|
}
|
||||||
|
const usingSsl = parsedUrl.protocol === 'https:';
|
||||||
|
let maxSockets = 100;
|
||||||
|
if (this.requestOptions) {
|
||||||
|
maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
|
||||||
|
}
|
||||||
|
// This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.
|
||||||
|
if (proxyUrl && proxyUrl.hostname) {
|
||||||
|
const agentOptions = {
|
||||||
|
maxSockets,
|
||||||
|
keepAlive: this._keepAlive,
|
||||||
|
proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
|
||||||
|
proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
|
||||||
|
})), { host: proxyUrl.hostname, port: proxyUrl.port })
|
||||||
|
};
|
||||||
|
let tunnelAgent;
|
||||||
|
const overHttps = proxyUrl.protocol === 'https:';
|
||||||
|
if (usingSsl) {
|
||||||
|
tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
|
||||||
|
}
|
||||||
|
agent = tunnelAgent(agentOptions);
|
||||||
|
this._proxyAgent = agent;
|
||||||
|
}
|
||||||
|
// if reusing agent across request and tunneling agent isn't assigned create a new agent
|
||||||
|
if (this._keepAlive && !agent) {
|
||||||
|
const options = { keepAlive: this._keepAlive, maxSockets };
|
||||||
|
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
|
||||||
|
this._agent = agent;
|
||||||
|
}
|
||||||
|
// if not using private agent and tunnel agent isn't setup then use global agent
|
||||||
|
if (!agent) {
|
||||||
|
agent = usingSsl ? https.globalAgent : http.globalAgent;
|
||||||
|
}
|
||||||
|
if (usingSsl && this._ignoreSslError) {
|
||||||
|
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
|
||||||
|
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
|
||||||
|
// we have to cast it to any and change it directly
|
||||||
|
agent.options = Object.assign(agent.options || {}, {
|
||||||
|
rejectUnauthorized: false
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return agent;
|
||||||
|
}
|
||||||
|
_getProxyAgentDispatcher(parsedUrl, proxyUrl) {
|
||||||
|
let proxyAgent;
|
||||||
|
if (this._keepAlive) {
|
||||||
|
proxyAgent = this._proxyAgentDispatcher;
|
||||||
|
}
|
||||||
|
// if agent is already assigned use that agent.
|
||||||
|
if (proxyAgent) {
|
||||||
|
return proxyAgent;
|
||||||
|
}
|
||||||
|
const usingSsl = parsedUrl.protocol === 'https:';
|
||||||
|
proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {
|
||||||
|
token: `${proxyUrl.username}:${proxyUrl.password}`
|
||||||
|
})));
|
||||||
|
this._proxyAgentDispatcher = proxyAgent;
|
||||||
|
if (usingSsl && this._ignoreSslError) {
|
||||||
|
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
|
||||||
|
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
|
||||||
|
// we have to cast it to any and change it directly
|
||||||
|
proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, {
|
||||||
|
rejectUnauthorized: false
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return proxyAgent;
|
||||||
|
}
|
||||||
|
_performExponentialBackoff(retryNumber) {
|
||||||
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
|
||||||
|
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
|
||||||
|
return new Promise(resolve => setTimeout(() => resolve(), ms));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
_processResponse(res, options) {
|
||||||
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
|
||||||
|
const statusCode = res.message.statusCode || 0;
|
||||||
|
const response = {
|
||||||
|
statusCode,
|
||||||
|
result: null,
|
||||||
|
headers: {}
|
||||||
|
};
|
||||||
|
// not found leads to null obj returned
|
||||||
|
if (statusCode === HttpCodes.NotFound) {
|
||||||
|
resolve(response);
|
||||||
|
}
|
||||||
|
// get the result from the body
|
||||||
|
function dateTimeDeserializer(key, value) {
|
||||||
|
if (typeof value === 'string') {
|
||||||
|
const a = new Date(value);
|
||||||
|
if (!isNaN(a.valueOf())) {
|
||||||
|
return a;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
let obj;
|
||||||
|
let contents;
|
||||||
|
try {
|
||||||
|
contents = yield res.readBody();
|
||||||
|
if (contents && contents.length > 0) {
|
||||||
|
if (options && options.deserializeDates) {
|
||||||
|
obj = JSON.parse(contents, dateTimeDeserializer);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
obj = JSON.parse(contents);
|
||||||
|
}
|
||||||
|
response.result = obj;
|
||||||
|
}
|
||||||
|
response.headers = res.message.headers;
|
||||||
|
}
|
||||||
|
catch (err) {
|
||||||
|
// Invalid resource (contents not json); leaving result obj null
|
||||||
|
}
|
||||||
|
// note that 3xx redirects are handled by the http layer.
|
||||||
|
if (statusCode > 299) {
|
||||||
|
let msg;
|
||||||
|
// if exception/error in body, attempt to get better error
|
||||||
|
if (obj && obj.message) {
|
||||||
|
msg = obj.message;
|
||||||
|
}
|
||||||
|
else if (contents && contents.length > 0) {
|
||||||
|
// it may be the case that the exception is in the body message as string
|
||||||
|
msg = contents;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
msg = `Failed request: (${statusCode})`;
|
||||||
|
}
|
||||||
|
const err = new HttpClientError(msg, statusCode);
|
||||||
|
err.result = response.result;
|
||||||
|
reject(err);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
resolve(response);
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.HttpClient = HttpClient;
|
||||||
|
const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
|
||||||
|
//# sourceMappingURL=index.js.map
|
1
node_modules/@actions/http-client/lib/index.js.map
generated
vendored
Normal file
1
node_modules/@actions/http-client/lib/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
46
node_modules/@actions/http-client/lib/interfaces.d.ts
generated
vendored
Normal file
46
node_modules/@actions/http-client/lib/interfaces.d.ts
generated
vendored
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
/// <reference types="node" />
|
||||||
|
/// <reference types="node" />
|
||||||
|
/// <reference types="node" />
|
||||||
|
import * as http from 'http';
|
||||||
|
import * as https from 'https';
|
||||||
|
import { HttpClientResponse } from './index';
|
||||||
|
export interface HttpClient {
|
||||||
|
options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
|
||||||
|
get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
|
||||||
|
del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
|
||||||
|
post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
|
||||||
|
patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
|
||||||
|
put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
|
||||||
|
sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
|
||||||
|
request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
|
||||||
|
requestRaw(info: RequestInfo, data: string | NodeJS.ReadableStream): Promise<HttpClientResponse>;
|
||||||
|
requestRawWithCallback(info: RequestInfo, data: string | NodeJS.ReadableStream, onResult: (err?: Error, res?: HttpClientResponse) => void): void;
|
||||||
|
}
|
||||||
|
export interface RequestHandler {
|
||||||
|
prepareRequest(options: http.RequestOptions): void;
|
||||||
|
canHandleAuthentication(response: HttpClientResponse): boolean;
|
||||||
|
handleAuthentication(httpClient: HttpClient, requestInfo: RequestInfo, data: string | NodeJS.ReadableStream | null): Promise<HttpClientResponse>;
|
||||||
|
}
|
||||||
|
export interface RequestInfo {
|
||||||
|
options: http.RequestOptions;
|
||||||
|
parsedUrl: URL;
|
||||||
|
httpModule: typeof http | typeof https;
|
||||||
|
}
|
||||||
|
export interface RequestOptions {
|
||||||
|
headers?: http.OutgoingHttpHeaders;
|
||||||
|
socketTimeout?: number;
|
||||||
|
ignoreSslError?: boolean;
|
||||||
|
allowRedirects?: boolean;
|
||||||
|
allowRedirectDowngrade?: boolean;
|
||||||
|
maxRedirects?: number;
|
||||||
|
maxSockets?: number;
|
||||||
|
keepAlive?: boolean;
|
||||||
|
deserializeDates?: boolean;
|
||||||
|
allowRetries?: boolean;
|
||||||
|
maxRetries?: number;
|
||||||
|
}
|
||||||
|
export interface TypedResponse<T> {
|
||||||
|
statusCode: number;
|
||||||
|
result: T | null;
|
||||||
|
headers: http.IncomingHttpHeaders;
|
||||||
|
}
|
3
node_modules/@actions/http-client/lib/interfaces.js
generated
vendored
Normal file
3
node_modules/@actions/http-client/lib/interfaces.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
//# sourceMappingURL=interfaces.js.map
|
1
node_modules/@actions/http-client/lib/interfaces.js.map
generated
vendored
Normal file
1
node_modules/@actions/http-client/lib/interfaces.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../src/interfaces.ts"],"names":[],"mappings":""}
|
2
node_modules/@actions/http-client/lib/proxy.d.ts
generated
vendored
Normal file
2
node_modules/@actions/http-client/lib/proxy.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
export declare function getProxyUrl(reqUrl: URL): URL | undefined;
|
||||||
|
export declare function checkBypass(reqUrl: URL): boolean;
|
82
node_modules/@actions/http-client/lib/proxy.js
generated
vendored
Normal file
82
node_modules/@actions/http-client/lib/proxy.js
generated
vendored
Normal file
@ -0,0 +1,82 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.checkBypass = exports.getProxyUrl = void 0;
|
||||||
|
function getProxyUrl(reqUrl) {
|
||||||
|
const usingSsl = reqUrl.protocol === 'https:';
|
||||||
|
if (checkBypass(reqUrl)) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
const proxyVar = (() => {
|
||||||
|
if (usingSsl) {
|
||||||
|
return process.env['https_proxy'] || process.env['HTTPS_PROXY'];
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return process.env['http_proxy'] || process.env['HTTP_PROXY'];
|
||||||
|
}
|
||||||
|
})();
|
||||||
|
if (proxyVar) {
|
||||||
|
try {
|
||||||
|
return new URL(proxyVar);
|
||||||
|
}
|
||||||
|
catch (_a) {
|
||||||
|
if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
|
||||||
|
return new URL(`http://${proxyVar}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.getProxyUrl = getProxyUrl;
|
||||||
|
function checkBypass(reqUrl) {
|
||||||
|
if (!reqUrl.hostname) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
const reqHost = reqUrl.hostname;
|
||||||
|
if (isLoopbackAddress(reqHost)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
|
||||||
|
if (!noProxy) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
// Determine the request port
|
||||||
|
let reqPort;
|
||||||
|
if (reqUrl.port) {
|
||||||
|
reqPort = Number(reqUrl.port);
|
||||||
|
}
|
||||||
|
else if (reqUrl.protocol === 'http:') {
|
||||||
|
reqPort = 80;
|
||||||
|
}
|
||||||
|
else if (reqUrl.protocol === 'https:') {
|
||||||
|
reqPort = 443;
|
||||||
|
}
|
||||||
|
// Format the request hostname and hostname with port
|
||||||
|
const upperReqHosts = [reqUrl.hostname.toUpperCase()];
|
||||||
|
if (typeof reqPort === 'number') {
|
||||||
|
upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
|
||||||
|
}
|
||||||
|
// Compare request host against noproxy
|
||||||
|
for (const upperNoProxyItem of noProxy
|
||||||
|
.split(',')
|
||||||
|
.map(x => x.trim().toUpperCase())
|
||||||
|
.filter(x => x)) {
|
||||||
|
if (upperNoProxyItem === '*' ||
|
||||||
|
upperReqHosts.some(x => x === upperNoProxyItem ||
|
||||||
|
x.endsWith(`.${upperNoProxyItem}`) ||
|
||||||
|
(upperNoProxyItem.startsWith('.') &&
|
||||||
|
x.endsWith(`${upperNoProxyItem}`)))) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
exports.checkBypass = checkBypass;
|
||||||
|
function isLoopbackAddress(host) {
|
||||||
|
const hostLower = host.toLowerCase();
|
||||||
|
return (hostLower === 'localhost' ||
|
||||||
|
hostLower.startsWith('127.') ||
|
||||||
|
hostLower.startsWith('[::1]') ||
|
||||||
|
hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
|
||||||
|
}
|
||||||
|
//# sourceMappingURL=proxy.js.map
|
1
node_modules/@actions/http-client/lib/proxy.js.map
generated
vendored
Normal file
1
node_modules/@actions/http-client/lib/proxy.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":";;;AAAA,SAAgB,WAAW,CAAC,MAAW;IACrC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAA;IAE7C,IAAI,WAAW,CAAC,MAAM,CAAC,EAAE;QACvB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE;QACrB,IAAI,QAAQ,EAAE;YACZ,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAA;SAChE;aAAM;YACL,OAAO,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAA;SAC9D;IACH,CAAC,CAAC,EAAE,CAAA;IAEJ,IAAI,QAAQ,EAAE;QACZ,IAAI;YACF,OAAO,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAA;SACzB;QAAC,WAAM;YACN,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,UAAU,CAAC;gBACrE,OAAO,IAAI,GAAG,CAAC,UAAU,QAAQ,EAAE,CAAC,CAAA;SACvC;KACF;SAAM;QACL,OAAO,SAAS,CAAA;KACjB;AACH,CAAC;AAzBD,kCAyBC;AAED,SAAgB,WAAW,CAAC,MAAW;IACrC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACpB,OAAO,KAAK,CAAA;KACb;IAED,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAA;IAC/B,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;QAC9B,OAAO,IAAI,CAAA;KACZ;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE,CAAA;IACxE,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,KAAK,CAAA;KACb;IAED,6BAA6B;IAC7B,IAAI,OAA2B,CAAA;IAC/B,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;KAC9B;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,OAAO,EAAE;QACtC,OAAO,GAAG,EAAE,CAAA;KACb;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACvC,OAAO,GAAG,GAAG,CAAA;KACd;IAED,qDAAqD;IACrD,MAAM,aAAa,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IACrD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,aAAa,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,OAAO,EAAE,CAAC,CAAA;KACrD;IAED,uCAAuC;IACvC,KAAK,MAAM,gBAAgB,IAAI,OAAO;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;SAChC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QACjB,IACE,gBAAgB,KAAK,GAAG;YACxB,aAAa,CAAC,IAAI,CAChB,CAAC,CAAC,EAAE,CACF,CAAC,KAAK,gBAAgB;gBACtB,CAAC,CAAC,QAAQ,CAAC,IAAI,gBAAgB,EAAE,CAAC;gBAClC,CAAC,gBAAgB,CAAC,UAAU,CAAC,GAAG,CAAC;oBAC/B,CAAC,CAAC,QAAQ,CAAC,GAAG,gBAAgB,EAAE,CAAC,CAAC,CACvC,EACD;YACA,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAnDD,kCAmDC;AAED,SAAS,iBAAiB,CAAC,IAAY;IACrC,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,EAAE,CAAA;IACpC,OAAO,CACL,SAAS,KAAK,WAAW;QACzB,SAAS,CAAC,UAAU,CAAC,MAAM,CAAC;QAC5B,SAAS,CAAC,UAAU,CAAC,OAAO,CAAC;QAC7B,SAAS,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAC1C,CAAA;AACH,CAAC"}
|
207
node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js
generated
vendored
Normal file
207
node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js
generated
vendored
Normal file
@ -0,0 +1,207 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const WritableStream = require('node:stream').Writable
|
||||||
|
const inherits = require('node:util').inherits
|
||||||
|
|
||||||
|
const StreamSearch = require('../../streamsearch/sbmh')
|
||||||
|
|
||||||
|
const PartStream = require('./PartStream')
|
||||||
|
const HeaderParser = require('./HeaderParser')
|
||||||
|
|
||||||
|
const DASH = 45
|
||||||
|
const B_ONEDASH = Buffer.from('-')
|
||||||
|
const B_CRLF = Buffer.from('\r\n')
|
||||||
|
const EMPTY_FN = function () {}
|
||||||
|
|
||||||
|
function Dicer (cfg) {
|
||||||
|
if (!(this instanceof Dicer)) { return new Dicer(cfg) }
|
||||||
|
WritableStream.call(this, cfg)
|
||||||
|
|
||||||
|
if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') }
|
||||||
|
|
||||||
|
if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined }
|
||||||
|
|
||||||
|
this._headerFirst = cfg.headerFirst
|
||||||
|
|
||||||
|
this._dashes = 0
|
||||||
|
this._parts = 0
|
||||||
|
this._finished = false
|
||||||
|
this._realFinish = false
|
||||||
|
this._isPreamble = true
|
||||||
|
this._justMatched = false
|
||||||
|
this._firstWrite = true
|
||||||
|
this._inHeader = true
|
||||||
|
this._part = undefined
|
||||||
|
this._cb = undefined
|
||||||
|
this._ignoreData = false
|
||||||
|
this._partOpts = { highWaterMark: cfg.partHwm }
|
||||||
|
this._pause = false
|
||||||
|
|
||||||
|
const self = this
|
||||||
|
this._hparser = new HeaderParser(cfg)
|
||||||
|
this._hparser.on('header', function (header) {
|
||||||
|
self._inHeader = false
|
||||||
|
self._part.emit('header', header)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
inherits(Dicer, WritableStream)
|
||||||
|
|
||||||
|
Dicer.prototype.emit = function (ev) {
|
||||||
|
if (ev === 'finish' && !this._realFinish) {
|
||||||
|
if (!this._finished) {
|
||||||
|
const self = this
|
||||||
|
process.nextTick(function () {
|
||||||
|
self.emit('error', new Error('Unexpected end of multipart data'))
|
||||||
|
if (self._part && !self._ignoreData) {
|
||||||
|
const type = (self._isPreamble ? 'Preamble' : 'Part')
|
||||||
|
self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'))
|
||||||
|
self._part.push(null)
|
||||||
|
process.nextTick(function () {
|
||||||
|
self._realFinish = true
|
||||||
|
self.emit('finish')
|
||||||
|
self._realFinish = false
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
self._realFinish = true
|
||||||
|
self.emit('finish')
|
||||||
|
self._realFinish = false
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} else { WritableStream.prototype.emit.apply(this, arguments) }
|
||||||
|
}
|
||||||
|
|
||||||
|
Dicer.prototype._write = function (data, encoding, cb) {
|
||||||
|
// ignore unexpected data (e.g. extra trailer data after finished)
|
||||||
|
if (!this._hparser && !this._bparser) { return cb() }
|
||||||
|
|
||||||
|
if (this._headerFirst && this._isPreamble) {
|
||||||
|
if (!this._part) {
|
||||||
|
this._part = new PartStream(this._partOpts)
|
||||||
|
if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() }
|
||||||
|
}
|
||||||
|
const r = this._hparser.push(data)
|
||||||
|
if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }
|
||||||
|
}
|
||||||
|
|
||||||
|
// allows for "easier" testing
|
||||||
|
if (this._firstWrite) {
|
||||||
|
this._bparser.push(B_CRLF)
|
||||||
|
this._firstWrite = false
|
||||||
|
}
|
||||||
|
|
||||||
|
this._bparser.push(data)
|
||||||
|
|
||||||
|
if (this._pause) { this._cb = cb } else { cb() }
|
||||||
|
}
|
||||||
|
|
||||||
|
Dicer.prototype.reset = function () {
|
||||||
|
this._part = undefined
|
||||||
|
this._bparser = undefined
|
||||||
|
this._hparser = undefined
|
||||||
|
}
|
||||||
|
|
||||||
|
Dicer.prototype.setBoundary = function (boundary) {
|
||||||
|
const self = this
|
||||||
|
this._bparser = new StreamSearch('\r\n--' + boundary)
|
||||||
|
this._bparser.on('info', function (isMatch, data, start, end) {
|
||||||
|
self._oninfo(isMatch, data, start, end)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
Dicer.prototype._ignore = function () {
|
||||||
|
if (this._part && !this._ignoreData) {
|
||||||
|
this._ignoreData = true
|
||||||
|
this._part.on('error', EMPTY_FN)
|
||||||
|
// we must perform some kind of read on the stream even though we are
|
||||||
|
// ignoring the data, otherwise node's Readable stream will not emit 'end'
|
||||||
|
// after pushing null to the stream
|
||||||
|
this._part.resume()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Dicer.prototype._oninfo = function (isMatch, data, start, end) {
|
||||||
|
let buf; const self = this; let i = 0; let r; let shouldWriteMore = true
|
||||||
|
|
||||||
|
if (!this._part && this._justMatched && data) {
|
||||||
|
while (this._dashes < 2 && (start + i) < end) {
|
||||||
|
if (data[start + i] === DASH) {
|
||||||
|
++i
|
||||||
|
++this._dashes
|
||||||
|
} else {
|
||||||
|
if (this._dashes) { buf = B_ONEDASH }
|
||||||
|
this._dashes = 0
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this._dashes === 2) {
|
||||||
|
if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) }
|
||||||
|
this.reset()
|
||||||
|
this._finished = true
|
||||||
|
// no more parts will be added
|
||||||
|
if (self._parts === 0) {
|
||||||
|
self._realFinish = true
|
||||||
|
self.emit('finish')
|
||||||
|
self._realFinish = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this._dashes) { return }
|
||||||
|
}
|
||||||
|
if (this._justMatched) { this._justMatched = false }
|
||||||
|
if (!this._part) {
|
||||||
|
this._part = new PartStream(this._partOpts)
|
||||||
|
this._part._read = function (n) {
|
||||||
|
self._unpause()
|
||||||
|
}
|
||||||
|
if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() }
|
||||||
|
if (!this._isPreamble) { this._inHeader = true }
|
||||||
|
}
|
||||||
|
if (data && start < end && !this._ignoreData) {
|
||||||
|
if (this._isPreamble || !this._inHeader) {
|
||||||
|
if (buf) { shouldWriteMore = this._part.push(buf) }
|
||||||
|
shouldWriteMore = this._part.push(data.slice(start, end))
|
||||||
|
if (!shouldWriteMore) { this._pause = true }
|
||||||
|
} else if (!this._isPreamble && this._inHeader) {
|
||||||
|
if (buf) { this._hparser.push(buf) }
|
||||||
|
r = this._hparser.push(data.slice(start, end))
|
||||||
|
if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (isMatch) {
|
||||||
|
this._hparser.reset()
|
||||||
|
if (this._isPreamble) { this._isPreamble = false } else {
|
||||||
|
if (start !== end) {
|
||||||
|
++this._parts
|
||||||
|
this._part.on('end', function () {
|
||||||
|
if (--self._parts === 0) {
|
||||||
|
if (self._finished) {
|
||||||
|
self._realFinish = true
|
||||||
|
self.emit('finish')
|
||||||
|
self._realFinish = false
|
||||||
|
} else {
|
||||||
|
self._unpause()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this._part.push(null)
|
||||||
|
this._part = undefined
|
||||||
|
this._ignoreData = false
|
||||||
|
this._justMatched = true
|
||||||
|
this._dashes = 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Dicer.prototype._unpause = function () {
|
||||||
|
if (!this._pause) { return }
|
||||||
|
|
||||||
|
this._pause = false
|
||||||
|
if (this._cb) {
|
||||||
|
const cb = this._cb
|
||||||
|
this._cb = undefined
|
||||||
|
cb()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Dicer
|
100
node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js
generated
vendored
Normal file
100
node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js
generated
vendored
Normal file
@ -0,0 +1,100 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const EventEmitter = require('node:events').EventEmitter
|
||||||
|
const inherits = require('node:util').inherits
|
||||||
|
const getLimit = require('../../../lib/utils/getLimit')
|
||||||
|
|
||||||
|
const StreamSearch = require('../../streamsearch/sbmh')
|
||||||
|
|
||||||
|
const B_DCRLF = Buffer.from('\r\n\r\n')
|
||||||
|
const RE_CRLF = /\r\n/g
|
||||||
|
const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex
|
||||||
|
|
||||||
|
function HeaderParser (cfg) {
|
||||||
|
EventEmitter.call(this)
|
||||||
|
|
||||||
|
cfg = cfg || {}
|
||||||
|
const self = this
|
||||||
|
this.nread = 0
|
||||||
|
this.maxed = false
|
||||||
|
this.npairs = 0
|
||||||
|
this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000)
|
||||||
|
this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024)
|
||||||
|
this.buffer = ''
|
||||||
|
this.header = {}
|
||||||
|
this.finished = false
|
||||||
|
this.ss = new StreamSearch(B_DCRLF)
|
||||||
|
this.ss.on('info', function (isMatch, data, start, end) {
|
||||||
|
if (data && !self.maxed) {
|
||||||
|
if (self.nread + end - start >= self.maxHeaderSize) {
|
||||||
|
end = self.maxHeaderSize - self.nread + start
|
||||||
|
self.nread = self.maxHeaderSize
|
||||||
|
self.maxed = true
|
||||||
|
} else { self.nread += (end - start) }
|
||||||
|
|
||||||
|
self.buffer += data.toString('binary', start, end)
|
||||||
|
}
|
||||||
|
if (isMatch) { self._finish() }
|
||||||
|
})
|
||||||
|
}
|
||||||
|
inherits(HeaderParser, EventEmitter)
|
||||||
|
|
||||||
|
HeaderParser.prototype.push = function (data) {
|
||||||
|
const r = this.ss.push(data)
|
||||||
|
if (this.finished) { return r }
|
||||||
|
}
|
||||||
|
|
||||||
|
HeaderParser.prototype.reset = function () {
|
||||||
|
this.finished = false
|
||||||
|
this.buffer = ''
|
||||||
|
this.header = {}
|
||||||
|
this.ss.reset()
|
||||||
|
}
|
||||||
|
|
||||||
|
HeaderParser.prototype._finish = function () {
|
||||||
|
if (this.buffer) { this._parseHeader() }
|
||||||
|
this.ss.matches = this.ss.maxMatches
|
||||||
|
const header = this.header
|
||||||
|
this.header = {}
|
||||||
|
this.buffer = ''
|
||||||
|
this.finished = true
|
||||||
|
this.nread = this.npairs = 0
|
||||||
|
this.maxed = false
|
||||||
|
this.emit('header', header)
|
||||||
|
}
|
||||||
|
|
||||||
|
HeaderParser.prototype._parseHeader = function () {
|
||||||
|
if (this.npairs === this.maxHeaderPairs) { return }
|
||||||
|
|
||||||
|
const lines = this.buffer.split(RE_CRLF)
|
||||||
|
const len = lines.length
|
||||||
|
let m, h
|
||||||
|
|
||||||
|
for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
|
||||||
|
if (lines[i].length === 0) { continue }
|
||||||
|
if (lines[i][0] === '\t' || lines[i][0] === ' ') {
|
||||||
|
// folded header content
|
||||||
|
// RFC2822 says to just remove the CRLF and not the whitespace following
|
||||||
|
// it, so we follow the RFC and include the leading whitespace ...
|
||||||
|
if (h) {
|
||||||
|
this.header[h][this.header[h].length - 1] += lines[i]
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const posColon = lines[i].indexOf(':')
|
||||||
|
if (
|
||||||
|
posColon === -1 ||
|
||||||
|
posColon === 0
|
||||||
|
) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
m = RE_HDR.exec(lines[i])
|
||||||
|
h = m[1].toLowerCase()
|
||||||
|
this.header[h] = this.header[h] || []
|
||||||
|
this.header[h].push((m[2] || ''))
|
||||||
|
if (++this.npairs === this.maxHeaderPairs) { break }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = HeaderParser
|
13
node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js
generated
vendored
Normal file
13
node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const inherits = require('node:util').inherits
|
||||||
|
const ReadableStream = require('node:stream').Readable
|
||||||
|
|
||||||
|
function PartStream (opts) {
|
||||||
|
ReadableStream.call(this, opts)
|
||||||
|
}
|
||||||
|
inherits(PartStream, ReadableStream)
|
||||||
|
|
||||||
|
PartStream.prototype._read = function (n) {}
|
||||||
|
|
||||||
|
module.exports = PartStream
|
164
node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts
generated
vendored
Normal file
164
node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts
generated
vendored
Normal file
@ -0,0 +1,164 @@
|
|||||||
|
// Type definitions for dicer 0.2
|
||||||
|
// Project: https://github.com/mscdex/dicer
|
||||||
|
// Definitions by: BendingBender <https://github.com/BendingBender>
|
||||||
|
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||||
|
// TypeScript Version: 2.2
|
||||||
|
/// <reference types="node" />
|
||||||
|
|
||||||
|
import stream = require("stream");
|
||||||
|
|
||||||
|
// tslint:disable:unified-signatures
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A very fast streaming multipart parser for node.js.
|
||||||
|
* Dicer is a WritableStream
|
||||||
|
*
|
||||||
|
* Dicer (special) events:
|
||||||
|
* - on('finish', ()) - Emitted when all parts have been parsed and the Dicer instance has been ended.
|
||||||
|
* - on('part', (stream: PartStream)) - Emitted when a new part has been found.
|
||||||
|
* - on('preamble', (stream: PartStream)) - Emitted for preamble if you should happen to need it (can usually be ignored).
|
||||||
|
* - on('trailer', (data: Buffer)) - Emitted when trailing data was found after the terminating boundary (as with the preamble, this can usually be ignored too).
|
||||||
|
*/
|
||||||
|
export class Dicer extends stream.Writable {
|
||||||
|
/**
|
||||||
|
* Creates and returns a new Dicer instance with the following valid config settings:
|
||||||
|
*
|
||||||
|
* @param config The configuration to use
|
||||||
|
*/
|
||||||
|
constructor(config: Dicer.Config);
|
||||||
|
/**
|
||||||
|
* Sets the boundary to use for parsing and performs some initialization needed for parsing.
|
||||||
|
* You should only need to use this if you set headerFirst to true in the constructor and are parsing the boundary from the preamble header.
|
||||||
|
*
|
||||||
|
* @param boundary The boundary to use
|
||||||
|
*/
|
||||||
|
setBoundary(boundary: string): void;
|
||||||
|
addListener(event: "finish", listener: () => void): this;
|
||||||
|
addListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
addListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
addListener(event: "trailer", listener: (data: Buffer) => void): this;
|
||||||
|
addListener(event: "close", listener: () => void): this;
|
||||||
|
addListener(event: "drain", listener: () => void): this;
|
||||||
|
addListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
addListener(event: "pipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
addListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
addListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
on(event: "finish", listener: () => void): this;
|
||||||
|
on(event: "part", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
on(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
on(event: "trailer", listener: (data: Buffer) => void): this;
|
||||||
|
on(event: "close", listener: () => void): this;
|
||||||
|
on(event: "drain", listener: () => void): this;
|
||||||
|
on(event: "error", listener: (err: Error) => void): this;
|
||||||
|
on(event: "pipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
on(event: "unpipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
on(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
once(event: "finish", listener: () => void): this;
|
||||||
|
once(event: "part", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
once(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
once(event: "trailer", listener: (data: Buffer) => void): this;
|
||||||
|
once(event: "close", listener: () => void): this;
|
||||||
|
once(event: "drain", listener: () => void): this;
|
||||||
|
once(event: "error", listener: (err: Error) => void): this;
|
||||||
|
once(event: "pipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
once(event: "unpipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
once(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
prependListener(event: "finish", listener: () => void): this;
|
||||||
|
prependListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
prependListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
prependListener(event: "trailer", listener: (data: Buffer) => void): this;
|
||||||
|
prependListener(event: "close", listener: () => void): this;
|
||||||
|
prependListener(event: "drain", listener: () => void): this;
|
||||||
|
prependListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
prependListener(event: "pipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
prependListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
prependOnceListener(event: "finish", listener: () => void): this;
|
||||||
|
prependOnceListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
prependOnceListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
prependOnceListener(event: "trailer", listener: (data: Buffer) => void): this;
|
||||||
|
prependOnceListener(event: "close", listener: () => void): this;
|
||||||
|
prependOnceListener(event: "drain", listener: () => void): this;
|
||||||
|
prependOnceListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
removeListener(event: "finish", listener: () => void): this;
|
||||||
|
removeListener(event: "part", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
removeListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this;
|
||||||
|
removeListener(event: "trailer", listener: (data: Buffer) => void): this;
|
||||||
|
removeListener(event: "close", listener: () => void): this;
|
||||||
|
removeListener(event: "drain", listener: () => void): this;
|
||||||
|
removeListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
removeListener(event: "pipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
removeListener(event: "unpipe", listener: (src: stream.Readable) => void): this;
|
||||||
|
removeListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
}
|
||||||
|
|
||||||
|
declare namespace Dicer {
|
||||||
|
interface Config {
|
||||||
|
/**
|
||||||
|
* This is the boundary used to detect the beginning of a new part.
|
||||||
|
*/
|
||||||
|
boundary?: string | undefined;
|
||||||
|
/**
|
||||||
|
* If true, preamble header parsing will be performed first.
|
||||||
|
*/
|
||||||
|
headerFirst?: boolean | undefined;
|
||||||
|
/**
|
||||||
|
* The maximum number of header key=>value pairs to parse Default: 2000 (same as node's http).
|
||||||
|
*/
|
||||||
|
maxHeaderPairs?: number | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* PartStream is a _ReadableStream_
|
||||||
|
*
|
||||||
|
* PartStream (special) events:
|
||||||
|
* - on('header', (header: object)) - An object containing the header for this particular part. Each property value is an array of one or more string values.
|
||||||
|
*/
|
||||||
|
interface PartStream extends stream.Readable {
|
||||||
|
addListener(event: "header", listener: (header: object) => void): this;
|
||||||
|
addListener(event: "close", listener: () => void): this;
|
||||||
|
addListener(event: "data", listener: (chunk: Buffer | string) => void): this;
|
||||||
|
addListener(event: "end", listener: () => void): this;
|
||||||
|
addListener(event: "readable", listener: () => void): this;
|
||||||
|
addListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
addListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
on(event: "header", listener: (header: object) => void): this;
|
||||||
|
on(event: "close", listener: () => void): this;
|
||||||
|
on(event: "data", listener: (chunk: Buffer | string) => void): this;
|
||||||
|
on(event: "end", listener: () => void): this;
|
||||||
|
on(event: "readable", listener: () => void): this;
|
||||||
|
on(event: "error", listener: (err: Error) => void): this;
|
||||||
|
on(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
once(event: "header", listener: (header: object) => void): this;
|
||||||
|
once(event: "close", listener: () => void): this;
|
||||||
|
once(event: "data", listener: (chunk: Buffer | string) => void): this;
|
||||||
|
once(event: "end", listener: () => void): this;
|
||||||
|
once(event: "readable", listener: () => void): this;
|
||||||
|
once(event: "error", listener: (err: Error) => void): this;
|
||||||
|
once(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
prependListener(event: "header", listener: (header: object) => void): this;
|
||||||
|
prependListener(event: "close", listener: () => void): this;
|
||||||
|
prependListener(event: "data", listener: (chunk: Buffer | string) => void): this;
|
||||||
|
prependListener(event: "end", listener: () => void): this;
|
||||||
|
prependListener(event: "readable", listener: () => void): this;
|
||||||
|
prependListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
prependListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
prependOnceListener(event: "header", listener: (header: object) => void): this;
|
||||||
|
prependOnceListener(event: "close", listener: () => void): this;
|
||||||
|
prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this;
|
||||||
|
prependOnceListener(event: "end", listener: () => void): this;
|
||||||
|
prependOnceListener(event: "readable", listener: () => void): this;
|
||||||
|
prependOnceListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
removeListener(event: "header", listener: (header: object) => void): this;
|
||||||
|
removeListener(event: "close", listener: () => void): this;
|
||||||
|
removeListener(event: "data", listener: (chunk: Buffer | string) => void): this;
|
||||||
|
removeListener(event: "end", listener: () => void): this;
|
||||||
|
removeListener(event: "readable", listener: () => void): this;
|
||||||
|
removeListener(event: "error", listener: (err: Error) => void): this;
|
||||||
|
removeListener(event: string, listener: (...args: any[]) => void): this;
|
||||||
|
}
|
||||||
|
}
|
196
node_modules/@fastify/busboy/lib/main.d.ts
generated
vendored
Normal file
196
node_modules/@fastify/busboy/lib/main.d.ts
generated
vendored
Normal file
@ -0,0 +1,196 @@
|
|||||||
|
// Definitions by: Jacob Baskin <https://github.com/jacobbaskin>
|
||||||
|
// BendingBender <https://github.com/BendingBender>
|
||||||
|
// Igor Savin <https://github.com/kibertoad>
|
/// <reference types="node" />

import * as http from 'http';
import { Readable, Writable } from 'stream';
export { Dicer } from "../deps/dicer/lib/dicer";

export const Busboy: BusboyConstructor;
export default Busboy;

export interface BusboyConfig {
  /**
   * These are the HTTP headers of the incoming request, which are used by individual parsers.
   */
  headers: BusboyHeaders;
  /**
   * `highWaterMark` to use for this Busboy instance.
   * @default WritableStream default.
   */
  highWaterMark?: number | undefined;
  /**
   * highWaterMark to use for file streams.
   * @default ReadableStream default.
   */
  fileHwm?: number | undefined;
  /**
   * Default character set to use when one isn't defined.
   * @default 'utf8'
   */
  defCharset?: string | undefined;
  /**
   * Detect if a Part is a file.
   *
   * By default a file is detected if contentType
   * is application/octet-stream or fileName is not
   * undefined.
   *
   * Modify this to handle e.g. Blobs.
   */
  isPartAFile?: (fieldName: string | undefined, contentType: string | undefined, fileName: string | undefined) => boolean;
  /**
   * If paths in the multipart 'filename' field shall be preserved.
   * @default false
   */
  preservePath?: boolean | undefined;
  /**
   * Various limits on incoming data.
   */
  limits?:
    | {
        /**
         * Max field name size (in bytes)
         * @default 100 bytes
         */
        fieldNameSize?: number | undefined;
        /**
         * Max field value size (in bytes)
         * @default 1MB
         */
        fieldSize?: number | undefined;
        /**
         * Max number of non-file fields
         * @default Infinity
         */
        fields?: number | undefined;
        /**
         * For multipart forms, the max file size (in bytes)
         * @default Infinity
         */
        fileSize?: number | undefined;
        /**
         * For multipart forms, the max number of file fields
         * @default Infinity
         */
        files?: number | undefined;
        /**
         * For multipart forms, the max number of parts (fields + files)
         * @default Infinity
         */
        parts?: number | undefined;
        /**
         * For multipart forms, the max number of header key=>value pairs to parse
         * @default 2000
         */
        headerPairs?: number | undefined;

        /**
         * For multipart forms, the max size of a header part
         * @default 81920
         */
        headerSize?: number | undefined;
      }
    | undefined;
}

export type BusboyHeaders = { 'content-type': string } & http.IncomingHttpHeaders;

export interface BusboyFileStream extends Readable {
  truncated: boolean;

  /**
   * The number of bytes that have been read so far.
   */
  bytesRead: number;
}

export interface Busboy extends Writable {
  addListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
  addListener(event: string | symbol, listener: (...args: any[]) => void): this;
  on<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
  on(event: string | symbol, listener: (...args: any[]) => void): this;
  once<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
  once(event: string | symbol, listener: (...args: any[]) => void): this;
  removeListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
  removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
  off<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
  off(event: string | symbol, listener: (...args: any[]) => void): this;
  prependListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
  prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
  prependOnceListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;
  prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
}

export interface BusboyEvents {
  /**
   * Emitted for each new file form field found.
   *
   * * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the
   * file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents),
   * otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any**
   * incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically
   * and safely discarded (these discarded files do still count towards `files` and `parts` limits).
   * * If a configured file size limit was reached, `stream` will both have a boolean property `truncated`
   * (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens.
   *
   * @param listener.transferEncoding Contains the 'Content-Transfer-Encoding' value for the file stream.
   * @param listener.mimeType Contains the 'Content-Type' value for the file stream.
   */
  file: (
    fieldname: string,
    stream: BusboyFileStream,
    filename: string,
    transferEncoding: string,
    mimeType: string,
  ) => void;
  /**
   * Emitted for each new non-file field found.
   */
  field: (
    fieldname: string,
    value: string,
    fieldnameTruncated: boolean,
    valueTruncated: boolean,
    transferEncoding: string,
    mimeType: string,
  ) => void;
  finish: () => void;
  /**
   * Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted.
   */
  partsLimit: () => void;
  /**
   * Emitted when specified `files` limit has been reached. No more 'file' events will be emitted.
   */
  filesLimit: () => void;
  /**
   * Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted.
   */
  fieldsLimit: () => void;
  error: (error: unknown) => void;
}

export interface BusboyConstructor {
  (options: BusboyConfig): Busboy;

  new(options: BusboyConfig): Busboy;
}
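Editor's note (illustrative only, not part of the vendored files): a minimal usage sketch of the Busboy interface declared above, assuming an incoming HTTP request object `req` with multipart form-data headers. Each file stream is drained, because the JSDoc for the 'file' event warns that 'finish' never fires otherwise.

const Busboy = require('@fastify/busboy')

function handleUpload (req) {
  const busboy = new Busboy({ headers: req.headers })
  busboy.on('field', (name, value) => console.log('field', name, value))
  busboy.on('file', (name, stream, filename) => {
    // Always consume the stream, otherwise 'finish' never fires.
    stream.resume()
    console.log('discarded file', filename)
  })
  busboy.on('finish', () => console.log('done parsing'))
  // Busboy extends Writable, so the request can simply be piped into it.
  req.pipe(busboy)
}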
85
node_modules/@fastify/busboy/lib/main.js
generated
vendored
Normal file
@ -0,0 +1,85 @@
'use strict'

const WritableStream = require('node:stream').Writable
const { inherits } = require('node:util')
const Dicer = require('../deps/dicer/lib/Dicer')

const MultipartParser = require('./types/multipart')
const UrlencodedParser = require('./types/urlencoded')
const parseParams = require('./utils/parseParams')

function Busboy (opts) {
  if (!(this instanceof Busboy)) { return new Busboy(opts) }

  if (typeof opts !== 'object') {
    throw new TypeError('Busboy expected an options-Object.')
  }
  if (typeof opts.headers !== 'object') {
    throw new TypeError('Busboy expected an options-Object with headers-attribute.')
  }
  if (typeof opts.headers['content-type'] !== 'string') {
    throw new TypeError('Missing Content-Type-header.')
  }

  const {
    headers,
    ...streamOptions
  } = opts

  this.opts = {
    autoDestroy: false,
    ...streamOptions
  }
  WritableStream.call(this, this.opts)

  this._done = false
  this._parser = this.getParserByHeaders(headers)
  this._finished = false
}
inherits(Busboy, WritableStream)

Busboy.prototype.emit = function (ev) {
  if (ev === 'finish') {
    if (!this._done) {
      this._parser?.end()
      return
    } else if (this._finished) {
      return
    }
    this._finished = true
  }
  WritableStream.prototype.emit.apply(this, arguments)
}

Busboy.prototype.getParserByHeaders = function (headers) {
  const parsed = parseParams(headers['content-type'])

  const cfg = {
    defCharset: this.opts.defCharset,
    fileHwm: this.opts.fileHwm,
    headers,
    highWaterMark: this.opts.highWaterMark,
    isPartAFile: this.opts.isPartAFile,
    limits: this.opts.limits,
    parsedConType: parsed,
    preservePath: this.opts.preservePath
  }

  if (MultipartParser.detect.test(parsed[0])) {
    return new MultipartParser(this, cfg)
  }
  if (UrlencodedParser.detect.test(parsed[0])) {
    return new UrlencodedParser(this, cfg)
  }
  throw new Error('Unsupported Content-Type.')
}

Busboy.prototype._write = function (chunk, encoding, cb) {
  this._parser.write(chunk, cb)
}

module.exports = Busboy
module.exports.default = Busboy
module.exports.Busboy = Busboy

module.exports.Dicer = Dicer
306
node_modules/@fastify/busboy/lib/types/multipart.js
generated
vendored
Normal file
@ -0,0 +1,306 @@
'use strict'

// TODO:
//  * support 1 nested multipart level
//    (see second multipart example here:
//     http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data)
//  * support limits.fieldNameSize
//     -- this will require modifications to utils.parseParams

const { Readable } = require('node:stream')
const { inherits } = require('node:util')

const Dicer = require('../../deps/dicer/lib/Dicer')

const parseParams = require('../utils/parseParams')
const decodeText = require('../utils/decodeText')
const basename = require('../utils/basename')
const getLimit = require('../utils/getLimit')

const RE_BOUNDARY = /^boundary$/i
const RE_FIELD = /^form-data$/i
const RE_CHARSET = /^charset$/i
const RE_FILENAME = /^filename$/i
const RE_NAME = /^name$/i

Multipart.detect = /^multipart\/form-data/i
function Multipart (boy, cfg) {
  let i
  let len
  const self = this
  let boundary
  const limits = cfg.limits
  const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined))
  const parsedConType = cfg.parsedConType || []
  const defCharset = cfg.defCharset || 'utf8'
  const preservePath = cfg.preservePath
  const fileOpts = { highWaterMark: cfg.fileHwm }

  for (i = 0, len = parsedConType.length; i < len; ++i) {
    if (Array.isArray(parsedConType[i]) &&
      RE_BOUNDARY.test(parsedConType[i][0])) {
      boundary = parsedConType[i][1]
      break
    }
  }

  function checkFinished () {
    if (nends === 0 && finished && !boy._done) {
      finished = false
      self.end()
    }
  }

  if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') }

  const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
  const fileSizeLimit = getLimit(limits, 'fileSize', Infinity)
  const filesLimit = getLimit(limits, 'files', Infinity)
  const fieldsLimit = getLimit(limits, 'fields', Infinity)
  const partsLimit = getLimit(limits, 'parts', Infinity)
  const headerPairsLimit = getLimit(limits, 'headerPairs', 2000)
  const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024)

  let nfiles = 0
  let nfields = 0
  let nends = 0
  let curFile
  let curField
  let finished = false

  this._needDrain = false
  this._pause = false
  this._cb = undefined
  this._nparts = 0
  this._boy = boy

  const parserCfg = {
    boundary,
    maxHeaderPairs: headerPairsLimit,
    maxHeaderSize: headerSizeLimit,
    partHwm: fileOpts.highWaterMark,
    highWaterMark: cfg.highWaterMark
  }

  this.parser = new Dicer(parserCfg)
  this.parser.on('drain', function () {
    self._needDrain = false
    if (self._cb && !self._pause) {
      const cb = self._cb
      self._cb = undefined
      cb()
    }
  }).on('part', function onPart (part) {
    if (++self._nparts > partsLimit) {
      self.parser.removeListener('part', onPart)
      self.parser.on('part', skipPart)
      boy.hitPartsLimit = true
      boy.emit('partsLimit')
      return skipPart(part)
    }

    // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let
    // us emit 'end' early since we know the part has ended if we are already
    // seeing the next part
    if (curField) {
      const field = curField
      field.emit('end')
      field.removeAllListeners('end')
    }

    part.on('header', function (header) {
      let contype
      let fieldname
      let parsed
      let charset
      let encoding
      let filename
      let nsize = 0

      if (header['content-type']) {
        parsed = parseParams(header['content-type'][0])
        if (parsed[0]) {
          contype = parsed[0].toLowerCase()
          for (i = 0, len = parsed.length; i < len; ++i) {
            if (RE_CHARSET.test(parsed[i][0])) {
              charset = parsed[i][1].toLowerCase()
              break
            }
          }
        }
      }

      if (contype === undefined) { contype = 'text/plain' }
      if (charset === undefined) { charset = defCharset }

      if (header['content-disposition']) {
        parsed = parseParams(header['content-disposition'][0])
        if (!RE_FIELD.test(parsed[0])) { return skipPart(part) }
        for (i = 0, len = parsed.length; i < len; ++i) {
          if (RE_NAME.test(parsed[i][0])) {
            fieldname = parsed[i][1]
          } else if (RE_FILENAME.test(parsed[i][0])) {
            filename = parsed[i][1]
            if (!preservePath) { filename = basename(filename) }
          }
        }
      } else { return skipPart(part) }

      if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' }

      let onData,
        onEnd

      if (isPartAFile(fieldname, contype, filename)) {
        // file/binary field
        if (nfiles === filesLimit) {
          if (!boy.hitFilesLimit) {
            boy.hitFilesLimit = true
            boy.emit('filesLimit')
          }
          return skipPart(part)
        }

        ++nfiles

        if (!boy._events.file) {
          self.parser._ignore()
          return
        }

        ++nends
        const file = new FileStream(fileOpts)
        curFile = file
        file.on('end', function () {
          --nends
          self._pause = false
          checkFinished()
          if (self._cb && !self._needDrain) {
            const cb = self._cb
            self._cb = undefined
            cb()
          }
        })
        file._read = function (n) {
          if (!self._pause) { return }
          self._pause = false
          if (self._cb && !self._needDrain) {
            const cb = self._cb
            self._cb = undefined
            cb()
          }
        }
        boy.emit('file', fieldname, file, filename, encoding, contype)

        onData = function (data) {
          if ((nsize += data.length) > fileSizeLimit) {
            const extralen = fileSizeLimit - nsize + data.length
            if (extralen > 0) { file.push(data.slice(0, extralen)) }
            file.truncated = true
            file.bytesRead = fileSizeLimit
            part.removeAllListeners('data')
            file.emit('limit')
            return
          } else if (!file.push(data)) { self._pause = true }

          file.bytesRead = nsize
        }

        onEnd = function () {
          curFile = undefined
          file.push(null)
        }
      } else {
        // non-file field
        if (nfields === fieldsLimit) {
          if (!boy.hitFieldsLimit) {
            boy.hitFieldsLimit = true
            boy.emit('fieldsLimit')
          }
          return skipPart(part)
        }

        ++nfields
        ++nends
        let buffer = ''
        let truncated = false
        curField = part

        onData = function (data) {
          if ((nsize += data.length) > fieldSizeLimit) {
            const extralen = (fieldSizeLimit - (nsize - data.length))
            buffer += data.toString('binary', 0, extralen)
            truncated = true
            part.removeAllListeners('data')
          } else { buffer += data.toString('binary') }
        }

        onEnd = function () {
          curField = undefined
          if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) }
          boy.emit('field', fieldname, buffer, false, truncated, encoding, contype)
          --nends
          checkFinished()
        }
      }

      /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become
         broken. Streams2/streams3 is a huge black box of confusion, but
         somehow overriding the sync state seems to fix things again (and still
         seems to work for previous node versions).
      */
      part._readableState.sync = false

      part.on('data', onData)
      part.on('end', onEnd)
    }).on('error', function (err) {
      if (curFile) { curFile.emit('error', err) }
    })
  }).on('error', function (err) {
    boy.emit('error', err)
  }).on('finish', function () {
    finished = true
    checkFinished()
  })
}

Multipart.prototype.write = function (chunk, cb) {
  const r = this.parser.write(chunk)
  if (r && !this._pause) {
    cb()
  } else {
    this._needDrain = !r
    this._cb = cb
  }
}

Multipart.prototype.end = function () {
  const self = this

  if (self.parser.writable) {
    self.parser.end()
  } else if (!self._boy._done) {
    process.nextTick(function () {
      self._boy._done = true
      self._boy.emit('finish')
    })
  }
}

function skipPart (part) {
  part.resume()
}

function FileStream (opts) {
  Readable.call(this, opts)

  this.bytesRead = 0

  this.truncated = false
}

inherits(FileStream, Readable)

FileStream.prototype._read = function (n) {}

module.exports = Multipart
190
node_modules/@fastify/busboy/lib/types/urlencoded.js
generated
vendored
Normal file
@ -0,0 +1,190 @@
'use strict'

const Decoder = require('../utils/Decoder')
const decodeText = require('../utils/decodeText')
const getLimit = require('../utils/getLimit')

const RE_CHARSET = /^charset$/i

UrlEncoded.detect = /^application\/x-www-form-urlencoded/i
function UrlEncoded (boy, cfg) {
  const limits = cfg.limits
  const parsedConType = cfg.parsedConType
  this.boy = boy

  this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
  this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100)
  this.fieldsLimit = getLimit(limits, 'fields', Infinity)

  let charset
  for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var
    if (Array.isArray(parsedConType[i]) &&
      RE_CHARSET.test(parsedConType[i][0])) {
      charset = parsedConType[i][1].toLowerCase()
      break
    }
  }

  if (charset === undefined) { charset = cfg.defCharset || 'utf8' }

  this.decoder = new Decoder()
  this.charset = charset
  this._fields = 0
  this._state = 'key'
  this._checkingBytes = true
  this._bytesKey = 0
  this._bytesVal = 0
  this._key = ''
  this._val = ''
  this._keyTrunc = false
  this._valTrunc = false
  this._hitLimit = false
}

UrlEncoded.prototype.write = function (data, cb) {
  if (this._fields === this.fieldsLimit) {
    if (!this.boy.hitFieldsLimit) {
      this.boy.hitFieldsLimit = true
      this.boy.emit('fieldsLimit')
    }
    return cb()
  }

  let idxeq; let idxamp; let i; let p = 0; const len = data.length

  while (p < len) {
    if (this._state === 'key') {
      idxeq = idxamp = undefined
      for (i = p; i < len; ++i) {
        if (!this._checkingBytes) { ++p }
        if (data[i] === 0x3D/* = */) {
          idxeq = i
          break
        } else if (data[i] === 0x26/* & */) {
          idxamp = i
          break
        }
        if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {
          this._hitLimit = true
          break
        } else if (this._checkingBytes) { ++this._bytesKey }
      }

      if (idxeq !== undefined) {
        // key with assignment
        if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) }
        this._state = 'val'

        this._hitLimit = false
        this._checkingBytes = true
        this._val = ''
        this._bytesVal = 0
        this._valTrunc = false
        this.decoder.reset()

        p = idxeq + 1
      } else if (idxamp !== undefined) {
        // key with no assignment
        ++this._fields
        let key; const keyTrunc = this._keyTrunc
        if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key }

        this._hitLimit = false
        this._checkingBytes = true
        this._key = ''
        this._bytesKey = 0
        this._keyTrunc = false
        this.decoder.reset()

        if (key.length) {
          this.boy.emit('field', decodeText(key, 'binary', this.charset),
            '',
            keyTrunc,
            false)
        }

        p = idxamp + 1
        if (this._fields === this.fieldsLimit) { return cb() }
      } else if (this._hitLimit) {
        // we may not have hit the actual limit if there are encoded bytes...
        if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) }
        p = i
        if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {
          // yep, we actually did hit the limit
          this._checkingBytes = false
          this._keyTrunc = true
        }
      } else {
        if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) }
        p = len
      }
    } else {
      idxamp = undefined
      for (i = p; i < len; ++i) {
        if (!this._checkingBytes) { ++p }
        if (data[i] === 0x26/* & */) {
          idxamp = i
          break
        }
        if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {
          this._hitLimit = true
          break
        } else if (this._checkingBytes) { ++this._bytesVal }
      }

      if (idxamp !== undefined) {
        ++this._fields
        if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) }
        this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
          decodeText(this._val, 'binary', this.charset),
          this._keyTrunc,
          this._valTrunc)
        this._state = 'key'

        this._hitLimit = false
        this._checkingBytes = true
        this._key = ''
        this._bytesKey = 0
        this._keyTrunc = false
        this.decoder.reset()

        p = idxamp + 1
        if (this._fields === this.fieldsLimit) { return cb() }
      } else if (this._hitLimit) {
        // we may not have hit the actual limit if there are encoded bytes...
        if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) }
        p = i
        if ((this._val === '' && this.fieldSizeLimit === 0) ||
            (this._bytesVal = this._val.length) === this.fieldSizeLimit) {
          // yep, we actually did hit the limit
          this._checkingBytes = false
          this._valTrunc = true
        }
      } else {
        if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) }
        p = len
      }
    }
  }
  cb()
}

UrlEncoded.prototype.end = function () {
  if (this.boy._done) { return }

  if (this._state === 'key' && this._key.length > 0) {
    this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
      '',
      this._keyTrunc,
      false)
  } else if (this._state === 'val') {
    this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
      decodeText(this._val, 'binary', this.charset),
      this._keyTrunc,
      this._valTrunc)
  }
  this.boy._done = true
  this.boy.emit('finish')
}

module.exports = UrlEncoded
54
node_modules/@fastify/busboy/lib/utils/Decoder.js
generated
vendored
Normal file
@ -0,0 +1,54 @@
'use strict'

const RE_PLUS = /\+/g

const HEX = [
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
  0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]

function Decoder () {
  this.buffer = undefined
}
Decoder.prototype.write = function (str) {
  // Replace '+' with ' ' before decoding
  str = str.replace(RE_PLUS, ' ')
  let res = ''
  let i = 0; let p = 0; const len = str.length
  for (; i < len; ++i) {
    if (this.buffer !== undefined) {
      if (!HEX[str.charCodeAt(i)]) {
        res += '%' + this.buffer
        this.buffer = undefined
        --i // retry character
      } else {
        this.buffer += str[i]
        ++p
        if (this.buffer.length === 2) {
          res += String.fromCharCode(parseInt(this.buffer, 16))
          this.buffer = undefined
        }
      }
    } else if (str[i] === '%') {
      if (i > p) {
        res += str.substring(p, i)
        p = i
      }
      this.buffer = ''
      ++p
    }
  }
  if (p < len && this.buffer === undefined) { res += str.substring(p) }
  return res
}
Decoder.prototype.reset = function () {
  this.buffer = undefined
}

module.exports = Decoder
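Editor's note (illustrative only, not part of the vendored file; the require path is hypothetical): Decoder.write percent-decodes incrementally and keeps a partial '%xx' escape in this.buffer between calls, so a sequence split across chunks still decodes correctly.

const Decoder = require('./Decoder')

const dec = new Decoder()
// '%20' is split across two write() calls; '%41' arrives whole.
const out = dec.write('a%2') + dec.write('0b%41')
console.log(out) // 'a bA' ('%20' becomes a space across chunks, '%41' becomes 'A')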
14
node_modules/@fastify/busboy/lib/utils/basename.js
generated
vendored
Normal file
@ -0,0 +1,14 @@
'use strict'

module.exports = function basename (path) {
  if (typeof path !== 'string') { return '' }
  for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var
    switch (path.charCodeAt(i)) {
      case 0x2F: // '/'
      case 0x5C: // '\'
        path = path.slice(i + 1)
        return (path === '..' || path === '.' ? '' : path)
    }
  }
  return (path === '..' || path === '.' ? '' : path)
}
114
node_modules/@fastify/busboy/lib/utils/decodeText.js
generated
vendored
Normal file
@ -0,0 +1,114 @@
'use strict'

// Node has always utf-8
const utf8Decoder = new TextDecoder('utf-8')
const textDecoders = new Map([
  ['utf-8', utf8Decoder],
  ['utf8', utf8Decoder]
])

function getDecoder (charset) {
  let lc
  while (true) {
    switch (charset) {
      case 'utf-8':
      case 'utf8':
        return decoders.utf8
      case 'latin1':
      case 'ascii': // TODO: Make these a separate, strict decoder?
      case 'us-ascii':
      case 'iso-8859-1':
      case 'iso8859-1':
      case 'iso88591':
      case 'iso_8859-1':
      case 'windows-1252':
      case 'iso_8859-1:1987':
      case 'cp1252':
      case 'x-cp1252':
        return decoders.latin1
      case 'utf16le':
      case 'utf-16le':
      case 'ucs2':
      case 'ucs-2':
        return decoders.utf16le
      case 'base64':
        return decoders.base64
      default:
        if (lc === undefined) {
          lc = true
          charset = charset.toLowerCase()
          continue
        }
        return decoders.other.bind(charset)
    }
  }
}

const decoders = {
  utf8: (data, sourceEncoding) => {
    if (data.length === 0) {
      return ''
    }
    if (typeof data === 'string') {
      data = Buffer.from(data, sourceEncoding)
    }
    return data.utf8Slice(0, data.length)
  },

  latin1: (data, sourceEncoding) => {
    if (data.length === 0) {
      return ''
    }
    if (typeof data === 'string') {
      return data
    }
    return data.latin1Slice(0, data.length)
  },

  utf16le: (data, sourceEncoding) => {
    if (data.length === 0) {
      return ''
    }
    if (typeof data === 'string') {
      data = Buffer.from(data, sourceEncoding)
    }
    return data.ucs2Slice(0, data.length)
  },

  base64: (data, sourceEncoding) => {
    if (data.length === 0) {
      return ''
    }
    if (typeof data === 'string') {
      data = Buffer.from(data, sourceEncoding)
    }
    return data.base64Slice(0, data.length)
  },

  other: (data, sourceEncoding) => {
    if (data.length === 0) {
      return ''
    }
    if (typeof data === 'string') {
      data = Buffer.from(data, sourceEncoding)
    }

    if (textDecoders.has(this.toString())) {
      try {
        return textDecoders.get(this).decode(data)
      } catch (e) { }
    }
    return typeof data === 'string'
      ? data
      : data.toString()
  }
}

function decodeText (text, sourceEncoding, destEncoding) {
  if (text) {
    return getDecoder(destEncoding)(text, sourceEncoding)
  }
  return text
}

module.exports = decodeText
16
node_modules/@fastify/busboy/lib/utils/getLimit.js
generated
vendored
Normal file
@ -0,0 +1,16 @@
'use strict'

module.exports = function getLimit (limits, name, defaultLimit) {
  if (
    !limits ||
    limits[name] === undefined ||
    limits[name] === null
  ) { return defaultLimit }

  if (
    typeof limits[name] !== 'number' ||
    isNaN(limits[name])
  ) { throw new TypeError('Limit ' + name + ' is not a valid number') }

  return limits[name]
}
196
node_modules/@fastify/busboy/lib/utils/parseParams.js
generated
vendored
Normal file
@ -0,0 +1,196 @@
/* eslint-disable object-property-newline */
'use strict'

const decodeText = require('./decodeText')

const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g

const EncodedLookup = {
  '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04',
  '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09',
  '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c',
  '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e',
  '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12',
  '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17',
  '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b',
  '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d',
  '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20',
  '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25',
  '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a',
  '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c',
  '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': '\x2f',
  '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33',
  '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38',
  '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b',
  '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e',
  '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41',
  '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46',
  '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a',
  '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d',
  '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f',
  '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54',
  '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59',
  '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c',
  '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e',
  '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62',
  '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67',
  '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b',
  '%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': '\x6d', '%6D': '\x6d',
  '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70',
  '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75',
  '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a',
  '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c',
  '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f',
  '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83',
  '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88',
  '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b',
  '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e',
  '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91',
  '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96',
  '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a',
  '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d',
  '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f',
  '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2',
  '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4',
  '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7',
  '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9',
  '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab',
  '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac',
  '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad',
  '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae',
  '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0',
  '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2',
  '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5',
  '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7',
  '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba',
  '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb',
  '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc',
  '%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd',
  '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf',
  '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0',
  '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3',
  '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5',
  '%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8',
  '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca',
  '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb',
  '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc',
  '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce',
  '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf',
  '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1',
  '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3',
  '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6',
  '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8',
  '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda',
  '%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', '%DB': '\xdb',
  '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd',
  '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde',
  '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf',
  '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1',
  '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4',
  '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6',
  '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9',
  '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea',
  '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec',
  '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed',
  '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee',
  '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef',
  '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2',
  '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4',
  '%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7',
  '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9',
  '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb',
  '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc',
  '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd',
  '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe',
  '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff'
}

function encodedReplacer (match) {
  return EncodedLookup[match]
}

const STATE_KEY = 0
const STATE_VALUE = 1
const STATE_CHARSET = 2
const STATE_LANG = 3

function parseParams (str) {
  const res = []
  let state = STATE_KEY
  let charset = ''
  let inquote = false
  let escaping = false
  let p = 0
  let tmp = ''
  const len = str.length

  for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
    const char = str[i]
    if (char === '\\' && inquote) {
      if (escaping) { escaping = false } else {
        escaping = true
        continue
      }
    } else if (char === '"') {
      if (!escaping) {
        if (inquote) {
          inquote = false
          state = STATE_KEY
        } else { inquote = true }
        continue
      } else { escaping = false }
    } else {
      if (escaping && inquote) { tmp += '\\' }
      escaping = false
      if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") {
        if (state === STATE_CHARSET) {
          state = STATE_LANG
          charset = tmp.substring(1)
        } else { state = STATE_VALUE }
        tmp = ''
        continue
      } else if (state === STATE_KEY &&
        (char === '*' || char === '=') &&
        res.length) {
        state = char === '*'
          ? STATE_CHARSET
          : STATE_VALUE
        res[p] = [tmp, undefined]
        tmp = ''
        continue
      } else if (!inquote && char === ';') {
        state = STATE_KEY
        if (charset) {
          if (tmp.length) {
            tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
              'binary',
              charset)
          }
          charset = ''
        } else if (tmp.length) {
          tmp = decodeText(tmp, 'binary', 'utf8')
        }
        if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp }
        tmp = ''
        ++p
        continue
      } else if (!inquote && (char === ' ' || char === '\t')) { continue }
    }
    tmp += char
  }
  if (charset && tmp.length) {
    tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
      'binary',
      charset)
  } else if (tmp) {
    tmp = decodeText(tmp, 'binary', 'utf8')
  }

  if (res[p] === undefined) {
    if (tmp) { res[p] = tmp }
  } else { res[p][1] = tmp }

  return res
}

module.exports = parseParams
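Editor's note (illustrative only, not part of the vendored file; the require path is hypothetical): parseParams returns the bare header value first, followed by [key, value] pairs, which is how main.js and multipart.js pick the boundary and charset out of a Content-Type header.

const parseParams = require('./parseParams')

const parsed = parseParams('multipart/form-data; boundary=----WebKitFormBoundaryzB; charset=utf-8')
console.log(parsed)
// -> [ 'multipart/form-data', [ 'boundary', '----WebKitFormBoundaryzB' ], [ 'charset', 'utf-8' ] ]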
46
node_modules/before-after-hook/lib/add.js
generated
vendored
Normal file
@ -0,0 +1,46 @@
module.exports = addHook;

function addHook(state, kind, name, hook) {
  var orig = hook;
  if (!state.registry[name]) {
    state.registry[name] = [];
  }

  if (kind === "before") {
    hook = function (method, options) {
      return Promise.resolve()
        .then(orig.bind(null, options))
        .then(method.bind(null, options));
    };
  }

  if (kind === "after") {
    hook = function (method, options) {
      var result;
      return Promise.resolve()
        .then(method.bind(null, options))
        .then(function (result_) {
          result = result_;
          return orig(result, options);
        })
        .then(function () {
          return result;
        });
    };
  }

  if (kind === "error") {
    hook = function (method, options) {
      return Promise.resolve()
        .then(method.bind(null, options))
        .catch(function (error) {
          return orig(error, options);
        });
    };
  }

  state.registry[name].push({
    hook: hook,
    orig: orig,
  });
}
27
node_modules/before-after-hook/lib/register.js
generated
vendored
Normal file
@ -0,0 +1,27 @@
module.exports = register;

function register(state, name, method, options) {
  if (typeof method !== "function") {
    throw new Error("method for before hook must be a function");
  }

  if (!options) {
    options = {};
  }

  if (Array.isArray(name)) {
    return name.reverse().reduce(function (callback, name) {
      return register.bind(null, state, name, callback, options);
    }, method)();
  }

  return Promise.resolve().then(function () {
    if (!state.registry[name]) {
      return method(options);
    }

    return state.registry[name].reduce(function (method, registered) {
      return registered.hook.bind(null, method, options);
    }, method)();
  });
}
19
node_modules/before-after-hook/lib/remove.js
generated
vendored
Normal file
@ -0,0 +1,19 @@
module.exports = removeHook;

function removeHook(state, name, method) {
  if (!state.registry[name]) {
    return;
  }

  var index = state.registry[name]
    .map(function (registered) {
      return registered.orig;
    })
    .indexOf(method);

  if (index === -1) {
    return;
  }

  state.registry[name].splice(index, 1);
}
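Editor's note (illustrative only, not part of the vendored package; the require paths and the 'save' hook name are hypothetical): the three before-after-hook helpers above compose wrappers around a method via reduce, so a registered 'before' hook runs ahead of the wrapped method when that hook name is invoked.

const addHook = require('./add')
const register = require('./register')

const state = { registry: {} }
// Run this hook before anything registered under the 'save' name.
addHook(state, 'before', 'save', function (options) { options.startedAt = Date.now() })

register(state, 'save', function (options) {
  console.log('saving with', options) // options.startedAt is already set here
  return 'ok'
}, {}).then(function (result) {
  console.log(result) // 'ok'
})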
38
node_modules/is-plain-object/dist/is-plain-object.js
generated
vendored
Normal file
@ -0,0 +1,38 @@
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

/*!
 * is-plain-object <https://github.com/jonschlinkert/is-plain-object>
 *
 * Copyright (c) 2014-2017, Jon Schlinkert.
 * Released under the MIT License.
 */

function isObject(o) {
  return Object.prototype.toString.call(o) === '[object Object]';
}

function isPlainObject(o) {
  var ctor,prot;

  if (isObject(o) === false) return false;

  // If has modified constructor
  ctor = o.constructor;
  if (ctor === undefined) return true;

  // If has modified prototype
  prot = ctor.prototype;
  if (isObject(prot) === false) return false;

  // If constructor does not have an Object-specific method
  if (prot.hasOwnProperty('isPrototypeOf') === false) {
    return false;
  }

  // Most likely a plain Object
  return true;
}

exports.isPlainObject = isPlainObject;
34
node_modules/is-plain-object/dist/is-plain-object.mjs
generated
vendored
Normal file
@ -0,0 +1,34 @@
/*!
 * is-plain-object <https://github.com/jonschlinkert/is-plain-object>
 *
 * Copyright (c) 2014-2017, Jon Schlinkert.
 * Released under the MIT License.
 */

function isObject(o) {
  return Object.prototype.toString.call(o) === '[object Object]';
}

function isPlainObject(o) {
  var ctor,prot;

  if (isObject(o) === false) return false;

  // If has modified constructor
  ctor = o.constructor;
  if (ctor === undefined) return true;

  // If has modified prototype
  prot = ctor.prototype;
  if (isObject(prot) === false) return false;

  // If constructor does not have an Object-specific method
  if (prot.hasOwnProperty('isPrototypeOf') === false) {
    return false;
  }

  // Most likely a plain Object
  return true;
}

export { isPlainObject };
1777
node_modules/node-fetch/lib/index.es.js
generated
vendored
Normal file
File diff suppressed because it is too large
1787
node_modules/node-fetch/lib/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
1775
node_modules/node-fetch/lib/index.mjs
generated
vendored
Normal file
File diff suppressed because it is too large
0
node_modules/tr46/lib/.gitkeep
generated
vendored
Normal file
1
node_modules/tr46/lib/mappingTable.json
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
264
node_modules/tunnel/lib/tunnel.js
generated
vendored
Normal file
@ -0,0 +1,264 @@
'use strict';

var net = require('net');
var tls = require('tls');
var http = require('http');
var https = require('https');
var events = require('events');
var assert = require('assert');
var util = require('util');


exports.httpOverHttp = httpOverHttp;
exports.httpsOverHttp = httpsOverHttp;
exports.httpOverHttps = httpOverHttps;
exports.httpsOverHttps = httpsOverHttps;


function httpOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  return agent;
}

function httpsOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  agent.createSocket = createSecureSocket;
  agent.defaultPort = 443;
  return agent;
}

function httpOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  return agent;
}

function httpsOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  agent.createSocket = createSecureSocket;
  agent.defaultPort = 443;
  return agent;
}


function TunnelingAgent(options) {
  var self = this;
  self.options = options || {};
  self.proxyOptions = self.options.proxy || {};
  self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
  self.requests = [];
  self.sockets = [];

  self.on('free', function onFree(socket, host, port, localAddress) {
    var options = toOptions(host, port, localAddress);
    for (var i = 0, len = self.requests.length; i < len; ++i) {
      var pending = self.requests[i];
      if (pending.host === options.host && pending.port === options.port) {
        // Detect the request to connect same origin server,
        // reuse the connection.
        self.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    socket.destroy();
    self.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events.EventEmitter);

TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));

  if (self.sockets.length >= this.maxSockets) {
    // We are over limit so we'll add it to the queue.
    self.requests.push(options);
    return;
  }

  // If we are under maxSockets create a new one.
  self.createSocket(options, function(socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    function onFree() {
      self.emit('free', socket, options);
    }

    function onCloseOrRemove(err) {
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};

TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        new Buffer(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade); // for v0.6
  connectReq.once('connect', onConnect); // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
        res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
        'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
      cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
      'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};

TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var pos = this.sockets.indexOf(socket)
  if (pos === -1) {
    return;
  }
  this.sockets.splice(pos, 1);

  var pending = this.requests.shift();
  if (pending) {
    // If we have pending requests and a socket gets closed a new one
    // needs to be created to take over in the pool for the one that closed.
    this.createSocket(pending, function(socket) {
      pending.request.onSocket(socket);
    });
  }
};

function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
    var hostHeader = options.request.getHeader('host');
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6
    var secureSocket = tls.connect(0, tlsOptions);
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}


function toOptions(host, port, localAddress) {
  if (typeof host === 'string') { // since v0.10
    return {
      host: host,
      port: port,
      localAddress: localAddress
    };
  }
  return host; // for v0.11 or later
}

function mergeOptions(target) {
  for (var i = 1, len = arguments.length; i < len; ++i) {
    var overrides = arguments[i];
    if (typeof overrides === 'object') {
      var keys = Object.keys(overrides);
      for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
        var k = keys[j];
        if (overrides[k] !== undefined) {
          target[k] = overrides[k];
        }
      }
    }
  }
  return target;
}


var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  debug = function() {};
}
exports.debug = debug; // for test
148
node_modules/undici/lib/agent.js
generated
vendored
Normal file
@ -0,0 +1,148 @@
'use strict'

const { InvalidArgumentError } = require('./core/errors')
const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require('./core/symbols')
const DispatcherBase = require('./dispatcher-base')
const Pool = require('./pool')
const Client = require('./client')
const util = require('./core/util')
const createRedirectInterceptor = require('./interceptor/redirectInterceptor')
const { WeakRef, FinalizationRegistry } = require('./compat/dispatcher-weakref')()

const kOnConnect = Symbol('onConnect')
const kOnDisconnect = Symbol('onDisconnect')
const kOnConnectionError = Symbol('onConnectionError')
const kMaxRedirections = Symbol('maxRedirections')
const kOnDrain = Symbol('onDrain')
const kFactory = Symbol('factory')
const kFinalizer = Symbol('finalizer')
const kOptions = Symbol('options')

function defaultFactory (origin, opts) {
  return opts && opts.connections === 1
    ? new Client(origin, opts)
    : new Pool(origin, opts)
}

class Agent extends DispatcherBase {
  constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) {
    super()

    if (typeof factory !== 'function') {
      throw new InvalidArgumentError('factory must be a function.')
    }

    if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
      throw new InvalidArgumentError('connect must be a function or an object')
    }

    if (!Number.isInteger(maxRedirections) || maxRedirections < 0) {
      throw new InvalidArgumentError('maxRedirections must be a positive number')
    }

    if (connect && typeof connect !== 'function') {
      connect = { ...connect }
    }

    this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent)
      ? options.interceptors.Agent
      : [createRedirectInterceptor({ maxRedirections })]

    this[kOptions] = { ...util.deepClone(options), connect }
    this[kOptions].interceptors = options.interceptors
      ? { ...options.interceptors }
      : undefined
    this[kMaxRedirections] = maxRedirections
    this[kFactory] = factory
    this[kClients] = new Map()
    this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => {
      const ref = this[kClients].get(key)
      if (ref !== undefined && ref.deref() === undefined) {
        this[kClients].delete(key)
      }
    })

    const agent = this

    this[kOnDrain] = (origin, targets) => {
      agent.emit('drain', origin, [agent, ...targets])
    }

    this[kOnConnect] = (origin, targets) => {
      agent.emit('connect', origin, [agent, ...targets])
    }

    this[kOnDisconnect] = (origin, targets, err) => {
      agent.emit('disconnect', origin, [agent, ...targets], err)
    }

    this[kOnConnectionError] = (origin, targets, err) => {
      agent.emit('connectionError', origin, [agent, ...targets], err)
    }
  }

  get [kRunning] () {
    let ret = 0
    for (const ref of this[kClients].values()) {
      const client = ref.deref()
      /* istanbul ignore next: gc is undeterministic */
      if (client) {
        ret += client[kRunning]
      }
    }
    return ret
  }

  [kDispatch] (opts, handler) {
    let key
    if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) {
      key = String(opts.origin)
    } else {
      throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')
    }

    const ref = this[kClients].get(key)

    let dispatcher = ref ? ref.deref() : null
    if (!dispatcher) {
      dispatcher = this[kFactory](opts.origin, this[kOptions])
        .on('drain', this[kOnDrain])
        .on('connect', this[kOnConnect])
        .on('disconnect', this[kOnDisconnect])
        .on('connectionError', this[kOnConnectionError])

      this[kClients].set(key, new WeakRef(dispatcher))
      this[kFinalizer].register(dispatcher, key)
    }

    return dispatcher.dispatch(opts, handler)
  }

  async [kClose] () {
    const closePromises = []
    for (const ref of this[kClients].values()) {
      const client = ref.deref()
      /* istanbul ignore else: gc is undeterministic */
      if (client) {
        closePromises.push(client.close())
      }
    }

    await Promise.all(closePromises)
  }

  async [kDestroy] (err) {
    const destroyPromises = []
    for (const ref of this[kClients].values()) {
      const client = ref.deref()
      /* istanbul ignore else: gc is undeterministic */
      if (client) {
        destroyPromises.push(client.destroy(err))
      }
    }

    await Promise.all(destroyPromises)
  }
}

module.exports = Agent
54
node_modules/undici/lib/api/abort-signal.js
generated
vendored
Normal file
@ -0,0 +1,54 @@
const { addAbortListener } = require('../core/util')
const { RequestAbortedError } = require('../core/errors')

const kListener = Symbol('kListener')
const kSignal = Symbol('kSignal')

function abort (self) {
  if (self.abort) {
    self.abort()
  } else {
    self.onError(new RequestAbortedError())
  }
}

function addSignal (self, signal) {
  self[kSignal] = null
  self[kListener] = null

  if (!signal) {
    return
  }

  if (signal.aborted) {
    abort(self)
    return
  }

  self[kSignal] = signal
  self[kListener] = () => {
    abort(self)
  }

  addAbortListener(self[kSignal], self[kListener])
}

function removeSignal (self) {
  if (!self[kSignal]) {
    return
  }

  if ('removeEventListener' in self[kSignal]) {
    self[kSignal].removeEventListener('abort', self[kListener])
  } else {
    self[kSignal].removeListener('abort', self[kListener])
  }

  self[kSignal] = null
  self[kListener] = null
}

module.exports = {
  addSignal,
  removeSignal
}
104
node_modules/undici/lib/api/api-connect.js
generated
vendored
Normal file
@ -0,0 +1,104 @@
'use strict'

const { AsyncResource } = require('async_hooks')
const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')
const util = require('../core/util')
const { addSignal, removeSignal } = require('./abort-signal')

class ConnectHandler extends AsyncResource {
  constructor (opts, callback) {
    if (!opts || typeof opts !== 'object') {
      throw new InvalidArgumentError('invalid opts')
    }

    if (typeof callback !== 'function') {
      throw new InvalidArgumentError('invalid callback')
    }

    const { signal, opaque, responseHeaders } = opts

    if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
      throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
    }

    super('UNDICI_CONNECT')

    this.opaque = opaque || null
    this.responseHeaders = responseHeaders || null
    this.callback = callback
    this.abort = null

    addSignal(this, signal)
  }

  onConnect (abort, context) {
    if (!this.callback) {
      throw new RequestAbortedError()
    }

    this.abort = abort
    this.context = context
  }

  onHeaders () {
    throw new SocketError('bad connect', null)
  }

  onUpgrade (statusCode, rawHeaders, socket) {
    const { callback, opaque, context } = this

    removeSignal(this)

    this.callback = null

    let headers = rawHeaders
    // Indicates is an HTTP2Session
    if (headers != null) {
      headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
    }

    this.runInAsyncScope(callback, null, null, {
      statusCode,
      headers,
      socket,
      opaque,
      context
    })
  }

  onError (err) {
    const { callback, opaque } = this

    removeSignal(this)

    if (callback) {
      this.callback = null
      queueMicrotask(() => {
        this.runInAsyncScope(callback, null, err, { opaque })
      })
    }
  }
}

function connect (opts, callback) {
  if (callback === undefined) {
    return new Promise((resolve, reject) => {
      connect.call(this, opts, (err, data) => {
        return err ? reject(err) : resolve(data)
      })
    })
  }

  try {
    const connectHandler = new ConnectHandler(opts, callback)
    this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler)
  } catch (err) {
    if (typeof callback !== 'function') {
      throw err
    }
    const opaque = opts && opts.opaque
    queueMicrotask(() => callback(err, { opaque }))
  }
}

module.exports = connect
249
node_modules/undici/lib/api/api-pipeline.js
generated
vendored
Normal file
@ -0,0 +1,249 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const {
|
||||||
|
Readable,
|
||||||
|
Duplex,
|
||||||
|
PassThrough
|
||||||
|
} = require('stream')
|
||||||
|
const {
|
||||||
|
InvalidArgumentError,
|
||||||
|
InvalidReturnValueError,
|
||||||
|
RequestAbortedError
|
||||||
|
} = require('../core/errors')
|
||||||
|
const util = require('../core/util')
|
||||||
|
const { AsyncResource } = require('async_hooks')
|
||||||
|
const { addSignal, removeSignal } = require('./abort-signal')
|
||||||
|
const assert = require('assert')
|
||||||
|
|
||||||
|
const kResume = Symbol('resume')
|
||||||
|
|
||||||
|
class PipelineRequest extends Readable {
|
||||||
|
constructor () {
|
||||||
|
super({ autoDestroy: true })
|
||||||
|
|
||||||
|
this[kResume] = null
|
||||||
|
}
|
||||||
|
|
||||||
|
_read () {
|
||||||
|
const { [kResume]: resume } = this
|
||||||
|
|
||||||
|
if (resume) {
|
||||||
|
this[kResume] = null
|
||||||
|
resume()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_destroy (err, callback) {
|
||||||
|
this._read()
|
||||||
|
|
||||||
|
callback(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class PipelineResponse extends Readable {
|
||||||
|
constructor (resume) {
|
||||||
|
super({ autoDestroy: true })
|
||||||
|
this[kResume] = resume
|
||||||
|
}
|
||||||
|
|
||||||
|
_read () {
|
||||||
|
this[kResume]()
|
||||||
|
}
|
||||||
|
|
||||||
|
_destroy (err, callback) {
|
||||||
|
if (!err && !this._readableState.endEmitted) {
|
||||||
|
err = new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
callback(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class PipelineHandler extends AsyncResource {
|
||||||
|
constructor (opts, handler) {
|
||||||
|
if (!opts || typeof opts !== 'object') {
|
||||||
|
throw new InvalidArgumentError('invalid opts')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof handler !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid handler')
|
||||||
|
}
|
||||||
|
|
||||||
|
const { signal, method, opaque, onInfo, responseHeaders } = opts
|
||||||
|
|
||||||
|
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
|
||||||
|
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (method === 'CONNECT') {
|
||||||
|
throw new InvalidArgumentError('invalid method')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (onInfo && typeof onInfo !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid onInfo callback')
|
||||||
|
}
|
||||||
|
|
||||||
|
super('UNDICI_PIPELINE')
|
||||||
|
|
||||||
|
this.opaque = opaque || null
|
||||||
|
this.responseHeaders = responseHeaders || null
|
||||||
|
this.handler = handler
|
||||||
|
this.abort = null
|
||||||
|
this.context = null
|
||||||
|
this.onInfo = onInfo || null
|
||||||
|
|
||||||
|
this.req = new PipelineRequest().on('error', util.nop)
|
||||||
|
|
||||||
|
this.ret = new Duplex({
|
||||||
|
readableObjectMode: opts.objectMode,
|
||||||
|
autoDestroy: true,
|
||||||
|
read: () => {
|
||||||
|
const { body } = this
|
||||||
|
|
||||||
|
if (body && body.resume) {
|
||||||
|
body.resume()
|
||||||
|
}
|
||||||
|
},
|
||||||
|
write: (chunk, encoding, callback) => {
|
||||||
|
const { req } = this
|
||||||
|
|
||||||
|
if (req.push(chunk, encoding) || req._readableState.destroyed) {
|
||||||
|
callback()
|
||||||
|
} else {
|
||||||
|
req[kResume] = callback
|
||||||
|
}
|
||||||
|
},
|
||||||
|
destroy: (err, callback) => {
|
||||||
|
const { body, req, res, ret, abort } = this
|
||||||
|
|
||||||
|
if (!err && !ret._readableState.endEmitted) {
|
||||||
|
err = new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
if (abort && err) {
|
||||||
|
abort()
|
||||||
|
}
|
||||||
|
|
||||||
|
util.destroy(body, err)
|
||||||
|
util.destroy(req, err)
|
||||||
|
util.destroy(res, err)
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
callback(err)
|
||||||
|
}
|
||||||
|
}).on('prefinish', () => {
|
||||||
|
const { req } = this
|
||||||
|
|
||||||
|
// Node < 15 does not call _final in same tick.
|
||||||
|
req.push(null)
|
||||||
|
})
|
||||||
|
|
||||||
|
this.res = null
|
||||||
|
|
||||||
|
addSignal(this, signal)
|
||||||
|
}
|
||||||
|
|
||||||
|
onConnect (abort, context) {
|
||||||
|
const { ret, res } = this
|
||||||
|
|
||||||
|
assert(!res, 'pipeline cannot be retried')
|
||||||
|
|
||||||
|
if (ret.destroyed) {
|
||||||
|
throw new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.abort = abort
|
||||||
|
this.context = context
|
||||||
|
}
|
||||||
|
|
||||||
|
onHeaders (statusCode, rawHeaders, resume) {
|
||||||
|
const { opaque, handler, context } = this
|
||||||
|
|
||||||
|
if (statusCode < 200) {
|
||||||
|
if (this.onInfo) {
|
||||||
|
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
|
||||||
|
this.onInfo({ statusCode, headers })
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.res = new PipelineResponse(resume)
|
||||||
|
|
||||||
|
let body
|
||||||
|
try {
|
||||||
|
this.handler = null
|
||||||
|
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
|
||||||
|
body = this.runInAsyncScope(handler, null, {
|
||||||
|
statusCode,
|
||||||
|
headers,
|
||||||
|
opaque,
|
||||||
|
body: this.res,
|
||||||
|
context
|
||||||
|
})
|
||||||
|
} catch (err) {
|
||||||
|
this.res.on('error', util.nop)
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!body || typeof body.on !== 'function') {
|
||||||
|
throw new InvalidReturnValueError('expected Readable')
|
||||||
|
}
|
||||||
|
|
||||||
|
body
|
||||||
|
.on('data', (chunk) => {
|
||||||
|
const { ret, body } = this
|
||||||
|
|
||||||
|
if (!ret.push(chunk) && body.pause) {
|
||||||
|
body.pause()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.on('error', (err) => {
|
||||||
|
const { ret } = this
|
||||||
|
|
||||||
|
util.destroy(ret, err)
|
||||||
|
})
|
||||||
|
.on('end', () => {
|
||||||
|
const { ret } = this
|
||||||
|
|
||||||
|
ret.push(null)
|
||||||
|
})
|
||||||
|
.on('close', () => {
|
||||||
|
const { ret } = this
|
||||||
|
|
||||||
|
if (!ret._readableState.ended) {
|
||||||
|
util.destroy(ret, new RequestAbortedError())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
this.body = body
|
||||||
|
}
|
||||||
|
|
||||||
|
onData (chunk) {
|
||||||
|
const { res } = this
|
||||||
|
return res.push(chunk)
|
||||||
|
}
|
||||||
|
|
||||||
|
onComplete (trailers) {
|
||||||
|
const { res } = this
|
||||||
|
res.push(null)
|
||||||
|
}
|
||||||
|
|
||||||
|
onError (err) {
|
||||||
|
const { ret } = this
|
||||||
|
this.handler = null
|
||||||
|
util.destroy(ret, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function pipeline (opts, handler) {
|
||||||
|
try {
|
||||||
|
const pipelineHandler = new PipelineHandler(opts, handler)
|
||||||
|
this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler)
|
||||||
|
return pipelineHandler.ret
|
||||||
|
} catch (err) {
|
||||||
|
return new PassThrough().destroy(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = pipeline
|
179
node_modules/undici/lib/api/api-request.js
generated
vendored
Normal file
@ -0,0 +1,179 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const Readable = require('./readable')
|
||||||
|
const {
|
||||||
|
InvalidArgumentError,
|
||||||
|
RequestAbortedError
|
||||||
|
} = require('../core/errors')
|
||||||
|
const util = require('../core/util')
|
||||||
|
const { getResolveErrorBodyCallback } = require('./util')
|
||||||
|
const { AsyncResource } = require('async_hooks')
|
||||||
|
const { addSignal, removeSignal } = require('./abort-signal')
|
||||||
|
|
||||||
|
class RequestHandler extends AsyncResource {
|
||||||
|
constructor (opts, callback) {
|
||||||
|
if (!opts || typeof opts !== 'object') {
|
||||||
|
throw new InvalidArgumentError('invalid opts')
|
||||||
|
}
|
||||||
|
|
||||||
|
const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid callback')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) {
|
||||||
|
throw new InvalidArgumentError('invalid highWaterMark')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
|
||||||
|
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (method === 'CONNECT') {
|
||||||
|
throw new InvalidArgumentError('invalid method')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (onInfo && typeof onInfo !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid onInfo callback')
|
||||||
|
}
|
||||||
|
|
||||||
|
super('UNDICI_REQUEST')
|
||||||
|
} catch (err) {
|
||||||
|
if (util.isStream(body)) {
|
||||||
|
util.destroy(body.on('error', util.nop), err)
|
||||||
|
}
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
|
||||||
|
this.responseHeaders = responseHeaders || null
|
||||||
|
this.opaque = opaque || null
|
||||||
|
this.callback = callback
|
||||||
|
this.res = null
|
||||||
|
this.abort = null
|
||||||
|
this.body = body
|
||||||
|
this.trailers = {}
|
||||||
|
this.context = null
|
||||||
|
this.onInfo = onInfo || null
|
||||||
|
this.throwOnError = throwOnError
|
||||||
|
this.highWaterMark = highWaterMark
|
||||||
|
|
||||||
|
if (util.isStream(body)) {
|
||||||
|
body.on('error', (err) => {
|
||||||
|
this.onError(err)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
addSignal(this, signal)
|
||||||
|
}
|
||||||
|
|
||||||
|
onConnect (abort, context) {
|
||||||
|
if (!this.callback) {
|
||||||
|
throw new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.abort = abort
|
||||||
|
this.context = context
|
||||||
|
}
|
||||||
|
|
||||||
|
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
|
||||||
|
const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this
|
||||||
|
|
||||||
|
const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
|
||||||
|
|
||||||
|
if (statusCode < 200) {
|
||||||
|
if (this.onInfo) {
|
||||||
|
this.onInfo({ statusCode, headers })
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
|
||||||
|
const contentType = parsedHeaders['content-type']
|
||||||
|
const body = new Readable({ resume, abort, contentType, highWaterMark })
|
||||||
|
|
||||||
|
this.callback = null
|
||||||
|
this.res = body
|
||||||
|
if (callback !== null) {
|
||||||
|
if (this.throwOnError && statusCode >= 400) {
|
||||||
|
this.runInAsyncScope(getResolveErrorBodyCallback, null,
|
||||||
|
{ callback, body, contentType, statusCode, statusMessage, headers }
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
this.runInAsyncScope(callback, null, null, {
|
||||||
|
statusCode,
|
||||||
|
headers,
|
||||||
|
trailers: this.trailers,
|
||||||
|
opaque,
|
||||||
|
body,
|
||||||
|
context
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onData (chunk) {
|
||||||
|
const { res } = this
|
||||||
|
return res.push(chunk)
|
||||||
|
}
|
||||||
|
|
||||||
|
onComplete (trailers) {
|
||||||
|
const { res } = this
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
util.parseHeaders(trailers, this.trailers)
|
||||||
|
|
||||||
|
res.push(null)
|
||||||
|
}
|
||||||
|
|
||||||
|
onError (err) {
|
||||||
|
const { res, callback, body, opaque } = this
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
if (callback) {
|
||||||
|
// TODO: Does this need queueMicrotask?
|
||||||
|
this.callback = null
|
||||||
|
queueMicrotask(() => {
|
||||||
|
this.runInAsyncScope(callback, null, err, { opaque })
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (res) {
|
||||||
|
this.res = null
|
||||||
|
// Ensure all queued handlers are invoked before destroying res.
|
||||||
|
queueMicrotask(() => {
|
||||||
|
util.destroy(res, err)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body) {
|
||||||
|
this.body = null
|
||||||
|
util.destroy(body, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function request (opts, callback) {
|
||||||
|
if (callback === undefined) {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
request.call(this, opts, (err, data) => {
|
||||||
|
return err ? reject(err) : resolve(data)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
this.dispatch(opts, new RequestHandler(opts, callback))
|
||||||
|
} catch (err) {
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
const opaque = opts && opts.opaque
|
||||||
|
queueMicrotask(() => callback(err, { opaque }))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = request
|
220
node_modules/undici/lib/api/api-stream.js
generated
vendored
Normal file
@ -0,0 +1,220 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const { finished, PassThrough } = require('stream')
|
||||||
|
const {
|
||||||
|
InvalidArgumentError,
|
||||||
|
InvalidReturnValueError,
|
||||||
|
RequestAbortedError
|
||||||
|
} = require('../core/errors')
|
||||||
|
const util = require('../core/util')
|
||||||
|
const { getResolveErrorBodyCallback } = require('./util')
|
||||||
|
const { AsyncResource } = require('async_hooks')
|
||||||
|
const { addSignal, removeSignal } = require('./abort-signal')
|
||||||
|
|
||||||
|
class StreamHandler extends AsyncResource {
|
||||||
|
constructor (opts, factory, callback) {
|
||||||
|
if (!opts || typeof opts !== 'object') {
|
||||||
|
throw new InvalidArgumentError('invalid opts')
|
||||||
|
}
|
||||||
|
|
||||||
|
const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid callback')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof factory !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid factory')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
|
||||||
|
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (method === 'CONNECT') {
|
||||||
|
throw new InvalidArgumentError('invalid method')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (onInfo && typeof onInfo !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid onInfo callback')
|
||||||
|
}
|
||||||
|
|
||||||
|
super('UNDICI_STREAM')
|
||||||
|
} catch (err) {
|
||||||
|
if (util.isStream(body)) {
|
||||||
|
util.destroy(body.on('error', util.nop), err)
|
||||||
|
}
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
|
||||||
|
this.responseHeaders = responseHeaders || null
|
||||||
|
this.opaque = opaque || null
|
||||||
|
this.factory = factory
|
||||||
|
this.callback = callback
|
||||||
|
this.res = null
|
||||||
|
this.abort = null
|
||||||
|
this.context = null
|
||||||
|
this.trailers = null
|
||||||
|
this.body = body
|
||||||
|
this.onInfo = onInfo || null
|
||||||
|
this.throwOnError = throwOnError || false
|
||||||
|
|
||||||
|
if (util.isStream(body)) {
|
||||||
|
body.on('error', (err) => {
|
||||||
|
this.onError(err)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
addSignal(this, signal)
|
||||||
|
}
|
||||||
|
|
||||||
|
onConnect (abort, context) {
|
||||||
|
if (!this.callback) {
|
||||||
|
throw new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.abort = abort
|
||||||
|
this.context = context
|
||||||
|
}
|
||||||
|
|
||||||
|
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
|
||||||
|
const { factory, opaque, context, callback, responseHeaders } = this
|
||||||
|
|
||||||
|
const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
|
||||||
|
|
||||||
|
if (statusCode < 200) {
|
||||||
|
if (this.onInfo) {
|
||||||
|
this.onInfo({ statusCode, headers })
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.factory = null
|
||||||
|
|
||||||
|
let res
|
||||||
|
|
||||||
|
if (this.throwOnError && statusCode >= 400) {
|
||||||
|
const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
|
||||||
|
const contentType = parsedHeaders['content-type']
|
||||||
|
res = new PassThrough()
|
||||||
|
|
||||||
|
this.callback = null
|
||||||
|
this.runInAsyncScope(getResolveErrorBodyCallback, null,
|
||||||
|
{ callback, body: res, contentType, statusCode, statusMessage, headers }
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
if (factory === null) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
res = this.runInAsyncScope(factory, null, {
|
||||||
|
statusCode,
|
||||||
|
headers,
|
||||||
|
opaque,
|
||||||
|
context
|
||||||
|
})
|
||||||
|
|
||||||
|
if (
|
||||||
|
!res ||
|
||||||
|
typeof res.write !== 'function' ||
|
||||||
|
typeof res.end !== 'function' ||
|
||||||
|
typeof res.on !== 'function'
|
||||||
|
) {
|
||||||
|
throw new InvalidReturnValueError('expected Writable')
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Avoid finished. It registers an unnecessary amount of listeners.
|
||||||
|
finished(res, { readable: false }, (err) => {
|
||||||
|
const { callback, res, opaque, trailers, abort } = this
|
||||||
|
|
||||||
|
this.res = null
|
||||||
|
if (err || !res.readable) {
|
||||||
|
util.destroy(res, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
this.callback = null
|
||||||
|
this.runInAsyncScope(callback, null, err || null, { opaque, trailers })
|
||||||
|
|
||||||
|
if (err) {
|
||||||
|
abort()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
res.on('drain', resume)
|
||||||
|
|
||||||
|
this.res = res
|
||||||
|
|
||||||
|
const needDrain = res.writableNeedDrain !== undefined
|
||||||
|
? res.writableNeedDrain
|
||||||
|
: res._writableState && res._writableState.needDrain
|
||||||
|
|
||||||
|
return needDrain !== true
|
||||||
|
}
|
||||||
|
|
||||||
|
onData (chunk) {
|
||||||
|
const { res } = this
|
||||||
|
|
||||||
|
return res ? res.write(chunk) : true
|
||||||
|
}
|
||||||
|
|
||||||
|
onComplete (trailers) {
|
||||||
|
const { res } = this
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
if (!res) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.trailers = util.parseHeaders(trailers)
|
||||||
|
|
||||||
|
res.end()
|
||||||
|
}
|
||||||
|
|
||||||
|
onError (err) {
|
||||||
|
const { res, callback, opaque, body } = this
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
this.factory = null
|
||||||
|
|
||||||
|
if (res) {
|
||||||
|
this.res = null
|
||||||
|
util.destroy(res, err)
|
||||||
|
} else if (callback) {
|
||||||
|
this.callback = null
|
||||||
|
queueMicrotask(() => {
|
||||||
|
this.runInAsyncScope(callback, null, err, { opaque })
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body) {
|
||||||
|
this.body = null
|
||||||
|
util.destroy(body, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function stream (opts, factory, callback) {
|
||||||
|
if (callback === undefined) {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
stream.call(this, opts, factory, (err, data) => {
|
||||||
|
return err ? reject(err) : resolve(data)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
this.dispatch(opts, new StreamHandler(opts, factory, callback))
|
||||||
|
} catch (err) {
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
const opaque = opts && opts.opaque
|
||||||
|
queueMicrotask(() => callback(err, { opaque }))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = stream
|
105
node_modules/undici/lib/api/api-upgrade.js
generated
vendored
Normal file
@ -0,0 +1,105 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')
|
||||||
|
const { AsyncResource } = require('async_hooks')
|
||||||
|
const util = require('../core/util')
|
||||||
|
const { addSignal, removeSignal } = require('./abort-signal')
|
||||||
|
const assert = require('assert')
|
||||||
|
|
||||||
|
class UpgradeHandler extends AsyncResource {
|
||||||
|
constructor (opts, callback) {
|
||||||
|
if (!opts || typeof opts !== 'object') {
|
||||||
|
throw new InvalidArgumentError('invalid opts')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw new InvalidArgumentError('invalid callback')
|
||||||
|
}
|
||||||
|
|
||||||
|
const { signal, opaque, responseHeaders } = opts
|
||||||
|
|
||||||
|
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
|
||||||
|
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
|
||||||
|
}
|
||||||
|
|
||||||
|
super('UNDICI_UPGRADE')
|
||||||
|
|
||||||
|
this.responseHeaders = responseHeaders || null
|
||||||
|
this.opaque = opaque || null
|
||||||
|
this.callback = callback
|
||||||
|
this.abort = null
|
||||||
|
this.context = null
|
||||||
|
|
||||||
|
addSignal(this, signal)
|
||||||
|
}
|
||||||
|
|
||||||
|
onConnect (abort, context) {
|
||||||
|
if (!this.callback) {
|
||||||
|
throw new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.abort = abort
|
||||||
|
this.context = null
|
||||||
|
}
|
||||||
|
|
||||||
|
onHeaders () {
|
||||||
|
throw new SocketError('bad upgrade', null)
|
||||||
|
}
|
||||||
|
|
||||||
|
onUpgrade (statusCode, rawHeaders, socket) {
|
||||||
|
const { callback, opaque, context } = this
|
||||||
|
|
||||||
|
assert.strictEqual(statusCode, 101)
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
this.callback = null
|
||||||
|
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
|
||||||
|
this.runInAsyncScope(callback, null, null, {
|
||||||
|
headers,
|
||||||
|
socket,
|
||||||
|
opaque,
|
||||||
|
context
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
onError (err) {
|
||||||
|
const { callback, opaque } = this
|
||||||
|
|
||||||
|
removeSignal(this)
|
||||||
|
|
||||||
|
if (callback) {
|
||||||
|
this.callback = null
|
||||||
|
queueMicrotask(() => {
|
||||||
|
this.runInAsyncScope(callback, null, err, { opaque })
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function upgrade (opts, callback) {
|
||||||
|
if (callback === undefined) {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
upgrade.call(this, opts, (err, data) => {
|
||||||
|
return err ? reject(err) : resolve(data)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const upgradeHandler = new UpgradeHandler(opts, callback)
|
||||||
|
this.dispatch({
|
||||||
|
...opts,
|
||||||
|
method: opts.method || 'GET',
|
||||||
|
upgrade: opts.protocol || 'Websocket'
|
||||||
|
}, upgradeHandler)
|
||||||
|
} catch (err) {
|
||||||
|
if (typeof callback !== 'function') {
|
||||||
|
throw err
|
||||||
|
}
|
||||||
|
const opaque = opts && opts.opaque
|
||||||
|
queueMicrotask(() => callback(err, { opaque }))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = upgrade
|
7
node_modules/undici/lib/api/index.js
generated
vendored
Normal file
@ -0,0 +1,7 @@
'use strict'

module.exports.request = require('./api-request')
module.exports.stream = require('./api-stream')
module.exports.pipeline = require('./api-pipeline')
module.exports.upgrade = require('./api-upgrade')
module.exports.connect = require('./api-connect')
307
node_modules/undici/lib/api/readable.js
generated
vendored
Normal file
@ -0,0 +1,307 @@
|
|||||||
|
// Ported from https://github.com/nodejs/undici/pull/907
|
||||||
|
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
const assert = require('assert')
|
||||||
|
const { Readable } = require('stream')
|
||||||
|
const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = require('../core/errors')
|
||||||
|
const util = require('../core/util')
|
||||||
|
const { ReadableStreamFrom, toUSVString } = require('../core/util')
|
||||||
|
|
||||||
|
let Blob
|
||||||
|
|
||||||
|
const kConsume = Symbol('kConsume')
|
||||||
|
const kReading = Symbol('kReading')
|
||||||
|
const kBody = Symbol('kBody')
|
||||||
|
const kAbort = Symbol('abort')
|
||||||
|
const kContentType = Symbol('kContentType')
|
||||||
|
|
||||||
|
module.exports = class BodyReadable extends Readable {
|
||||||
|
constructor ({
|
||||||
|
resume,
|
||||||
|
abort,
|
||||||
|
contentType = '',
|
||||||
|
highWaterMark = 64 * 1024 // Same as nodejs fs streams.
|
||||||
|
}) {
|
||||||
|
super({
|
||||||
|
autoDestroy: true,
|
||||||
|
read: resume,
|
||||||
|
highWaterMark
|
||||||
|
})
|
||||||
|
|
||||||
|
this._readableState.dataEmitted = false
|
||||||
|
|
||||||
|
this[kAbort] = abort
|
||||||
|
this[kConsume] = null
|
||||||
|
this[kBody] = null
|
||||||
|
this[kContentType] = contentType
|
||||||
|
|
||||||
|
// Is stream being consumed through Readable API?
|
||||||
|
// This is an optimization so that we avoid checking
|
||||||
|
// for 'data' and 'readable' listeners in the hot path
|
||||||
|
// inside push().
|
||||||
|
this[kReading] = false
|
||||||
|
}
|
||||||
|
|
||||||
|
destroy (err) {
|
||||||
|
if (this.destroyed) {
|
||||||
|
// Node < 16
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!err && !this._readableState.endEmitted) {
|
||||||
|
err = new RequestAbortedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
if (err) {
|
||||||
|
this[kAbort]()
|
||||||
|
}
|
||||||
|
|
||||||
|
return super.destroy(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
emit (ev, ...args) {
|
||||||
|
if (ev === 'data') {
|
||||||
|
// Node < 16.7
|
||||||
|
this._readableState.dataEmitted = true
|
||||||
|
} else if (ev === 'error') {
|
||||||
|
// Node < 16
|
||||||
|
this._readableState.errorEmitted = true
|
||||||
|
}
|
||||||
|
return super.emit(ev, ...args)
|
||||||
|
}
|
||||||
|
|
||||||
|
on (ev, ...args) {
|
||||||
|
if (ev === 'data' || ev === 'readable') {
|
||||||
|
this[kReading] = true
|
||||||
|
}
|
||||||
|
return super.on(ev, ...args)
|
||||||
|
}
|
||||||
|
|
||||||
|
addListener (ev, ...args) {
|
||||||
|
return this.on(ev, ...args)
|
||||||
|
}
|
||||||
|
|
||||||
|
off (ev, ...args) {
|
||||||
|
const ret = super.off(ev, ...args)
|
||||||
|
if (ev === 'data' || ev === 'readable') {
|
||||||
|
this[kReading] = (
|
||||||
|
this.listenerCount('data') > 0 ||
|
||||||
|
this.listenerCount('readable') > 0
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
removeListener (ev, ...args) {
|
||||||
|
return this.off(ev, ...args)
|
||||||
|
}
|
||||||
|
|
||||||
|
push (chunk) {
|
||||||
|
if (this[kConsume] && chunk !== null && this.readableLength === 0) {
|
||||||
|
consumePush(this[kConsume], chunk)
|
||||||
|
return this[kReading] ? super.push(chunk) : true
|
||||||
|
}
|
||||||
|
return super.push(chunk)
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-text
|
||||||
|
async text () {
|
||||||
|
return consume(this, 'text')
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-json
|
||||||
|
async json () {
|
||||||
|
return consume(this, 'json')
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-blob
|
||||||
|
async blob () {
|
||||||
|
return consume(this, 'blob')
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-arraybuffer
|
||||||
|
async arrayBuffer () {
|
||||||
|
return consume(this, 'arrayBuffer')
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-formdata
|
||||||
|
async formData () {
|
||||||
|
// TODO: Implement.
|
||||||
|
throw new NotSupportedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-bodyused
|
||||||
|
get bodyUsed () {
|
||||||
|
return util.isDisturbed(this)
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#dom-body-body
|
||||||
|
get body () {
|
||||||
|
if (!this[kBody]) {
|
||||||
|
this[kBody] = ReadableStreamFrom(this)
|
||||||
|
if (this[kConsume]) {
|
||||||
|
// TODO: Is this the best way to force a lock?
|
||||||
|
this[kBody].getReader() // Ensure stream is locked.
|
||||||
|
assert(this[kBody].locked)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return this[kBody]
|
||||||
|
}
|
||||||
|
|
||||||
|
async dump (opts) {
|
||||||
|
let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144
|
||||||
|
const signal = opts && opts.signal
|
||||||
|
const abortFn = () => {
|
||||||
|
this.destroy()
|
||||||
|
}
|
||||||
|
let signalListenerCleanup
|
||||||
|
if (signal) {
|
||||||
|
if (typeof signal !== 'object' || !('aborted' in signal)) {
|
||||||
|
throw new InvalidArgumentError('signal must be an AbortSignal')
|
||||||
|
}
|
||||||
|
util.throwIfAborted(signal)
|
||||||
|
signalListenerCleanup = util.addAbortListener(signal, abortFn)
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
for await (const chunk of this) {
|
||||||
|
util.throwIfAborted(signal)
|
||||||
|
limit -= Buffer.byteLength(chunk)
|
||||||
|
if (limit < 0) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
util.throwIfAborted(signal)
|
||||||
|
} finally {
|
||||||
|
if (typeof signalListenerCleanup === 'function') {
|
||||||
|
signalListenerCleanup()
|
||||||
|
} else if (signalListenerCleanup) {
|
||||||
|
signalListenerCleanup[Symbol.dispose]()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://streams.spec.whatwg.org/#readablestream-locked
|
||||||
|
function isLocked (self) {
|
||||||
|
// Consume is an implicit lock.
|
||||||
|
return (self[kBody] && self[kBody].locked === true) || self[kConsume]
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://fetch.spec.whatwg.org/#body-unusable
|
||||||
|
function isUnusable (self) {
|
||||||
|
return util.isDisturbed(self) || isLocked(self)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function consume (stream, type) {
|
||||||
|
if (isUnusable(stream)) {
|
||||||
|
throw new TypeError('unusable')
|
||||||
|
}
|
||||||
|
|
||||||
|
assert(!stream[kConsume])
|
||||||
|
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
stream[kConsume] = {
|
||||||
|
type,
|
||||||
|
stream,
|
||||||
|
resolve,
|
||||||
|
reject,
|
||||||
|
length: 0,
|
||||||
|
body: []
|
||||||
|
}
|
||||||
|
|
||||||
|
stream
|
||||||
|
.on('error', function (err) {
|
||||||
|
consumeFinish(this[kConsume], err)
|
||||||
|
})
|
||||||
|
.on('close', function () {
|
||||||
|
if (this[kConsume].body !== null) {
|
||||||
|
consumeFinish(this[kConsume], new RequestAbortedError())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
process.nextTick(consumeStart, stream[kConsume])
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function consumeStart (consume) {
|
||||||
|
if (consume.body === null) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const { _readableState: state } = consume.stream
|
||||||
|
|
||||||
|
for (const chunk of state.buffer) {
|
||||||
|
consumePush(consume, chunk)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (state.endEmitted) {
|
||||||
|
consumeEnd(this[kConsume])
|
||||||
|
} else {
|
||||||
|
consume.stream.on('end', function () {
|
||||||
|
consumeEnd(this[kConsume])
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
consume.stream.resume()
|
||||||
|
|
||||||
|
while (consume.stream.read() != null) {
|
||||||
|
// Loop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function consumeEnd (consume) {
|
||||||
|
const { type, body, resolve, stream, length } = consume
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (type === 'text') {
|
||||||
|
resolve(toUSVString(Buffer.concat(body)))
|
||||||
|
} else if (type === 'json') {
|
||||||
|
resolve(JSON.parse(Buffer.concat(body)))
|
||||||
|
} else if (type === 'arrayBuffer') {
|
||||||
|
const dst = new Uint8Array(length)
|
||||||
|
|
||||||
|
let pos = 0
|
||||||
|
for (const buf of body) {
|
||||||
|
dst.set(buf, pos)
|
||||||
|
pos += buf.byteLength
|
||||||
|
}
|
||||||
|
|
||||||
|
resolve(dst.buffer)
|
||||||
|
} else if (type === 'blob') {
|
||||||
|
if (!Blob) {
|
||||||
|
Blob = require('buffer').Blob
|
||||||
|
}
|
||||||
|
resolve(new Blob(body, { type: stream[kContentType] }))
|
||||||
|
}
|
||||||
|
|
||||||
|
consumeFinish(consume)
|
||||||
|
} catch (err) {
|
||||||
|
stream.destroy(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function consumePush (consume, chunk) {
|
||||||
|
consume.length += chunk.length
|
||||||
|
consume.body.push(chunk)
|
||||||
|
}
|
||||||
|
|
||||||
|
function consumeFinish (consume, err) {
|
||||||
|
if (consume.body === null) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (err) {
|
||||||
|
consume.reject(err)
|
||||||
|
} else {
|
||||||
|
consume.resolve()
|
||||||
|
}
|
||||||
|
|
||||||
|
consume.type = null
|
||||||
|
consume.stream = null
|
||||||
|
consume.resolve = null
|
||||||
|
consume.reject = null
|
||||||
|
consume.length = 0
|
||||||
|
consume.body = null
|
||||||
|
}
|
46
node_modules/undici/lib/api/util.js
generated
vendored
Normal file
@ -0,0 +1,46 @@
const assert = require('assert')
const {
  ResponseStatusCodeError
} = require('../core/errors')
const { toUSVString } = require('../core/util')

async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
  assert(body)

  let chunks = []
  let limit = 0

  for await (const chunk of body) {
    chunks.push(chunk)
    limit += chunk.length
    if (limit > 128 * 1024) {
      chunks = null
      break
    }
  }

  if (statusCode === 204 || !contentType || !chunks) {
    process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
    return
  }

  try {
    if (contentType.startsWith('application/json')) {
      const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))
      process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
      return
    }

    if (contentType.startsWith('text/')) {
      const payload = toUSVString(Buffer.concat(chunks))
      process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
      return
    }
  } catch (err) {
    // Process in a fallback if error
  }

  process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
}

module.exports = { getResolveErrorBodyCallback }
190
node_modules/undici/lib/balanced-pool.js
generated
vendored
Normal file
@ -0,0 +1,190 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const {
|
||||||
|
BalancedPoolMissingUpstreamError,
|
||||||
|
InvalidArgumentError
|
||||||
|
} = require('./core/errors')
|
||||||
|
const {
|
||||||
|
PoolBase,
|
||||||
|
kClients,
|
||||||
|
kNeedDrain,
|
||||||
|
kAddClient,
|
||||||
|
kRemoveClient,
|
||||||
|
kGetDispatcher
|
||||||
|
} = require('./pool-base')
|
||||||
|
const Pool = require('./pool')
|
||||||
|
const { kUrl, kInterceptors } = require('./core/symbols')
|
||||||
|
const { parseOrigin } = require('./core/util')
|
||||||
|
const kFactory = Symbol('factory')
|
||||||
|
|
||||||
|
const kOptions = Symbol('options')
|
||||||
|
const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor')
|
||||||
|
const kCurrentWeight = Symbol('kCurrentWeight')
|
||||||
|
const kIndex = Symbol('kIndex')
|
||||||
|
const kWeight = Symbol('kWeight')
|
||||||
|
const kMaxWeightPerServer = Symbol('kMaxWeightPerServer')
|
||||||
|
const kErrorPenalty = Symbol('kErrorPenalty')
|
||||||
|
|
||||||
|
function getGreatestCommonDivisor (a, b) {
|
||||||
|
if (b === 0) return a
|
||||||
|
return getGreatestCommonDivisor(b, a % b)
|
||||||
|
}
|
||||||
|
|
||||||
|
function defaultFactory (origin, opts) {
|
||||||
|
return new Pool(origin, opts)
|
||||||
|
}
|
||||||
|
|
||||||
|
class BalancedPool extends PoolBase {
|
||||||
|
constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) {
|
||||||
|
super()
|
||||||
|
|
||||||
|
this[kOptions] = opts
|
||||||
|
this[kIndex] = -1
|
||||||
|
this[kCurrentWeight] = 0
|
||||||
|
|
||||||
|
this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100
|
||||||
|
this[kErrorPenalty] = this[kOptions].errorPenalty || 15
|
||||||
|
|
||||||
|
if (!Array.isArray(upstreams)) {
|
||||||
|
upstreams = [upstreams]
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof factory !== 'function') {
|
||||||
|
throw new InvalidArgumentError('factory must be a function.')
|
||||||
|
}
|
||||||
|
|
||||||
|
this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool)
|
||||||
|
? opts.interceptors.BalancedPool
|
||||||
|
: []
|
||||||
|
this[kFactory] = factory
|
||||||
|
|
||||||
|
for (const upstream of upstreams) {
|
||||||
|
this.addUpstream(upstream)
|
||||||
|
}
|
||||||
|
this._updateBalancedPoolStats()
|
||||||
|
}
|
||||||
|
|
||||||
|
addUpstream (upstream) {
|
||||||
|
const upstreamOrigin = parseOrigin(upstream).origin
|
||||||
|
|
||||||
|
if (this[kClients].find((pool) => (
|
||||||
|
pool[kUrl].origin === upstreamOrigin &&
|
||||||
|
pool.closed !== true &&
|
||||||
|
pool.destroyed !== true
|
||||||
|
))) {
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions]))
|
||||||
|
|
||||||
|
this[kAddClient](pool)
|
||||||
|
pool.on('connect', () => {
|
||||||
|
pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty])
|
||||||
|
})
|
||||||
|
|
||||||
|
pool.on('connectionError', () => {
|
||||||
|
pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
|
||||||
|
this._updateBalancedPoolStats()
|
||||||
|
})
|
||||||
|
|
||||||
|
pool.on('disconnect', (...args) => {
|
||||||
|
const err = args[2]
|
||||||
|
if (err && err.code === 'UND_ERR_SOCKET') {
|
||||||
|
// decrease the weight of the pool.
|
||||||
|
pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
|
||||||
|
this._updateBalancedPoolStats()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
for (const client of this[kClients]) {
|
||||||
|
client[kWeight] = this[kMaxWeightPerServer]
|
||||||
|
}
|
||||||
|
|
||||||
|
this._updateBalancedPoolStats()
|
||||||
|
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
_updateBalancedPoolStats () {
|
||||||
|
this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
removeUpstream (upstream) {
|
||||||
|
const upstreamOrigin = parseOrigin(upstream).origin
|
||||||
|
|
||||||
|
const pool = this[kClients].find((pool) => (
|
||||||
|
pool[kUrl].origin === upstreamOrigin &&
|
||||||
|
pool.closed !== true &&
|
||||||
|
pool.destroyed !== true
|
||||||
|
))
|
||||||
|
|
||||||
|
if (pool) {
|
||||||
|
this[kRemoveClient](pool)
|
||||||
|
}
|
||||||
|
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
get upstreams () {
|
||||||
|
return this[kClients]
|
||||||
|
.filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true)
|
||||||
|
.map((p) => p[kUrl].origin)
|
||||||
|
}
|
||||||
|
|
||||||
|
[kGetDispatcher] () {
|
||||||
|
// We validate that pools is greater than 0,
|
||||||
|
// otherwise we would have to wait until an upstream
|
||||||
|
// is added, which might never happen.
|
||||||
|
if (this[kClients].length === 0) {
|
||||||
|
throw new BalancedPoolMissingUpstreamError()
|
||||||
|
}
|
||||||
|
|
||||||
|
const dispatcher = this[kClients].find(dispatcher => (
|
||||||
|
!dispatcher[kNeedDrain] &&
|
||||||
|
dispatcher.closed !== true &&
|
||||||
|
dispatcher.destroyed !== true
|
||||||
|
))
|
||||||
|
|
||||||
|
if (!dispatcher) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true)
|
||||||
|
|
||||||
|
if (allClientsBusy) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
let counter = 0
|
||||||
|
|
||||||
|
let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain])
|
||||||
|
|
||||||
|
while (counter++ < this[kClients].length) {
|
||||||
|
this[kIndex] = (this[kIndex] + 1) % this[kClients].length
|
||||||
|
const pool = this[kClients][this[kIndex]]
|
||||||
|
|
||||||
|
// find pool index with the largest weight
|
||||||
|
if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) {
|
||||||
|
maxWeightIndex = this[kIndex]
|
||||||
|
}
|
||||||
|
|
||||||
|
// decrease the current weight every `this[kClients].length`.
|
||||||
|
if (this[kIndex] === 0) {
|
||||||
|
// Set the current weight to the next lower weight.
|
||||||
|
this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor]
|
||||||
|
|
||||||
|
if (this[kCurrentWeight] <= 0) {
|
||||||
|
this[kCurrentWeight] = this[kMaxWeightPerServer]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) {
|
||||||
|
return pool
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight]
|
||||||
|
this[kIndex] = maxWeightIndex
|
||||||
|
return this[kClients][maxWeightIndex]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = BalancedPool
|
838
node_modules/undici/lib/cache/cache.js
generated
vendored
Normal file
838
node_modules/undici/lib/cache/cache.js
generated
vendored
Normal file
@ -0,0 +1,838 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const { kConstruct } = require('./symbols')
|
||||||
|
const { urlEquals, fieldValues: getFieldValues } = require('./util')
|
||||||
|
const { kEnumerableProperty, isDisturbed } = require('../core/util')
|
||||||
|
const { kHeadersList } = require('../core/symbols')
|
||||||
|
const { webidl } = require('../fetch/webidl')
|
||||||
|
const { Response, cloneResponse } = require('../fetch/response')
|
||||||
|
const { Request } = require('../fetch/request')
|
||||||
|
const { kState, kHeaders, kGuard, kRealm } = require('../fetch/symbols')
|
||||||
|
const { fetching } = require('../fetch/index')
|
||||||
|
const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = require('../fetch/util')
|
||||||
|
const assert = require('assert')
|
||||||
|
const { getGlobalDispatcher } = require('../global')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation
|
||||||
|
* @typedef {Object} CacheBatchOperation
|
||||||
|
* @property {'delete' | 'put'} type
|
||||||
|
* @property {any} request
|
||||||
|
* @property {any} response
|
||||||
|
* @property {import('../../types/cache').CacheQueryOptions} options
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list
|
||||||
|
* @typedef {[any, any][]} requestResponseList
|
||||||
|
*/
|
||||||
|
|
||||||
|
class Cache {
|
||||||
|
/**
|
||||||
|
* @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list
|
||||||
|
* @type {requestResponseList}
|
||||||
|
*/
|
||||||
|
#relevantRequestResponseList
|
||||||
|
|
||||||
|
constructor () {
|
||||||
|
if (arguments[0] !== kConstruct) {
|
||||||
|
webidl.illegalConstructor()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.#relevantRequestResponseList = arguments[1]
|
||||||
|
}
|
||||||
|
|
||||||
|
async match (request, options = {}) {
|
||||||
|
webidl.brandCheck(this, Cache)
|
||||||
|
webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' })
|
||||||
|
|
||||||
|
request = webidl.converters.RequestInfo(request)
|
||||||
|
options = webidl.converters.CacheQueryOptions(options)
|
||||||
|
|
||||||
|
const p = await this.matchAll(request, options)
|
||||||
|
|
||||||
|
if (p.length === 0) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
return p[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
async matchAll (request = undefined, options = {}) {
|
||||||
|
webidl.brandCheck(this, Cache)
|
||||||
|
|
||||||
|
if (request !== undefined) request = webidl.converters.RequestInfo(request)
|
||||||
|
options = webidl.converters.CacheQueryOptions(options)
|
||||||
|
|
||||||
|
// 1.
|
||||||
|
let r = null
|
||||||
|
|
||||||
|
// 2.
|
||||||
|
if (request !== undefined) {
|
||||||
|
if (request instanceof Request) {
|
||||||
|
// 2.1.1
|
||||||
|
r = request[kState]
|
||||||
|
|
||||||
|
// 2.1.2
|
||||||
|
if (r.method !== 'GET' && !options.ignoreMethod) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
} else if (typeof request === 'string') {
|
||||||
|
// 2.2.1
|
||||||
|
r = new Request(request)[kState]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5.
|
||||||
|
// 5.1
|
||||||
|
const responses = []
|
||||||
|
|
||||||
|
// 5.2
|
||||||
|
if (request === undefined) {
|
||||||
|
// 5.2.1
|
||||||
|
for (const requestResponse of this.#relevantRequestResponseList) {
|
||||||
|
responses.push(requestResponse[1])
|
||||||
|
}
|
||||||
|
} else { // 5.3
|
||||||
|
// 5.3.1
|
||||||
|
const requestResponses = this.#queryCache(r, options)
|
||||||
|
|
||||||
|
// 5.3.2
|
||||||
|
for (const requestResponse of requestResponses) {
|
||||||
|
responses.push(requestResponse[1])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5.4
|
||||||
|
// We don't implement CORs so we don't need to loop over the responses, yay!
|
||||||
|
|
||||||
|
// 5.5.1
|
||||||
|
const responseList = []
|
||||||
|
|
||||||
|
// 5.5.2
|
||||||
|
for (const response of responses) {
|
||||||
|
// 5.5.2.1
|
||||||
|
const responseObject = new Response(response.body?.source ?? null)
|
||||||
|
const body = responseObject[kState].body
|
||||||
|
responseObject[kState] = response
|
||||||
|
responseObject[kState].body = body
|
||||||
|
responseObject[kHeaders][kHeadersList] = response.headersList
|
||||||
|
responseObject[kHeaders][kGuard] = 'immutable'
|
||||||
|
|
||||||
|
responseList.push(responseObject)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 6.
|
||||||
|
return Object.freeze(responseList)
|
||||||
|
}
|
||||||
|
|
||||||
|
async add (request) {
|
||||||
|
webidl.brandCheck(this, Cache)
|
||||||
|
webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' })
|
||||||
|
|
||||||
|
request = webidl.converters.RequestInfo(request)
|
||||||
|
|
||||||
|
// 1.
|
||||||
|
const requests = [request]
|
||||||
|
|
||||||
|
// 2.
|
||||||
|
const responseArrayPromise = this.addAll(requests)
|
||||||
|
|
||||||
|
// 3.
|
||||||
|
return await responseArrayPromise
|
||||||
|
}
|
||||||
|
|
||||||
|
async addAll (requests) {
|
||||||
|
webidl.brandCheck(this, Cache)
|
||||||
|
webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' })
|
||||||
|
|
||||||
|
requests = webidl.converters['sequence<RequestInfo>'](requests)
|
||||||
|
|
||||||
|
// 1.
|
||||||
|
const responsePromises = []
|
||||||
|
|
||||||
|
// 2.
|
||||||
|
const requestList = []
|
||||||
|
|
||||||
|
// 3.
|
||||||
|
for (const request of requests) {
|
||||||
|
if (typeof request === 'string') {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3.1
|
||||||
|
const r = request[kState]
|
||||||
|
|
||||||
|
// 3.2
|
||||||
|
if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') {
|
||||||
|
throw webidl.errors.exception({
|
||||||
|
header: 'Cache.addAll',
|
||||||
|
message: 'Expected http/s scheme when method is not GET.'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.
|
||||||
|
/** @type {ReturnType<typeof fetching>[]} */
|
||||||
|
const fetchControllers = []
|
||||||
|
|
||||||
|
// 5.
|
||||||
|
for (const request of requests) {
|
||||||
|
// 5.1
|
||||||
|
const r = new Request(request)[kState]
|
||||||
|
|
||||||
|
// 5.2
|
||||||
|
if (!urlIsHttpHttpsScheme(r.url)) {
|
||||||
|
throw webidl.errors.exception({
|
||||||
|
header: 'Cache.addAll',
|
||||||
|
message: 'Expected http/s scheme.'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5.4
|
||||||
|
r.initiator = 'fetch'
|
||||||
|
r.destination = 'subresource'
|
||||||
|
|
||||||
|
// 5.5
|
||||||
|
requestList.push(r)
|
||||||
|
|
||||||
|
// 5.6
|
||||||
|
const responsePromise = createDeferredPromise()
|
||||||
|
|
||||||
|
// 5.7
|
||||||
|
fetchControllers.push(fetching({
|
||||||
|
request: r,
|
||||||
|
dispatcher: getGlobalDispatcher(),
|
||||||
|
processResponse (response) {
|
||||||
|
// 1.
|
||||||
|
if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {
|
||||||
|
responsePromise.reject(webidl.errors.exception({
|
||||||
|
header: 'Cache.addAll',
|
||||||
|
message: 'Received an invalid status code or the request failed.'
|
||||||
|
}))
|
||||||
|
} else if (response.headersList.contains('vary')) { // 2.
|
||||||
|
// 2.1
|
||||||
|
const fieldValues = getFieldValues(response.headersList.get('vary'))
|
||||||
|
|
||||||
|
// 2.2
|
||||||
|
for (const fieldValue of fieldValues) {
|
||||||
|
// 2.2.1
|
||||||
|
if (fieldValue === '*') {
|
||||||
|
responsePromise.reject(webidl.errors.exception({
|
||||||
|
header: 'Cache.addAll',
|
||||||
|
message: 'invalid vary field value'
|
||||||
|
}))
|
||||||
|
|
||||||
|
for (const controller of fetchControllers) {
|
||||||
|
controller.abort()
|
||||||
|
}
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
processResponseEndOfBody (response) {
|
||||||
|
// 1.
|
||||||
|
if (response.aborted) {
|
||||||
|
responsePromise.reject(new DOMException('aborted', 'AbortError'))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2.
|
||||||
|
responsePromise.resolve(response)
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
|
||||||
|
// 5.8
|
||||||
|
responsePromises.push(responsePromise.promise)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 6.
|
||||||
|
const p = Promise.all(responsePromises)
|
||||||
|
|
||||||
|
// 7.
|
||||||
|
const responses = await p
|
||||||
|
|
||||||
|
// 7.1
|
||||||
|
const operations = []
|
||||||
|
|
||||||
|
// 7.2
|
||||||
|
let index = 0
|
||||||
|
|
||||||
|
// 7.3
|
||||||
|
for (const response of responses) {
|
||||||
|
// 7.3.1
|
||||||
|
/** @type {CacheBatchOperation} */
|
||||||
|
const operation = {
|
||||||
|
type: 'put', // 7.3.2
|
||||||
|
request: requestList[index], // 7.3.3
|
||||||
|
response // 7.3.4
|
||||||
|
}
|
||||||
|
|
||||||
|
operations.push(operation) // 7.3.5
|
||||||
|
|
||||||
|
index++ // 7.3.6
|
||||||
|
}
|
||||||
|
|
||||||
|
// 7.5
|
||||||
|
const cacheJobPromise = createDeferredPromise()
|
||||||
|
|
||||||
|
// 7.6.1
|
||||||
|
let errorData = null
|
||||||
|
|
||||||
|
// 7.6.2
|
||||||
|
try {
|
||||||
|
this.#batchCacheOperations(operations)
|
||||||
|
} catch (e) {
|
||||||
|
errorData = e
|
||||||
|
}
|
||||||
|
|
||||||
|
// 7.6.3
|
||||||
|
queueMicrotask(() => {
|
||||||
|
// 7.6.3.1
|
||||||
|
if (errorData === null) {
|
||||||
|
cacheJobPromise.resolve(undefined)
|
||||||
|
} else {
|
||||||
|
// 7.6.3.2
|
||||||
|
cacheJobPromise.reject(errorData)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// 7.7
|
||||||
|
return cacheJobPromise.promise
|
||||||
|
}
|
||||||
|
|
||||||
|
async put (request, response) {
|
||||||
|
webidl.brandCheck(this, Cache)
|
||||||
|
webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' })
|
||||||
|
|
||||||
|
request = webidl.converters.RequestInfo(request)
|
||||||
|
response = webidl.converters.Response(response)
|
||||||
|
|
||||||
|
// 1.
|
||||||
|
let innerRequest = null
|
||||||
|
|
||||||
|
// 2.
|
||||||
|
if (request instanceof Request) {
|
||||||
|
innerRequest = request[kState]
|
||||||
|
} else { // 3.
|
||||||
|
innerRequest = new Request(request)[kState]
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.
|
||||||
|
if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') {
|
||||||
|
throw webidl.errors.exception({
|
||||||
|
header: 'Cache.put',
|
||||||
|
message: 'Expected an http/s scheme when method is not GET'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5.
|
||||||
|
const innerResponse = response[kState]
|
||||||
|
|
||||||
|
// 6.
|
||||||
|
if (innerResponse.status === 206) {
|
||||||
|
throw webidl.errors.exception({
|
||||||
|
header: 'Cache.put',
|
||||||
|
message: 'Got 206 status'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// 7.
|
||||||
|
if (innerResponse.headersList.contains('vary')) {
|
||||||
|
// 7.1.
|
||||||
|
const fieldValues = getFieldValues(innerResponse.headersList.get('vary'))
|
||||||
|
|
||||||
|
// 7.2.
|
||||||
|
for (const fieldValue of fieldValues) {
|
||||||
|
// 7.2.1
|
||||||
|
if (fieldValue === '*') {
|
||||||
|
throw webidl.errors.exception({
|
||||||
|
header: 'Cache.put',
|
||||||
|
message: 'Got * vary field value'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 8.
|
||||||
|
if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) {
|
||||||
|
throw webidl.errors.exception({
|
||||||
|
header: 'Cache.put',
|
||||||
|
message: 'Response body is locked or disturbed'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// 9.
|
||||||
|
const clonedResponse = cloneResponse(innerResponse)
|
||||||
|
|
||||||
|
// 10.
|
||||||
|
const bodyReadPromise = createDeferredPromise()
|
||||||
|
|
||||||
|
// 11.
|
||||||
|
if (innerResponse.body != null) {
|
||||||
|
// 11.1
|
||||||
|
const stream = innerResponse.body.stream
|
||||||
|
|
||||||
|
// 11.2
|
||||||
|
const reader = stream.getReader()
|
||||||
|
|
||||||
|
// 11.3
|
||||||
|
readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject)
|
||||||
|
} else {
|
||||||
|
bodyReadPromise.resolve(undefined)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 12.
|
||||||
|
/** @type {CacheBatchOperation[]} */
|
||||||
|
const operations = []
|
||||||
|
|
||||||
|
// 13.
|
||||||
|
/** @type {CacheBatchOperation} */
|
||||||
|
const operation = {
|
||||||
|
type: 'put', // 14.
|
||||||
|
request: innerRequest, // 15.
|
||||||
|
response: clonedResponse // 16.
|
||||||
|
}
|
||||||
|
|
||||||
|
// 17.
|
||||||
|
operations.push(operation)
|
||||||
|
|
||||||
|
// 19.
|
||||||
|
const bytes = await bodyReadPromise.promise
|
||||||
|
|
||||||
|
if (clonedResponse.body != null) {
|
||||||
|
clonedResponse.body.source = bytes
|
||||||
|
}
|
||||||
|
|
||||||
|
// 19.1
|
||||||
|
const cacheJobPromise = createDeferredPromise()
|
||||||
|
|
||||||
|
// 19.2.1
|
||||||
|
let errorData = null
|
||||||
|
|
||||||
|
// 19.2.2
|
||||||
|
try {
|
||||||
|
this.#batchCacheOperations(operations)
|
||||||
|
} catch (e) {
|
||||||
|
errorData = e
|
||||||
|
}
|
||||||
|
|
||||||
|
// 19.2.3
|
||||||
|
queueMicrotask(() => {
|
||||||
|
// 19.2.3.1
|
||||||
|
if (errorData === null) {
|
||||||
|
cacheJobPromise.resolve()
|
||||||
|
} else { // 19.2.3.2
|
||||||
|
cacheJobPromise.reject(errorData)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
return cacheJobPromise.promise
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete (request, options = {}) {
|
||||||
|
webidl.brandCheck(this, Cache)
|
||||||
|
webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' })
|
||||||
|
|
||||||
|
request = webidl.converters.RequestInfo(request)
|
||||||
|
options = webidl.converters.CacheQueryOptions(options)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @type {Request}
|
||||||
|
*/
|
||||||
|
let r = null
|
||||||
|
|
||||||
|
if (request instanceof Request) {
|
||||||
|
r = request[kState]
|
||||||
|
|
||||||
|
if (r.method !== 'GET' && !options.ignoreMethod) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
assert(typeof request === 'string')
|
||||||
|
|
||||||
|
r = new Request(request)[kState]
|
||||||
|
}
|
||||||
|
|
||||||
|
/** @type {CacheBatchOperation[]} */
|
||||||
|
const operations = []
|
||||||
|
|
||||||
|
/** @type {CacheBatchOperation} */
|
||||||
|
const operation = {
|
||||||
|
type: 'delete',
|
||||||
|
request: r,
|
||||||
|
options
|
||||||
|
}
|
||||||
|
|
||||||
|
operations.push(operation)
|
||||||
|
|
||||||
|
const cacheJobPromise = createDeferredPromise()
|
||||||
|
|
||||||
|
let errorData = null
|
||||||
|
let requestResponses
|
||||||
|
|
||||||
|
try {
|
||||||
|
requestResponses = this.#batchCacheOperations(operations)
|
||||||
|
} catch (e) {
|
||||||
|
errorData = e
|
||||||
|
}
|
||||||
|
|
||||||
|
queueMicrotask(() => {
|
||||||
|
if (errorData === null) {
|
||||||
|
cacheJobPromise.resolve(!!requestResponses?.length)
|
||||||
|
} else {
|
||||||
|
cacheJobPromise.reject(errorData)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
return cacheJobPromise.promise
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://w3c.github.io/ServiceWorker/#dom-cache-keys
|
||||||
|
* @param {any} request
|
||||||
|
* @param {import('../../types/cache').CacheQueryOptions} options
|
||||||
|
* @returns {readonly Request[]}
|
||||||
|
*/
|
||||||
|
async keys (request = undefined, options = {}) {
|
||||||
|
webidl.brandCheck(this, Cache)
|
||||||
|
|
||||||
|
if (request !== undefined) request = webidl.converters.RequestInfo(request)
|
||||||
|
options = webidl.converters.CacheQueryOptions(options)
|
||||||
|
|
||||||
|
// 1.
|
||||||
|
let r = null
|
||||||
|
|
||||||
|
// 2.
|
||||||
|
if (request !== undefined) {
|
||||||
|
// 2.1
|
||||||
|
if (request instanceof Request) {
|
||||||
|
// 2.1.1
|
||||||
|
r = request[kState]
|
||||||
|
|
||||||
|
// 2.1.2
|
||||||
|
if (r.method !== 'GET' && !options.ignoreMethod) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
} else if (typeof request === 'string') { // 2.2
|
||||||
|
r = new Request(request)[kState]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.
|
||||||
|
const promise = createDeferredPromise()
|
||||||
|
|
||||||
|
// 5.
|
||||||
|
// 5.1
|
||||||
|
const requests = []
|
||||||
|
|
||||||
|
// 5.2
|
||||||
|
if (request === undefined) {
|
||||||
|
// 5.2.1
|
||||||
|
for (const requestResponse of this.#relevantRequestResponseList) {
|
||||||
|
// 5.2.1.1
|
||||||
|
requests.push(requestResponse[0])
|
||||||
|
}
|
||||||
|
} else { // 5.3
|
||||||
|
// 5.3.1
|
||||||
|
const requestResponses = this.#queryCache(r, options)
|
||||||
|
|
||||||
|
// 5.3.2
|
||||||
|
for (const requestResponse of requestResponses) {
|
||||||
|
// 5.3.2.1
|
||||||
|
requests.push(requestResponse[0])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5.4
|
||||||
|
queueMicrotask(() => {
|
||||||
|
// 5.4.1
|
||||||
|
const requestList = []
|
||||||
|
|
||||||
|
// 5.4.2
|
||||||
|
for (const request of requests) {
|
||||||
|
const requestObject = new Request('https://a')
|
||||||
|
requestObject[kState] = request
|
||||||
|
requestObject[kHeaders][kHeadersList] = request.headersList
|
||||||
|
requestObject[kHeaders][kGuard] = 'immutable'
|
||||||
|
requestObject[kRealm] = request.client
|
||||||
|
|
||||||
|
// 5.4.2.1
|
||||||
|
requestList.push(requestObject)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5.4.3
|
||||||
|
promise.resolve(Object.freeze(requestList))
|
||||||
|
})
|
||||||
|
|
||||||
|
return promise.promise
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm
|
||||||
|
* @param {CacheBatchOperation[]} operations
|
||||||
|
* @returns {requestResponseList}
|
||||||
|
*/
|
||||||
|
#batchCacheOperations (operations) {
|
||||||
|
// 1.
|
||||||
|
const cache = this.#relevantRequestResponseList
|
||||||
|
|
||||||
|
// 2.
|
||||||
|
const backupCache = [...cache]
|
||||||
|
|
||||||
|
// 3.
|
||||||
|
const addedItems = []
|
||||||
|
|
||||||
|
// 4.1
|
||||||
|
const resultList = []
|
||||||
|
|
||||||
|
try {
|
||||||
|
// 4.2
|
||||||
|
for (const operation of operations) {
|
||||||
|
// 4.2.1
|
||||||
|
if (operation.type !== 'delete' && operation.type !== 'put') {
|
||||||
|
throw webidl.errors.exception({
|
||||||
|
header: 'Cache.#batchCacheOperations',
|
||||||
|
message: 'operation type does not match "delete" or "put"'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.2.2
|
||||||
|
if (operation.type === 'delete' && operation.response != null) {
|
||||||
|
throw webidl.errors.exception({
|
||||||
|
header: 'Cache.#batchCacheOperations',
|
||||||
|
message: 'delete operation should not have an associated response'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.2.3
|
||||||
|
if (this.#queryCache(operation.request, operation.options, addedItems).length) {
|
||||||
|
throw new DOMException('???', 'InvalidStateError')
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.2.4
|
||||||
|
let requestResponses
|
||||||
|
|
||||||
|
// 4.2.5
|
||||||
|
if (operation.type === 'delete') {
|
||||||
|
// 4.2.5.1
|
||||||
|
requestResponses = this.#queryCache(operation.request, operation.options)
|
||||||
|
|
||||||
|
// TODO: the spec is wrong, this is needed to pass WPTs
|
||||||
|
if (requestResponses.length === 0) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.2.5.2
|
||||||
|
for (const requestResponse of requestResponses) {
|
||||||
|
const idx = cache.indexOf(requestResponse)
|
||||||
|
assert(idx !== -1)
|
||||||
|
|
||||||
|
// 4.2.5.2.1
|
||||||
|
cache.splice(idx, 1)
|
||||||
|
}
|
||||||
|
} else if (operation.type === 'put') { // 4.2.6
|
||||||
|
// 4.2.6.1
|
||||||
|
if (operation.response == null) {
|
||||||
|
throw webidl.errors.exception({
|
||||||
|
header: 'Cache.#batchCacheOperations',
|
||||||
|
message: 'put operation should have an associated response'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.2.6.2
|
||||||
|
const r = operation.request
|
||||||
|
|
||||||
|
// 4.2.6.3
|
||||||
|
if (!urlIsHttpHttpsScheme(r.url)) {
|
||||||
|
throw webidl.errors.exception({
|
||||||
|
header: 'Cache.#batchCacheOperations',
|
||||||
|
message: 'expected http or https scheme'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.2.6.4
|
||||||
|
if (r.method !== 'GET') {
|
||||||
|
throw webidl.errors.exception({
|
||||||
|
header: 'Cache.#batchCacheOperations',
|
||||||
|
message: 'not get method'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.2.6.5
|
||||||
|
if (operation.options != null) {
|
||||||
|
throw webidl.errors.exception({
|
||||||
|
header: 'Cache.#batchCacheOperations',
|
||||||
|
message: 'options must not be defined'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.2.6.6
|
||||||
|
requestResponses = this.#queryCache(operation.request)
|
||||||
|
|
||||||
|
// 4.2.6.7
|
||||||
|
for (const requestResponse of requestResponses) {
|
||||||
|
const idx = cache.indexOf(requestResponse)
|
||||||
|
assert(idx !== -1)
|
||||||
|
|
||||||
|
// 4.2.6.7.1
|
||||||
|
cache.splice(idx, 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.2.6.8
|
||||||
|
cache.push([operation.request, operation.response])
|
||||||
|
|
||||||
|
// 4.2.6.10
|
||||||
|
addedItems.push([operation.request, operation.response])
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.2.7
|
||||||
|
resultList.push([operation.request, operation.response])
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4.3
|
||||||
|
return resultList
|
||||||
|
} catch (e) { // 5.
|
||||||
|
// 5.1
|
||||||
|
this.#relevantRequestResponseList.length = 0
|
||||||
|
|
||||||
|
// 5.2
|
||||||
|
this.#relevantRequestResponseList = backupCache
|
||||||
|
|
||||||
|
// 5.3
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://w3c.github.io/ServiceWorker/#query-cache
|
||||||
|
* @param {any} requestQuery
|
||||||
|
* @param {import('../../types/cache').CacheQueryOptions} options
|
||||||
|
* @param {requestResponseList} targetStorage
|
||||||
|
* @returns {requestResponseList}
|
||||||
|
*/
|
||||||
|
#queryCache (requestQuery, options, targetStorage) {
|
||||||
|
/** @type {requestResponseList} */
|
||||||
|
const resultList = []
|
||||||
|
|
||||||
|
const storage = targetStorage ?? this.#relevantRequestResponseList
|
||||||
|
|
||||||
|
for (const requestResponse of storage) {
|
||||||
|
const [cachedRequest, cachedResponse] = requestResponse
|
||||||
|
if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) {
|
||||||
|
resultList.push(requestResponse)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return resultList
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm
|
||||||
|
* @param {any} requestQuery
|
||||||
|
* @param {any} request
|
||||||
|
* @param {any | null} response
|
||||||
|
* @param {import('../../types/cache').CacheQueryOptions | undefined} options
|
||||||
|
* @returns {boolean}
|
||||||
|
*/
|
||||||
|
#requestMatchesCachedItem (requestQuery, request, response = null, options) {
|
||||||
|
// if (options?.ignoreMethod === false && request.method === 'GET') {
|
||||||
|
// return false
|
||||||
|
// }
|
||||||
|
|
||||||
|
const queryURL = new URL(requestQuery.url)
|
||||||
|
|
||||||
|
const cachedURL = new URL(request.url)
|
||||||
|
|
||||||
|
if (options?.ignoreSearch) {
|
||||||
|
cachedURL.search = ''
|
||||||
|
|
||||||
|
queryURL.search = ''
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!urlEquals(queryURL, cachedURL, true)) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
response == null ||
|
||||||
|
options?.ignoreVary ||
|
||||||
|
!response.headersList.contains('vary')
|
||||||
|
) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
const fieldValues = getFieldValues(response.headersList.get('vary'))
|
||||||
|
|
||||||
|
for (const fieldValue of fieldValues) {
|
||||||
|
if (fieldValue === '*') {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
const requestValue = request.headersList.get(fieldValue)
|
||||||
|
const queryValue = requestQuery.headersList.get(fieldValue)
|
||||||
|
|
||||||
|
// If one has the header and the other doesn't, or one has
|
||||||
|
// a different value than the other, return false
|
||||||
|
if (requestValue !== queryValue) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Object.defineProperties(Cache.prototype, {
|
||||||
|
[Symbol.toStringTag]: {
|
||||||
|
value: 'Cache',
|
||||||
|
configurable: true
|
||||||
|
},
|
||||||
|
match: kEnumerableProperty,
|
||||||
|
matchAll: kEnumerableProperty,
|
||||||
|
add: kEnumerableProperty,
|
||||||
|
addAll: kEnumerableProperty,
|
||||||
|
put: kEnumerableProperty,
|
||||||
|
delete: kEnumerableProperty,
|
||||||
|
keys: kEnumerableProperty
|
||||||
|
})
|
||||||
|
|
||||||
|
const cacheQueryOptionConverters = [
|
||||||
|
{
|
||||||
|
key: 'ignoreSearch',
|
||||||
|
converter: webidl.converters.boolean,
|
||||||
|
defaultValue: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'ignoreMethod',
|
||||||
|
converter: webidl.converters.boolean,
|
||||||
|
defaultValue: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'ignoreVary',
|
||||||
|
converter: webidl.converters.boolean,
|
||||||
|
defaultValue: false
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters)
|
||||||
|
|
||||||
|
webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([
|
||||||
|
...cacheQueryOptionConverters,
|
||||||
|
{
|
||||||
|
key: 'cacheName',
|
||||||
|
converter: webidl.converters.DOMString
|
||||||
|
}
|
||||||
|
])
|
||||||
|
|
||||||
|
webidl.converters.Response = webidl.interfaceConverter(Response)
|
||||||
|
|
||||||
|
webidl.converters['sequence<RequestInfo>'] = webidl.sequenceConverter(
|
||||||
|
webidl.converters.RequestInfo
|
||||||
|
)
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
Cache
|
||||||
|
}
|
144
node_modules/undici/lib/cache/cachestorage.js
generated
vendored
Normal file
144
node_modules/undici/lib/cache/cachestorage.js
generated
vendored
Normal file
@ -0,0 +1,144 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const { kConstruct } = require('./symbols')
|
||||||
|
const { Cache } = require('./cache')
|
||||||
|
const { webidl } = require('../fetch/webidl')
|
||||||
|
const { kEnumerableProperty } = require('../core/util')
|
||||||
|
|
||||||
|
class CacheStorage {
|
||||||
|
/**
|
||||||
|
* @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map
|
||||||
|
* @type {Map<string, import('./cache').requestResponseList}
|
||||||
|
*/
|
||||||
|
#caches = new Map()
|
||||||
|
|
||||||
|
constructor () {
|
||||||
|
if (arguments[0] !== kConstruct) {
|
||||||
|
webidl.illegalConstructor()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async match (request, options = {}) {
|
||||||
|
webidl.brandCheck(this, CacheStorage)
|
||||||
|
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.match' })
|
||||||
|
|
||||||
|
request = webidl.converters.RequestInfo(request)
|
||||||
|
options = webidl.converters.MultiCacheQueryOptions(options)
|
||||||
|
|
||||||
|
// 1.
|
||||||
|
if (options.cacheName != null) {
|
||||||
|
// 1.1.1.1
|
||||||
|
if (this.#caches.has(options.cacheName)) {
|
||||||
|
// 1.1.1.1.1
|
||||||
|
const cacheList = this.#caches.get(options.cacheName)
|
||||||
|
const cache = new Cache(kConstruct, cacheList)
|
||||||
|
|
||||||
|
return await cache.match(request, options)
|
||||||
|
}
|
||||||
|
} else { // 2.
|
||||||
|
// 2.2
|
||||||
|
for (const cacheList of this.#caches.values()) {
|
||||||
|
const cache = new Cache(kConstruct, cacheList)
|
||||||
|
|
||||||
|
// 2.2.1.2
|
||||||
|
const response = await cache.match(request, options)
|
||||||
|
|
||||||
|
if (response !== undefined) {
|
||||||
|
return response
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://w3c.github.io/ServiceWorker/#cache-storage-has
|
||||||
|
* @param {string} cacheName
|
||||||
|
* @returns {Promise<boolean>}
|
||||||
|
*/
|
||||||
|
async has (cacheName) {
|
||||||
|
webidl.brandCheck(this, CacheStorage)
|
||||||
|
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' })
|
||||||
|
|
||||||
|
cacheName = webidl.converters.DOMString(cacheName)
|
||||||
|
|
||||||
|
// 2.1.1
|
||||||
|
// 2.2
|
||||||
|
return this.#caches.has(cacheName)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open
|
||||||
|
* @param {string} cacheName
|
||||||
|
* @returns {Promise<Cache>}
|
||||||
|
*/
|
||||||
|
async open (cacheName) {
|
||||||
|
webidl.brandCheck(this, CacheStorage)
|
||||||
|
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' })
|
||||||
|
|
||||||
|
cacheName = webidl.converters.DOMString(cacheName)
|
||||||
|
|
||||||
|
// 2.1
|
||||||
|
if (this.#caches.has(cacheName)) {
|
||||||
|
// await caches.open('v1') !== await caches.open('v1')
|
||||||
|
|
||||||
|
// 2.1.1
|
||||||
|
const cache = this.#caches.get(cacheName)
|
||||||
|
|
||||||
|
// 2.1.1.1
|
||||||
|
return new Cache(kConstruct, cache)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2.2
|
||||||
|
const cache = []
|
||||||
|
|
||||||
|
// 2.3
|
||||||
|
this.#caches.set(cacheName, cache)
|
||||||
|
|
||||||
|
// 2.4
|
||||||
|
return new Cache(kConstruct, cache)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://w3c.github.io/ServiceWorker/#cache-storage-delete
|
||||||
|
* @param {string} cacheName
|
||||||
|
* @returns {Promise<boolean>}
|
||||||
|
*/
|
||||||
|
async delete (cacheName) {
|
||||||
|
webidl.brandCheck(this, CacheStorage)
|
||||||
|
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' })
|
||||||
|
|
||||||
|
cacheName = webidl.converters.DOMString(cacheName)
|
||||||
|
|
||||||
|
return this.#caches.delete(cacheName)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://w3c.github.io/ServiceWorker/#cache-storage-keys
|
||||||
|
* @returns {string[]}
|
||||||
|
*/
|
||||||
|
async keys () {
|
||||||
|
webidl.brandCheck(this, CacheStorage)
|
||||||
|
|
||||||
|
// 2.1
|
||||||
|
const keys = this.#caches.keys()
|
||||||
|
|
||||||
|
// 2.2
|
||||||
|
return [...keys]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Object.defineProperties(CacheStorage.prototype, {
|
||||||
|
[Symbol.toStringTag]: {
|
||||||
|
value: 'CacheStorage',
|
||||||
|
configurable: true
|
||||||
|
},
|
||||||
|
match: kEnumerableProperty,
|
||||||
|
has: kEnumerableProperty,
|
||||||
|
open: kEnumerableProperty,
|
||||||
|
delete: kEnumerableProperty,
|
||||||
|
keys: kEnumerableProperty
|
||||||
|
})
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
CacheStorage
|
||||||
|
}
|
5
node_modules/undici/lib/cache/symbols.js
generated
vendored
Normal file
5
node_modules/undici/lib/cache/symbols.js
generated
vendored
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
kConstruct: Symbol('constructable')
|
||||||
|
}
|
49
node_modules/undici/lib/cache/util.js
generated
vendored
Normal file
49
node_modules/undici/lib/cache/util.js
generated
vendored
Normal file
@ -0,0 +1,49 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const assert = require('assert')
|
||||||
|
const { URLSerializer } = require('../fetch/dataURL')
|
||||||
|
const { isValidHeaderName } = require('../fetch/util')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://url.spec.whatwg.org/#concept-url-equals
|
||||||
|
* @param {URL} A
|
||||||
|
* @param {URL} B
|
||||||
|
* @param {boolean | undefined} excludeFragment
|
||||||
|
* @returns {boolean}
|
||||||
|
*/
|
||||||
|
function urlEquals (A, B, excludeFragment = false) {
|
||||||
|
const serializedA = URLSerializer(A, excludeFragment)
|
||||||
|
|
||||||
|
const serializedB = URLSerializer(B, excludeFragment)
|
||||||
|
|
||||||
|
return serializedA === serializedB
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262
|
||||||
|
* @param {string} header
|
||||||
|
*/
|
||||||
|
function fieldValues (header) {
|
||||||
|
assert(header !== null)
|
||||||
|
|
||||||
|
const values = []
|
||||||
|
|
||||||
|
for (let value of header.split(',')) {
|
||||||
|
value = value.trim()
|
||||||
|
|
||||||
|
if (!value.length) {
|
||||||
|
continue
|
||||||
|
} else if (!isValidHeaderName(value)) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
values.push(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
return values
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
urlEquals,
|
||||||
|
fieldValues
|
||||||
|
}
|
2287
node_modules/undici/lib/client.js
generated
vendored
Normal file
2287
node_modules/undici/lib/client.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
48
node_modules/undici/lib/compat/dispatcher-weakref.js
generated
vendored
Normal file
48
node_modules/undici/lib/compat/dispatcher-weakref.js
generated
vendored
Normal file
@ -0,0 +1,48 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
/* istanbul ignore file: only for Node 12 */
|
||||||
|
|
||||||
|
const { kConnected, kSize } = require('../core/symbols')
|
||||||
|
|
||||||
|
class CompatWeakRef {
|
||||||
|
constructor (value) {
|
||||||
|
this.value = value
|
||||||
|
}
|
||||||
|
|
||||||
|
deref () {
|
||||||
|
return this.value[kConnected] === 0 && this.value[kSize] === 0
|
||||||
|
? undefined
|
||||||
|
: this.value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class CompatFinalizer {
|
||||||
|
constructor (finalizer) {
|
||||||
|
this.finalizer = finalizer
|
||||||
|
}
|
||||||
|
|
||||||
|
register (dispatcher, key) {
|
||||||
|
if (dispatcher.on) {
|
||||||
|
dispatcher.on('disconnect', () => {
|
||||||
|
if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) {
|
||||||
|
this.finalizer(key)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = function () {
|
||||||
|
// FIXME: remove workaround when the Node bug is fixed
|
||||||
|
// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
|
||||||
|
if (process.env.NODE_V8_COVERAGE) {
|
||||||
|
return {
|
||||||
|
WeakRef: CompatWeakRef,
|
||||||
|
FinalizationRegistry: CompatFinalizer
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
WeakRef: global.WeakRef || CompatWeakRef,
|
||||||
|
FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer
|
||||||
|
}
|
||||||
|
}
|
12
node_modules/undici/lib/cookies/constants.js
generated
vendored
Normal file
12
node_modules/undici/lib/cookies/constants.js
generated
vendored
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size
|
||||||
|
const maxAttributeValueSize = 1024
|
||||||
|
|
||||||
|
// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size
|
||||||
|
const maxNameValuePairSize = 4096
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
maxAttributeValueSize,
|
||||||
|
maxNameValuePairSize
|
||||||
|
}
|
184
node_modules/undici/lib/cookies/index.js
generated
vendored
Normal file
184
node_modules/undici/lib/cookies/index.js
generated
vendored
Normal file
@ -0,0 +1,184 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const { parseSetCookie } = require('./parse')
|
||||||
|
const { stringify, getHeadersList } = require('./util')
|
||||||
|
const { webidl } = require('../fetch/webidl')
|
||||||
|
const { Headers } = require('../fetch/headers')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @typedef {Object} Cookie
|
||||||
|
* @property {string} name
|
||||||
|
* @property {string} value
|
||||||
|
* @property {Date|number|undefined} expires
|
||||||
|
* @property {number|undefined} maxAge
|
||||||
|
* @property {string|undefined} domain
|
||||||
|
* @property {string|undefined} path
|
||||||
|
* @property {boolean|undefined} secure
|
||||||
|
* @property {boolean|undefined} httpOnly
|
||||||
|
* @property {'Strict'|'Lax'|'None'} sameSite
|
||||||
|
* @property {string[]} unparsed
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {Headers} headers
|
||||||
|
* @returns {Record<string, string>}
|
||||||
|
*/
|
||||||
|
function getCookies (headers) {
|
||||||
|
webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' })
|
||||||
|
|
||||||
|
webidl.brandCheck(headers, Headers, { strict: false })
|
||||||
|
|
||||||
|
const cookie = headers.get('cookie')
|
||||||
|
const out = {}
|
||||||
|
|
||||||
|
if (!cookie) {
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const piece of cookie.split(';')) {
|
||||||
|
const [name, ...value] = piece.split('=')
|
||||||
|
|
||||||
|
out[name.trim()] = value.join('=')
|
||||||
|
}
|
||||||
|
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {Headers} headers
|
||||||
|
* @param {string} name
|
||||||
|
* @param {{ path?: string, domain?: string }|undefined} attributes
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
function deleteCookie (headers, name, attributes) {
|
||||||
|
webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' })
|
||||||
|
|
||||||
|
webidl.brandCheck(headers, Headers, { strict: false })
|
||||||
|
|
||||||
|
name = webidl.converters.DOMString(name)
|
||||||
|
attributes = webidl.converters.DeleteCookieAttributes(attributes)
|
||||||
|
|
||||||
|
// Matches behavior of
|
||||||
|
// https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278
|
||||||
|
setCookie(headers, {
|
||||||
|
name,
|
||||||
|
value: '',
|
||||||
|
expires: new Date(0),
|
||||||
|
...attributes
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {Headers} headers
|
||||||
|
* @returns {Cookie[]}
|
||||||
|
*/
|
||||||
|
function getSetCookies (headers) {
|
||||||
|
webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' })
|
||||||
|
|
||||||
|
webidl.brandCheck(headers, Headers, { strict: false })
|
||||||
|
|
||||||
|
const cookies = getHeadersList(headers).cookies
|
||||||
|
|
||||||
|
if (!cookies) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
// In older versions of undici, cookies is a list of name:value.
|
||||||
|
return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {Headers} headers
|
||||||
|
* @param {Cookie} cookie
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
function setCookie (headers, cookie) {
|
||||||
|
webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' })
|
||||||
|
|
||||||
|
webidl.brandCheck(headers, Headers, { strict: false })
|
||||||
|
|
||||||
|
cookie = webidl.converters.Cookie(cookie)
|
||||||
|
|
||||||
|
const str = stringify(cookie)
|
||||||
|
|
||||||
|
if (str) {
|
||||||
|
headers.append('Set-Cookie', stringify(cookie))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([
|
||||||
|
{
|
||||||
|
converter: webidl.nullableConverter(webidl.converters.DOMString),
|
||||||
|
key: 'path',
|
||||||
|
defaultValue: null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
converter: webidl.nullableConverter(webidl.converters.DOMString),
|
||||||
|
key: 'domain',
|
||||||
|
defaultValue: null
|
||||||
|
}
|
||||||
|
])
|
||||||
|
|
||||||
|
webidl.converters.Cookie = webidl.dictionaryConverter([
|
||||||
|
{
|
||||||
|
converter: webidl.converters.DOMString,
|
||||||
|
key: 'name'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
converter: webidl.converters.DOMString,
|
||||||
|
key: 'value'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
converter: webidl.nullableConverter((value) => {
|
||||||
|
if (typeof value === 'number') {
|
||||||
|
return webidl.converters['unsigned long long'](value)
|
||||||
|
}
|
||||||
|
|
||||||
|
return new Date(value)
|
||||||
|
}),
|
||||||
|
key: 'expires',
|
||||||
|
defaultValue: null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
converter: webidl.nullableConverter(webidl.converters['long long']),
|
||||||
|
key: 'maxAge',
|
||||||
|
defaultValue: null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
converter: webidl.nullableConverter(webidl.converters.DOMString),
|
||||||
|
key: 'domain',
|
||||||
|
defaultValue: null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
converter: webidl.nullableConverter(webidl.converters.DOMString),
|
||||||
|
key: 'path',
|
||||||
|
defaultValue: null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
converter: webidl.nullableConverter(webidl.converters.boolean),
|
||||||
|
key: 'secure',
|
||||||
|
defaultValue: null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
converter: webidl.nullableConverter(webidl.converters.boolean),
|
||||||
|
key: 'httpOnly',
|
||||||
|
defaultValue: null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
converter: webidl.converters.USVString,
|
||||||
|
key: 'sameSite',
|
||||||
|
allowedValues: ['Strict', 'Lax', 'None']
|
||||||
|
},
|
||||||
|
{
|
||||||
|
converter: webidl.sequenceConverter(webidl.converters.DOMString),
|
||||||
|
key: 'unparsed',
|
||||||
|
defaultValue: []
|
||||||
|
}
|
||||||
|
])
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
getCookies,
|
||||||
|
deleteCookie,
|
||||||
|
getSetCookies,
|
||||||
|
setCookie
|
||||||
|
}
|
317
node_modules/undici/lib/cookies/parse.js
generated
vendored
Normal file
317
node_modules/undici/lib/cookies/parse.js
generated
vendored
Normal file
@ -0,0 +1,317 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')
|
||||||
|
const { isCTLExcludingHtab } = require('./util')
|
||||||
|
const { collectASequenceOfCodePointsFast } = require('../fetch/dataURL')
|
||||||
|
const assert = require('assert')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @description Parses the field-value attributes of a set-cookie header string.
|
||||||
|
* @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
|
||||||
|
* @param {string} header
|
||||||
|
* @returns if the header is invalid, null will be returned
|
||||||
|
*/
|
||||||
|
function parseSetCookie (header) {
|
||||||
|
// 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F
|
||||||
|
// character (CTL characters excluding HTAB): Abort these steps and
|
||||||
|
// ignore the set-cookie-string entirely.
|
||||||
|
if (isCTLExcludingHtab(header)) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
let nameValuePair = ''
|
||||||
|
let unparsedAttributes = ''
|
||||||
|
let name = ''
|
||||||
|
let value = ''
|
||||||
|
|
||||||
|
// 2. If the set-cookie-string contains a %x3B (";") character:
|
||||||
|
if (header.includes(';')) {
|
||||||
|
// 1. The name-value-pair string consists of the characters up to,
|
||||||
|
// but not including, the first %x3B (";"), and the unparsed-
|
||||||
|
// attributes consist of the remainder of the set-cookie-string
|
||||||
|
// (including the %x3B (";") in question).
|
||||||
|
const position = { position: 0 }
|
||||||
|
|
||||||
|
nameValuePair = collectASequenceOfCodePointsFast(';', header, position)
|
||||||
|
unparsedAttributes = header.slice(position.position)
|
||||||
|
} else {
|
||||||
|
// Otherwise:
|
||||||
|
|
||||||
|
// 1. The name-value-pair string consists of all the characters
|
||||||
|
// contained in the set-cookie-string, and the unparsed-
|
||||||
|
// attributes is the empty string.
|
||||||
|
nameValuePair = header
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. If the name-value-pair string lacks a %x3D ("=") character, then
|
||||||
|
// the name string is empty, and the value string is the value of
|
||||||
|
// name-value-pair.
|
||||||
|
if (!nameValuePair.includes('=')) {
|
||||||
|
value = nameValuePair
|
||||||
|
} else {
|
||||||
|
// Otherwise, the name string consists of the characters up to, but
|
||||||
|
// not including, the first %x3D ("=") character, and the (possibly
|
||||||
|
// empty) value string consists of the characters after the first
|
||||||
|
// %x3D ("=") character.
|
||||||
|
const position = { position: 0 }
|
||||||
|
name = collectASequenceOfCodePointsFast(
|
||||||
|
'=',
|
||||||
|
nameValuePair,
|
||||||
|
position
|
||||||
|
)
|
||||||
|
value = nameValuePair.slice(position.position + 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. Remove any leading or trailing WSP characters from the name
|
||||||
|
// string and the value string.
|
||||||
|
name = name.trim()
|
||||||
|
value = value.trim()
|
||||||
|
|
||||||
|
// 5. If the sum of the lengths of the name string and the value string
|
||||||
|
// is more than 4096 octets, abort these steps and ignore the set-
|
||||||
|
// cookie-string entirely.
|
||||||
|
if (name.length + value.length > maxNameValuePairSize) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
// 6. The cookie-name is the name string, and the cookie-value is the
|
||||||
|
// value string.
|
||||||
|
return {
|
||||||
|
name, value, ...parseUnparsedAttributes(unparsedAttributes)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses the remaining attributes of a set-cookie header
|
||||||
|
* @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
|
||||||
|
* @param {string} unparsedAttributes
|
||||||
|
* @param {[Object.<string, unknown>]={}} cookieAttributeList
|
||||||
|
*/
|
||||||
|
function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) {
|
||||||
|
// 1. If the unparsed-attributes string is empty, skip the rest of
|
||||||
|
// these steps.
|
||||||
|
if (unparsedAttributes.length === 0) {
|
||||||
|
return cookieAttributeList
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Discard the first character of the unparsed-attributes (which
|
||||||
|
// will be a %x3B (";") character).
|
||||||
|
assert(unparsedAttributes[0] === ';')
|
||||||
|
unparsedAttributes = unparsedAttributes.slice(1)
|
||||||
|
|
||||||
|
let cookieAv = ''
|
||||||
|
|
||||||
|
// 3. If the remaining unparsed-attributes contains a %x3B (";")
|
||||||
|
// character:
|
||||||
|
if (unparsedAttributes.includes(';')) {
|
||||||
|
// 1. Consume the characters of the unparsed-attributes up to, but
|
||||||
|
// not including, the first %x3B (";") character.
|
||||||
|
cookieAv = collectASequenceOfCodePointsFast(
|
||||||
|
';',
|
||||||
|
unparsedAttributes,
|
||||||
|
{ position: 0 }
|
||||||
|
)
|
||||||
|
unparsedAttributes = unparsedAttributes.slice(cookieAv.length)
|
||||||
|
} else {
|
||||||
|
// Otherwise:
|
||||||
|
|
||||||
|
// 1. Consume the remainder of the unparsed-attributes.
|
||||||
|
cookieAv = unparsedAttributes
|
||||||
|
unparsedAttributes = ''
|
||||||
|
}
|
||||||
|
|
||||||
|
// Let the cookie-av string be the characters consumed in this step.
|
||||||
|
|
||||||
|
let attributeName = ''
|
||||||
|
let attributeValue = ''
|
||||||
|
|
||||||
|
// 4. If the cookie-av string contains a %x3D ("=") character:
|
||||||
|
if (cookieAv.includes('=')) {
|
||||||
|
// 1. The (possibly empty) attribute-name string consists of the
|
||||||
|
// characters up to, but not including, the first %x3D ("=")
|
||||||
|
// character, and the (possibly empty) attribute-value string
|
||||||
|
// consists of the characters after the first %x3D ("=")
|
||||||
|
// character.
|
||||||
|
const position = { position: 0 }
|
||||||
|
|
||||||
|
attributeName = collectASequenceOfCodePointsFast(
|
||||||
|
'=',
|
||||||
|
cookieAv,
|
||||||
|
position
|
||||||
|
)
|
||||||
|
attributeValue = cookieAv.slice(position.position + 1)
|
||||||
|
} else {
|
||||||
|
// Otherwise:
|
||||||
|
|
||||||
|
// 1. The attribute-name string consists of the entire cookie-av
|
||||||
|
// string, and the attribute-value string is empty.
|
||||||
|
attributeName = cookieAv
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5. Remove any leading or trailing WSP characters from the attribute-
|
||||||
|
// name string and the attribute-value string.
|
||||||
|
attributeName = attributeName.trim()
|
||||||
|
attributeValue = attributeValue.trim()
|
||||||
|
|
||||||
|
// 6. If the attribute-value is longer than 1024 octets, ignore the
|
||||||
|
// cookie-av string and return to Step 1 of this algorithm.
|
||||||
|
if (attributeValue.length > maxAttributeValueSize) {
|
||||||
|
return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 7. Process the attribute-name and attribute-value according to the
|
||||||
|
// requirements in the following subsections. (Notice that
|
||||||
|
// attributes with unrecognized attribute-names are ignored.)
|
||||||
|
const attributeNameLowercase = attributeName.toLowerCase()
|
||||||
|
|
||||||
|
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1
|
||||||
|
// If the attribute-name case-insensitively matches the string
|
||||||
|
// "Expires", the user agent MUST process the cookie-av as follows.
|
||||||
|
if (attributeNameLowercase === 'expires') {
|
||||||
|
// 1. Let the expiry-time be the result of parsing the attribute-value
|
||||||
|
// as cookie-date (see Section 5.1.1).
|
||||||
|
const expiryTime = new Date(attributeValue)
|
||||||
|
|
||||||
|
// 2. If the attribute-value failed to parse as a cookie date, ignore
|
||||||
|
// the cookie-av.
|
||||||
|
|
||||||
|
cookieAttributeList.expires = expiryTime
|
||||||
|
} else if (attributeNameLowercase === 'max-age') {
|
||||||
|
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2
|
||||||
|
// If the attribute-name case-insensitively matches the string "Max-
|
||||||
|
// Age", the user agent MUST process the cookie-av as follows.
|
||||||
|
|
||||||
|
// 1. If the first character of the attribute-value is not a DIGIT or a
|
||||||
|
// "-" character, ignore the cookie-av.
|
||||||
|
const charCode = attributeValue.charCodeAt(0)
|
||||||
|
|
||||||
|
if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') {
|
||||||
|
return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. If the remainder of attribute-value contains a non-DIGIT
|
||||||
|
// character, ignore the cookie-av.
|
||||||
|
if (!/^\d+$/.test(attributeValue)) {
|
||||||
|
return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Let delta-seconds be the attribute-value converted to an integer.
|
||||||
|
const deltaSeconds = Number(attributeValue)
|
||||||
|
|
||||||
|
// 4. Let cookie-age-limit be the maximum age of the cookie (which
|
||||||
|
// SHOULD be 400 days or less, see Section 4.1.2.2).
|
||||||
|
|
||||||
|
// 5. Set delta-seconds to the smaller of its present value and cookie-
|
||||||
|
// age-limit.
|
||||||
|
// deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs)
|
||||||
|
|
||||||
|
// 6. If delta-seconds is less than or equal to zero (0), let expiry-
|
||||||
|
// time be the earliest representable date and time. Otherwise, let
|
||||||
|
// the expiry-time be the current date and time plus delta-seconds
|
||||||
|
// seconds.
|
||||||
|
// const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds
|
||||||
|
|
||||||
|
// 7. Append an attribute to the cookie-attribute-list with an
|
||||||
|
// attribute-name of Max-Age and an attribute-value of expiry-time.
|
||||||
|
cookieAttributeList.maxAge = deltaSeconds
|
||||||
|
} else if (attributeNameLowercase === 'domain') {
|
||||||
|
// https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3
|
||||||
|
// If the attribute-name case-insensitively matches the string "Domain",
|
||||||
|
// the user agent MUST process the cookie-av as follows.
|
||||||
|
|
||||||
|
// 1. Let cookie-domain be the attribute-value.
|
||||||
|
let cookieDomain = attributeValue
|
||||||
|
|
||||||
|
// 2. If cookie-domain starts with %x2E ("."), let cookie-domain be
|
||||||
|
// cookie-domain without its leading %x2E (".").
|
||||||
|
if (cookieDomain[0] === '.') {
|
||||||
|
cookieDomain = cookieDomain.slice(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Convert the cookie-domain to lower case.
|
||||||
|
cookieDomain = cookieDomain.toLowerCase()
|
||||||
|
|
||||||
|
// 4. Append an attribute to the cookie-attribute-list with an
|
||||||
|
// attribute-name of Domain and an attribute-value of cookie-domain.
|
||||||
|
cookieAttributeList.domain = cookieDomain
|
||||||
|
} else if (attributeNameLowercase === 'path') {
|
||||||
|
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4
    // If the attribute-name case-insensitively matches the string "Path",
    // the user agent MUST process the cookie-av as follows.

    // 1. If the attribute-value is empty or if the first character of the
    //    attribute-value is not %x2F ("/"):
    let cookiePath = ''
    if (attributeValue.length === 0 || attributeValue[0] !== '/') {
      // 1. Let cookie-path be the default-path.
      cookiePath = '/'
    } else {
      // Otherwise:

      // 1. Let cookie-path be the attribute-value.
      cookiePath = attributeValue
    }

    // 2. Append an attribute to the cookie-attribute-list with an
    //    attribute-name of Path and an attribute-value of cookie-path.
    cookieAttributeList.path = cookiePath
  } else if (attributeNameLowercase === 'secure') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5
    // If the attribute-name case-insensitively matches the string "Secure",
    // the user agent MUST append an attribute to the cookie-attribute-list
    // with an attribute-name of Secure and an empty attribute-value.

    cookieAttributeList.secure = true
  } else if (attributeNameLowercase === 'httponly') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6
    // If the attribute-name case-insensitively matches the string
    // "HttpOnly", the user agent MUST append an attribute to the cookie-
    // attribute-list with an attribute-name of HttpOnly and an empty
    // attribute-value.

    cookieAttributeList.httpOnly = true
  } else if (attributeNameLowercase === 'samesite') {
    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7
    // If the attribute-name case-insensitively matches the string
    // "SameSite", the user agent MUST process the cookie-av as follows:

    // 1. Let enforcement be "Default".
    let enforcement = 'Default'

    const attributeValueLowercase = attributeValue.toLowerCase()

    // 2. If cookie-av's attribute-value is a case-insensitive match for
    //    "None", set enforcement to "None".
    if (attributeValueLowercase.includes('none')) {
      enforcement = 'None'
    }

    // 3. If cookie-av's attribute-value is a case-insensitive match for
    //    "Strict", set enforcement to "Strict".
    if (attributeValueLowercase.includes('strict')) {
      enforcement = 'Strict'
    }

    // 4. If cookie-av's attribute-value is a case-insensitive match for
    //    "Lax", set enforcement to "Lax".
    if (attributeValueLowercase.includes('lax')) {
      enforcement = 'Lax'
    }

    // 5. Append an attribute to the cookie-attribute-list with an
    //    attribute-name of "SameSite" and an attribute-value of
    //    enforcement.
    cookieAttributeList.sameSite = enforcement
  } else {
    cookieAttributeList.unparsed ??= []

    cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`)
  }

  // 8. Return to Step 1 of this algorithm.
  return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
}

module.exports = {
  parseSetCookie,
  parseUnparsedAttributes
}
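For context, here is a minimal usage sketch of the two exports above. It is not part of the vendored file; the require path and the exact return shape are assumptions inferred from the attribute handling shown in this hunk.

// Hypothetical driver for the parser above; parseSetCookie is assumed to take a
// raw Set-Cookie header string and return { name, value, ...attributes }.
const { parseSetCookie } = require('./node_modules/undici/lib/cookies/parse')

const cookie = parseSetCookie('sid=abc123; Path=/app; Secure; HttpOnly; SameSite=lax')
// Roughly: { name: 'sid', value: 'abc123', path: '/app',
//            secure: true, httpOnly: true, sameSite: 'Lax' }
console.log(cookie)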
291
node_modules/undici/lib/cookies/util.js
generated
vendored
Normal file
@@ -0,0 +1,291 @@
'use strict'

const assert = require('assert')
const { kHeadersList } = require('../core/symbols')

function isCTLExcludingHtab (value) {
  if (value.length === 0) {
    return false
  }

  for (const char of value) {
    const code = char.charCodeAt(0)

    // A CTL is %x00-08 / %x0A-1F / %x7F; HTAB (%x09) is allowed.
    if (
      (code >= 0x00 && code <= 0x08) ||
      (code >= 0x0A && code <= 0x1F) ||
      code === 0x7F
    ) {
      return true
    }
  }

  return false
}

/**
 CHAR           = <any US-ASCII character (octets 0 - 127)>
 token          = 1*<any CHAR except CTLs or separators>
 separators     = "(" | ")" | "<" | ">" | "@"
                | "," | ";" | ":" | "\" | <">
                | "/" | "[" | "]" | "?" | "="
                | "{" | "}" | SP | HT
 * @param {string} name
 */
function validateCookieName (name) {
  for (const char of name) {
    const code = char.charCodeAt(0)

    if (
      (code <= 0x20 || code > 0x7F) ||
      char === '(' ||
      char === ')' ||
      char === '>' ||
      char === '<' ||
      char === '@' ||
      char === ',' ||
      char === ';' ||
      char === ':' ||
      char === '\\' ||
      char === '"' ||
      char === '/' ||
      char === '[' ||
      char === ']' ||
      char === '?' ||
      char === '=' ||
      char === '{' ||
      char === '}'
    ) {
      throw new Error('Invalid cookie name')
    }
  }
}

/**
 cookie-value      = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
 cookie-octet      = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
                       ; US-ASCII characters excluding CTLs,
                       ; whitespace DQUOTE, comma, semicolon,
                       ; and backslash
 * @param {string} value
 */
function validateCookieValue (value) {
  for (const char of value) {
    const code = char.charCodeAt(0)

    if (
      code < 0x21 || // exclude CTLs (0-31)
      code === 0x22 ||
      code === 0x2C ||
      code === 0x3B ||
      code === 0x5C ||
      code > 0x7E // non-ascii
    ) {
      throw new Error('Invalid header value')
    }
  }
}

/**
 * path-value        = <any CHAR except CTLs or ";">
 * @param {string} path
 */
function validateCookiePath (path) {
  for (const char of path) {
    const code = char.charCodeAt(0)

    if (code < 0x21 || char === ';') {
      throw new Error('Invalid cookie path')
    }
  }
}

/**
 * I have no idea why these values aren't allowed to be honest,
 * but Deno tests these. - Khafra
 * @param {string} domain
 */
function validateCookieDomain (domain) {
  if (
    domain.startsWith('-') ||
    domain.endsWith('.') ||
    domain.endsWith('-')
  ) {
    throw new Error('Invalid cookie domain')
  }
}

/**
 * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1
 * @param {number|Date} date
  IMF-fixdate  = day-name "," SP date1 SP time-of-day SP GMT
    ; fixed length/zone/capitalization subset of the format
    ; see Section 3.3 of [RFC5322]

  day-name     = %x4D.6F.6E ; "Mon", case-sensitive
               / %x54.75.65 ; "Tue", case-sensitive
               / %x57.65.64 ; "Wed", case-sensitive
               / %x54.68.75 ; "Thu", case-sensitive
               / %x46.72.69 ; "Fri", case-sensitive
               / %x53.61.74 ; "Sat", case-sensitive
               / %x53.75.6E ; "Sun", case-sensitive
  date1        = day SP month SP year
               ; e.g., 02 Jun 1982

  day          = 2DIGIT
  month        = %x4A.61.6E ; "Jan", case-sensitive
               / %x46.65.62 ; "Feb", case-sensitive
               / %x4D.61.72 ; "Mar", case-sensitive
               / %x41.70.72 ; "Apr", case-sensitive
               / %x4D.61.79 ; "May", case-sensitive
               / %x4A.75.6E ; "Jun", case-sensitive
               / %x4A.75.6C ; "Jul", case-sensitive
               / %x41.75.67 ; "Aug", case-sensitive
               / %x53.65.70 ; "Sep", case-sensitive
               / %x4F.63.74 ; "Oct", case-sensitive
               / %x4E.6F.76 ; "Nov", case-sensitive
               / %x44.65.63 ; "Dec", case-sensitive
  year         = 4DIGIT

  GMT          = %x47.4D.54 ; "GMT", case-sensitive

  time-of-day  = hour ":" minute ":" second
               ; 00:00:00 - 23:59:60 (leap second)

  hour         = 2DIGIT
  minute       = 2DIGIT
  second       = 2DIGIT
 */
function toIMFDate (date) {
  if (typeof date === 'number') {
    date = new Date(date)
  }

  const days = [
    'Sun', 'Mon', 'Tue', 'Wed',
    'Thu', 'Fri', 'Sat'
  ]

  const months = [
    'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
    'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
  ]

  const dayName = days[date.getUTCDay()]
  const day = date.getUTCDate().toString().padStart(2, '0')
  const month = months[date.getUTCMonth()]
  const year = date.getUTCFullYear()
  const hour = date.getUTCHours().toString().padStart(2, '0')
  const minute = date.getUTCMinutes().toString().padStart(2, '0')
  const second = date.getUTCSeconds().toString().padStart(2, '0')

  return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT`
}

/**
 max-age-av        = "Max-Age=" non-zero-digit *DIGIT
                       ; In practice, both expires-av and max-age-av
                       ; are limited to dates representable by the
                       ; user agent.
 * @param {number} maxAge
 */
function validateCookieMaxAge (maxAge) {
  if (maxAge < 0) {
    throw new Error('Invalid cookie max-age')
  }
}

/**
 * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
 * @param {import('./index').Cookie} cookie
 */
function stringify (cookie) {
  if (cookie.name.length === 0) {
    return null
  }

  validateCookieName(cookie.name)
  validateCookieValue(cookie.value)

  const out = [`${cookie.name}=${cookie.value}`]

  // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1
  // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2
  if (cookie.name.startsWith('__Secure-')) {
    cookie.secure = true
  }

  if (cookie.name.startsWith('__Host-')) {
    cookie.secure = true
    cookie.domain = null
    cookie.path = '/'
  }

  if (cookie.secure) {
    out.push('Secure')
  }

  if (cookie.httpOnly) {
    out.push('HttpOnly')
  }

  if (typeof cookie.maxAge === 'number') {
    validateCookieMaxAge(cookie.maxAge)
    out.push(`Max-Age=${cookie.maxAge}`)
  }

  if (cookie.domain) {
    validateCookieDomain(cookie.domain)
    out.push(`Domain=${cookie.domain}`)
  }

  if (cookie.path) {
    validateCookiePath(cookie.path)
    out.push(`Path=${cookie.path}`)
  }

  if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') {
    out.push(`Expires=${toIMFDate(cookie.expires)}`)
  }

  if (cookie.sameSite) {
    out.push(`SameSite=${cookie.sameSite}`)
  }

  for (const part of cookie.unparsed) {
    if (!part.includes('=')) {
      throw new Error('Invalid unparsed')
    }

    const [key, ...value] = part.split('=')

    out.push(`${key.trim()}=${value.join('=')}`)
  }

  return out.join('; ')
}

let kHeadersListNode

function getHeadersList (headers) {
  if (headers[kHeadersList]) {
    return headers[kHeadersList]
  }

  if (!kHeadersListNode) {
    kHeadersListNode = Object.getOwnPropertySymbols(headers).find(
      (symbol) => symbol.description === 'headers list'
    )

    assert(kHeadersListNode, 'Headers cannot be parsed')
  }

  const headersList = headers[kHeadersListNode]
  assert(headersList)

  return headersList
}

module.exports = {
  isCTLExcludingHtab,
  stringify,
  getHeadersList
}
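A short usage sketch of stringify() from this file follows; it is not part of the vendored source. The Cookie shape is inferred from the fields the function reads above, and unparsed is passed as an empty array because the function iterates it.

// Hypothetical call; the require path is an assumption.
const { stringify } = require('./node_modules/undici/lib/cookies/util')

const header = stringify({
  name: '__Host-session',
  value: 'abc123',
  secure: false,       // forced to true by the __Host- prefix handling
  httpOnly: true,
  path: '/ignored',    // forced to '/' by the __Host- prefix handling
  domain: null,
  maxAge: 3600,
  expires: new Date(Date.UTC(1982, 5, 2)), // rendered via toIMFDate()
  sameSite: 'Strict',
  unparsed: []
})
// => '__Host-session=abc123; Secure; HttpOnly; Max-Age=3600; Path=/;
//     Expires=Wed, 02 Jun 1982 00:00:00 GMT; SameSite=Strict'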
189
node_modules/undici/lib/core/connect.js
generated
vendored
Normal file
@@ -0,0 +1,189 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const net = require('net')
|
||||||
|
const assert = require('assert')
|
||||||
|
const util = require('./util')
|
||||||
|
const { InvalidArgumentError, ConnectTimeoutError } = require('./errors')
|
||||||
|
|
||||||
|
let tls // include tls conditionally since it is not always available
|
||||||
|
|
||||||
|
// TODO: session re-use does not wait for the first
|
||||||
|
// connection to resolve the session and might therefore
|
||||||
|
// resolve the same servername multiple times even when
|
||||||
|
// re-use is enabled.
|
||||||
|
|
||||||
|
let SessionCache
|
||||||
|
// FIXME: remove workaround when the Node bug is fixed
|
||||||
|
// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
|
||||||
|
if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {
|
||||||
|
SessionCache = class WeakSessionCache {
|
||||||
|
constructor (maxCachedSessions) {
|
||||||
|
this._maxCachedSessions = maxCachedSessions
|
||||||
|
this._sessionCache = new Map()
|
||||||
|
this._sessionRegistry = new global.FinalizationRegistry((key) => {
|
||||||
|
if (this._sessionCache.size < this._maxCachedSessions) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const ref = this._sessionCache.get(key)
|
||||||
|
if (ref !== undefined && ref.deref() === undefined) {
|
||||||
|
this._sessionCache.delete(key)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
get (sessionKey) {
|
||||||
|
const ref = this._sessionCache.get(sessionKey)
|
||||||
|
return ref ? ref.deref() : null
|
||||||
|
}
|
||||||
|
|
||||||
|
set (sessionKey, session) {
|
||||||
|
if (this._maxCachedSessions === 0) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this._sessionCache.set(sessionKey, new WeakRef(session))
|
||||||
|
this._sessionRegistry.register(session, sessionKey)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
SessionCache = class SimpleSessionCache {
|
||||||
|
constructor (maxCachedSessions) {
|
||||||
|
this._maxCachedSessions = maxCachedSessions
|
||||||
|
this._sessionCache = new Map()
|
||||||
|
}
|
||||||
|
|
||||||
|
get (sessionKey) {
|
||||||
|
return this._sessionCache.get(sessionKey)
|
||||||
|
}
|
||||||
|
|
||||||
|
set (sessionKey, session) {
|
||||||
|
if (this._maxCachedSessions === 0) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this._sessionCache.size >= this._maxCachedSessions) {
|
||||||
|
// remove the oldest session
|
||||||
|
const { value: oldestKey } = this._sessionCache.keys().next()
|
||||||
|
this._sessionCache.delete(oldestKey)
|
||||||
|
}
|
||||||
|
|
||||||
|
this._sessionCache.set(sessionKey, session)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
|
||||||
|
if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
|
||||||
|
throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')
|
||||||
|
}
|
||||||
|
|
||||||
|
const options = { path: socketPath, ...opts }
|
||||||
|
const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions)
|
||||||
|
timeout = timeout == null ? 10e3 : timeout
|
||||||
|
allowH2 = allowH2 != null ? allowH2 : false
|
||||||
|
return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {
|
||||||
|
let socket
|
||||||
|
if (protocol === 'https:') {
|
||||||
|
if (!tls) {
|
||||||
|
tls = require('tls')
|
||||||
|
}
|
||||||
|
servername = servername || options.servername || util.getServerName(host) || null
|
||||||
|
|
||||||
|
const sessionKey = servername || hostname
|
||||||
|
const session = sessionCache.get(sessionKey) || null
|
||||||
|
|
||||||
|
assert(sessionKey)
|
||||||
|
|
||||||
|
socket = tls.connect({
|
||||||
|
highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...
|
||||||
|
...options,
|
||||||
|
servername,
|
||||||
|
session,
|
||||||
|
localAddress,
|
||||||
|
// TODO(HTTP/2): Add support for h2c
|
||||||
|
ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],
|
||||||
|
socket: httpSocket, // upgrade socket connection
|
||||||
|
port: port || 443,
|
||||||
|
host: hostname
|
||||||
|
})
|
||||||
|
|
||||||
|
socket
|
||||||
|
.on('session', function (session) {
|
||||||
|
// TODO (fix): Can a session become invalid once established? Don't think so?
|
||||||
|
sessionCache.set(sessionKey, session)
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
assert(!httpSocket, 'httpSocket can only be sent on TLS update')
|
||||||
|
socket = net.connect({
|
||||||
|
highWaterMark: 64 * 1024, // Same as nodejs fs streams.
|
||||||
|
...options,
|
||||||
|
localAddress,
|
||||||
|
port: port || 80,
|
||||||
|
host: hostname
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket
|
||||||
|
if (options.keepAlive == null || options.keepAlive) {
|
||||||
|
const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay
|
||||||
|
socket.setKeepAlive(true, keepAliveInitialDelay)
|
||||||
|
}
|
||||||
|
|
||||||
|
const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout)
|
||||||
|
|
||||||
|
socket
|
||||||
|
.setNoDelay(true)
|
||||||
|
.once(protocol === 'https:' ? 'secureConnect' : 'connect', function () {
|
||||||
|
cancelTimeout()
|
||||||
|
|
||||||
|
if (callback) {
|
||||||
|
const cb = callback
|
||||||
|
callback = null
|
||||||
|
cb(null, this)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.on('error', function (err) {
|
||||||
|
cancelTimeout()
|
||||||
|
|
||||||
|
if (callback) {
|
||||||
|
const cb = callback
|
||||||
|
callback = null
|
||||||
|
cb(err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
return socket
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function setupTimeout (onConnectTimeout, timeout) {
|
||||||
|
if (!timeout) {
|
||||||
|
return () => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
let s1 = null
|
||||||
|
let s2 = null
|
||||||
|
const timeoutId = setTimeout(() => {
|
||||||
|
// setImmediate is added to make sure that we priotorise socket error events over timeouts
|
||||||
|
s1 = setImmediate(() => {
|
||||||
|
if (process.platform === 'win32') {
|
||||||
|
// Windows needs an extra setImmediate probably due to implementation differences in the socket logic
|
||||||
|
s2 = setImmediate(() => onConnectTimeout())
|
||||||
|
} else {
|
||||||
|
onConnectTimeout()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}, timeout)
|
||||||
|
return () => {
|
||||||
|
clearTimeout(timeoutId)
|
||||||
|
clearImmediate(s1)
|
||||||
|
clearImmediate(s2)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function onConnectTimeout (socket) {
|
||||||
|
util.destroy(socket, new ConnectTimeoutError())
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = buildConnector
|
216
node_modules/undici/lib/core/errors.js
generated
vendored
Normal file
@@ -0,0 +1,216 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
class UndiciError extends Error {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
this.name = 'UndiciError'
|
||||||
|
this.code = 'UND_ERR'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class ConnectTimeoutError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, ConnectTimeoutError)
|
||||||
|
this.name = 'ConnectTimeoutError'
|
||||||
|
this.message = message || 'Connect Timeout Error'
|
||||||
|
this.code = 'UND_ERR_CONNECT_TIMEOUT'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class HeadersTimeoutError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, HeadersTimeoutError)
|
||||||
|
this.name = 'HeadersTimeoutError'
|
||||||
|
this.message = message || 'Headers Timeout Error'
|
||||||
|
this.code = 'UND_ERR_HEADERS_TIMEOUT'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class HeadersOverflowError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, HeadersOverflowError)
|
||||||
|
this.name = 'HeadersOverflowError'
|
||||||
|
this.message = message || 'Headers Overflow Error'
|
||||||
|
this.code = 'UND_ERR_HEADERS_OVERFLOW'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class BodyTimeoutError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, BodyTimeoutError)
|
||||||
|
this.name = 'BodyTimeoutError'
|
||||||
|
this.message = message || 'Body Timeout Error'
|
||||||
|
this.code = 'UND_ERR_BODY_TIMEOUT'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class ResponseStatusCodeError extends UndiciError {
|
||||||
|
constructor (message, statusCode, headers, body) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, ResponseStatusCodeError)
|
||||||
|
this.name = 'ResponseStatusCodeError'
|
||||||
|
this.message = message || 'Response Status Code Error'
|
||||||
|
this.code = 'UND_ERR_RESPONSE_STATUS_CODE'
|
||||||
|
this.body = body
|
||||||
|
this.status = statusCode
|
||||||
|
this.statusCode = statusCode
|
||||||
|
this.headers = headers
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class InvalidArgumentError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, InvalidArgumentError)
|
||||||
|
this.name = 'InvalidArgumentError'
|
||||||
|
this.message = message || 'Invalid Argument Error'
|
||||||
|
this.code = 'UND_ERR_INVALID_ARG'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class InvalidReturnValueError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, InvalidReturnValueError)
|
||||||
|
this.name = 'InvalidReturnValueError'
|
||||||
|
this.message = message || 'Invalid Return Value Error'
|
||||||
|
this.code = 'UND_ERR_INVALID_RETURN_VALUE'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class RequestAbortedError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, RequestAbortedError)
|
||||||
|
this.name = 'AbortError'
|
||||||
|
this.message = message || 'Request aborted'
|
||||||
|
this.code = 'UND_ERR_ABORTED'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class InformationalError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, InformationalError)
|
||||||
|
this.name = 'InformationalError'
|
||||||
|
this.message = message || 'Request information'
|
||||||
|
this.code = 'UND_ERR_INFO'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class RequestContentLengthMismatchError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, RequestContentLengthMismatchError)
|
||||||
|
this.name = 'RequestContentLengthMismatchError'
|
||||||
|
this.message = message || 'Request body length does not match content-length header'
|
||||||
|
this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class ResponseContentLengthMismatchError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, ResponseContentLengthMismatchError)
|
||||||
|
this.name = 'ResponseContentLengthMismatchError'
|
||||||
|
this.message = message || 'Response body length does not match content-length header'
|
||||||
|
this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class ClientDestroyedError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, ClientDestroyedError)
|
||||||
|
this.name = 'ClientDestroyedError'
|
||||||
|
this.message = message || 'The client is destroyed'
|
||||||
|
this.code = 'UND_ERR_DESTROYED'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class ClientClosedError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, ClientClosedError)
|
||||||
|
this.name = 'ClientClosedError'
|
||||||
|
this.message = message || 'The client is closed'
|
||||||
|
this.code = 'UND_ERR_CLOSED'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class SocketError extends UndiciError {
|
||||||
|
constructor (message, socket) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, SocketError)
|
||||||
|
this.name = 'SocketError'
|
||||||
|
this.message = message || 'Socket error'
|
||||||
|
this.code = 'UND_ERR_SOCKET'
|
||||||
|
this.socket = socket
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class NotSupportedError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, NotSupportedError)
|
||||||
|
this.name = 'NotSupportedError'
|
||||||
|
this.message = message || 'Not supported error'
|
||||||
|
this.code = 'UND_ERR_NOT_SUPPORTED'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class BalancedPoolMissingUpstreamError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, NotSupportedError)
|
||||||
|
this.name = 'MissingUpstreamError'
|
||||||
|
this.message = message || 'No upstream has been added to the BalancedPool'
|
||||||
|
this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class HTTPParserError extends Error {
|
||||||
|
constructor (message, code, data) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, HTTPParserError)
|
||||||
|
this.name = 'HTTPParserError'
|
||||||
|
this.code = code ? `HPE_${code}` : undefined
|
||||||
|
this.data = data ? data.toString() : undefined
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class ResponseExceededMaxSizeError extends UndiciError {
|
||||||
|
constructor (message) {
|
||||||
|
super(message)
|
||||||
|
Error.captureStackTrace(this, ResponseExceededMaxSizeError)
|
||||||
|
this.name = 'ResponseExceededMaxSizeError'
|
||||||
|
this.message = message || 'Response content exceeded max size'
|
||||||
|
this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
HTTPParserError,
|
||||||
|
UndiciError,
|
||||||
|
HeadersTimeoutError,
|
||||||
|
HeadersOverflowError,
|
||||||
|
BodyTimeoutError,
|
||||||
|
RequestContentLengthMismatchError,
|
||||||
|
ConnectTimeoutError,
|
||||||
|
ResponseStatusCodeError,
|
||||||
|
InvalidArgumentError,
|
||||||
|
InvalidReturnValueError,
|
||||||
|
RequestAbortedError,
|
||||||
|
ClientDestroyedError,
|
||||||
|
ClientClosedError,
|
||||||
|
InformationalError,
|
||||||
|
SocketError,
|
||||||
|
NotSupportedError,
|
||||||
|
ResponseContentLengthMismatchError,
|
||||||
|
BalancedPoolMissingUpstreamError,
|
||||||
|
ResponseExceededMaxSizeError
|
||||||
|
}
|
483
node_modules/undici/lib/core/request.js
generated
vendored
Normal file
@@ -0,0 +1,483 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const {
|
||||||
|
InvalidArgumentError,
|
||||||
|
NotSupportedError
|
||||||
|
} = require('./errors')
|
||||||
|
const assert = require('assert')
|
||||||
|
const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require('./symbols')
|
||||||
|
const util = require('./util')
|
||||||
|
|
||||||
|
// tokenRegExp and headerCharRegex have been lifted from
|
||||||
|
// https://github.com/nodejs/node/blob/main/lib/_http_common.js
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Verifies that the given val is a valid HTTP token
|
||||||
|
* per the rules defined in RFC 7230
|
||||||
|
* See https://tools.ietf.org/html/rfc7230#section-3.2.6
|
||||||
|
*/
|
||||||
|
const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Matches if val contains an invalid field-vchar
|
||||||
|
* field-value = *( field-content / obs-fold )
|
||||||
|
* field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
|
||||||
|
* field-vchar = VCHAR / obs-text
|
||||||
|
*/
|
||||||
|
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/
|
||||||
|
|
||||||
|
// Verifies that a given path is valid does not contain control chars \x00 to \x20
|
||||||
|
const invalidPathRegex = /[^\u0021-\u00ff]/
|
||||||
|
|
||||||
|
const kHandler = Symbol('handler')
|
||||||
|
|
||||||
|
const channels = {}
|
||||||
|
|
||||||
|
let extractBody
|
||||||
|
|
||||||
|
try {
|
||||||
|
const diagnosticsChannel = require('diagnostics_channel')
|
||||||
|
channels.create = diagnosticsChannel.channel('undici:request:create')
|
||||||
|
channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent')
|
||||||
|
channels.headers = diagnosticsChannel.channel('undici:request:headers')
|
||||||
|
channels.trailers = diagnosticsChannel.channel('undici:request:trailers')
|
||||||
|
channels.error = diagnosticsChannel.channel('undici:request:error')
|
||||||
|
} catch {
|
||||||
|
channels.create = { hasSubscribers: false }
|
||||||
|
channels.bodySent = { hasSubscribers: false }
|
||||||
|
channels.headers = { hasSubscribers: false }
|
||||||
|
channels.trailers = { hasSubscribers: false }
|
||||||
|
channels.error = { hasSubscribers: false }
|
||||||
|
}
|
||||||
|
|
||||||
|
class Request {
|
||||||
|
constructor (origin, {
|
||||||
|
path,
|
||||||
|
method,
|
||||||
|
body,
|
||||||
|
headers,
|
||||||
|
query,
|
||||||
|
idempotent,
|
||||||
|
blocking,
|
||||||
|
upgrade,
|
||||||
|
headersTimeout,
|
||||||
|
bodyTimeout,
|
||||||
|
reset,
|
||||||
|
throwOnError,
|
||||||
|
expectContinue
|
||||||
|
}, handler) {
|
||||||
|
if (typeof path !== 'string') {
|
||||||
|
throw new InvalidArgumentError('path must be a string')
|
||||||
|
} else if (
|
||||||
|
path[0] !== '/' &&
|
||||||
|
!(path.startsWith('http://') || path.startsWith('https://')) &&
|
||||||
|
method !== 'CONNECT'
|
||||||
|
) {
|
||||||
|
throw new InvalidArgumentError('path must be an absolute URL or start with a slash')
|
||||||
|
} else if (invalidPathRegex.exec(path) !== null) {
|
||||||
|
throw new InvalidArgumentError('invalid request path')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof method !== 'string') {
|
||||||
|
throw new InvalidArgumentError('method must be a string')
|
||||||
|
} else if (tokenRegExp.exec(method) === null) {
|
||||||
|
throw new InvalidArgumentError('invalid request method')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (upgrade && typeof upgrade !== 'string') {
|
||||||
|
throw new InvalidArgumentError('upgrade must be a string')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) {
|
||||||
|
throw new InvalidArgumentError('invalid headersTimeout')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) {
|
||||||
|
throw new InvalidArgumentError('invalid bodyTimeout')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (reset != null && typeof reset !== 'boolean') {
|
||||||
|
throw new InvalidArgumentError('invalid reset')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (expectContinue != null && typeof expectContinue !== 'boolean') {
|
||||||
|
throw new InvalidArgumentError('invalid expectContinue')
|
||||||
|
}
|
||||||
|
|
||||||
|
this.headersTimeout = headersTimeout
|
||||||
|
|
||||||
|
this.bodyTimeout = bodyTimeout
|
||||||
|
|
||||||
|
this.throwOnError = throwOnError === true
|
||||||
|
|
||||||
|
this.method = method
|
||||||
|
|
||||||
|
this.abort = null
|
||||||
|
|
||||||
|
if (body == null) {
|
||||||
|
this.body = null
|
||||||
|
} else if (util.isStream(body)) {
|
||||||
|
this.body = body
|
||||||
|
|
||||||
|
const rState = this.body._readableState
|
||||||
|
if (!rState || !rState.autoDestroy) {
|
||||||
|
this.endHandler = function autoDestroy () {
|
||||||
|
util.destroy(this)
|
||||||
|
}
|
||||||
|
this.body.on('end', this.endHandler)
|
||||||
|
}
|
||||||
|
|
||||||
|
this.errorHandler = err => {
|
||||||
|
if (this.abort) {
|
||||||
|
this.abort(err)
|
||||||
|
} else {
|
||||||
|
this.error = err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.body.on('error', this.errorHandler)
|
||||||
|
} else if (util.isBuffer(body)) {
|
||||||
|
this.body = body.byteLength ? body : null
|
||||||
|
} else if (ArrayBuffer.isView(body)) {
|
||||||
|
this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null
|
||||||
|
} else if (body instanceof ArrayBuffer) {
|
||||||
|
this.body = body.byteLength ? Buffer.from(body) : null
|
||||||
|
} else if (typeof body === 'string') {
|
||||||
|
this.body = body.length ? Buffer.from(body) : null
|
||||||
|
} else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) {
|
||||||
|
this.body = body
|
||||||
|
} else {
|
||||||
|
throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')
|
||||||
|
}
|
||||||
|
|
||||||
|
this.completed = false
|
||||||
|
|
||||||
|
this.aborted = false
|
||||||
|
|
||||||
|
this.upgrade = upgrade || null
|
||||||
|
|
||||||
|
this.path = query ? util.buildURL(path, query) : path
|
||||||
|
|
||||||
|
this.origin = origin
|
||||||
|
|
||||||
|
this.idempotent = idempotent == null
|
||||||
|
? method === 'HEAD' || method === 'GET'
|
||||||
|
: idempotent
|
||||||
|
|
||||||
|
this.blocking = blocking == null ? false : blocking
|
||||||
|
|
||||||
|
this.reset = reset == null ? null : reset
|
||||||
|
|
||||||
|
this.host = null
|
||||||
|
|
||||||
|
this.contentLength = null
|
||||||
|
|
||||||
|
this.contentType = null
|
||||||
|
|
||||||
|
this.headers = ''
|
||||||
|
|
||||||
|
// Only for H2
|
||||||
|
this.expectContinue = expectContinue != null ? expectContinue : false
|
||||||
|
|
||||||
|
if (Array.isArray(headers)) {
|
||||||
|
if (headers.length % 2 !== 0) {
|
||||||
|
throw new InvalidArgumentError('headers array must be even')
|
||||||
|
}
|
||||||
|
for (let i = 0; i < headers.length; i += 2) {
|
||||||
|
processHeader(this, headers[i], headers[i + 1])
|
||||||
|
}
|
||||||
|
} else if (headers && typeof headers === 'object') {
|
||||||
|
const keys = Object.keys(headers)
|
||||||
|
for (let i = 0; i < keys.length; i++) {
|
||||||
|
const key = keys[i]
|
||||||
|
processHeader(this, key, headers[key])
|
||||||
|
}
|
||||||
|
} else if (headers != null) {
|
||||||
|
throw new InvalidArgumentError('headers must be an object or an array')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (util.isFormDataLike(this.body)) {
|
||||||
|
if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) {
|
||||||
|
throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!extractBody) {
|
||||||
|
extractBody = require('../fetch/body.js').extractBody
|
||||||
|
}
|
||||||
|
|
||||||
|
const [bodyStream, contentType] = extractBody(body)
|
||||||
|
if (this.contentType == null) {
|
||||||
|
this.contentType = contentType
|
||||||
|
this.headers += `content-type: ${contentType}\r\n`
|
||||||
|
}
|
||||||
|
this.body = bodyStream.stream
|
||||||
|
this.contentLength = bodyStream.length
|
||||||
|
} else if (util.isBlobLike(body) && this.contentType == null && body.type) {
|
||||||
|
this.contentType = body.type
|
||||||
|
this.headers += `content-type: ${body.type}\r\n`
|
||||||
|
}
|
||||||
|
|
||||||
|
util.validateHandler(handler, method, upgrade)
|
||||||
|
|
||||||
|
this.servername = util.getServerName(this.host)
|
||||||
|
|
||||||
|
this[kHandler] = handler
|
||||||
|
|
||||||
|
if (channels.create.hasSubscribers) {
|
||||||
|
channels.create.publish({ request: this })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onBodySent (chunk) {
|
||||||
|
if (this[kHandler].onBodySent) {
|
||||||
|
try {
|
||||||
|
this[kHandler].onBodySent(chunk)
|
||||||
|
} catch (err) {
|
||||||
|
this.onError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onRequestSent () {
|
||||||
|
if (channels.bodySent.hasSubscribers) {
|
||||||
|
channels.bodySent.publish({ request: this })
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this[kHandler].onRequestSent) {
|
||||||
|
try {
|
||||||
|
this[kHandler].onRequestSent()
|
||||||
|
} catch (err) {
|
||||||
|
this.onError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onConnect (abort) {
|
||||||
|
assert(!this.aborted)
|
||||||
|
assert(!this.completed)
|
||||||
|
|
||||||
|
if (this.error) {
|
||||||
|
abort(this.error)
|
||||||
|
} else {
|
||||||
|
this.abort = abort
|
||||||
|
return this[kHandler].onConnect(abort)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onHeaders (statusCode, headers, resume, statusText) {
|
||||||
|
assert(!this.aborted)
|
||||||
|
assert(!this.completed)
|
||||||
|
|
||||||
|
if (channels.headers.hasSubscribers) {
|
||||||
|
channels.headers.publish({ request: this, response: { statusCode, headers, statusText } })
|
||||||
|
}
|
||||||
|
|
||||||
|
return this[kHandler].onHeaders(statusCode, headers, resume, statusText)
|
||||||
|
}
|
||||||
|
|
||||||
|
onData (chunk) {
|
||||||
|
assert(!this.aborted)
|
||||||
|
assert(!this.completed)
|
||||||
|
|
||||||
|
return this[kHandler].onData(chunk)
|
||||||
|
}
|
||||||
|
|
||||||
|
onUpgrade (statusCode, headers, socket) {
|
||||||
|
assert(!this.aborted)
|
||||||
|
assert(!this.completed)
|
||||||
|
|
||||||
|
return this[kHandler].onUpgrade(statusCode, headers, socket)
|
||||||
|
}
|
||||||
|
|
||||||
|
onComplete (trailers) {
|
||||||
|
this.onFinally()
|
||||||
|
|
||||||
|
assert(!this.aborted)
|
||||||
|
|
||||||
|
this.completed = true
|
||||||
|
if (channels.trailers.hasSubscribers) {
|
||||||
|
channels.trailers.publish({ request: this, trailers })
|
||||||
|
}
|
||||||
|
return this[kHandler].onComplete(trailers)
|
||||||
|
}
|
||||||
|
|
||||||
|
onError (error) {
|
||||||
|
this.onFinally()
|
||||||
|
|
||||||
|
if (channels.error.hasSubscribers) {
|
||||||
|
channels.error.publish({ request: this, error })
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.aborted) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
this.aborted = true
|
||||||
|
return this[kHandler].onError(error)
|
||||||
|
}
|
||||||
|
|
||||||
|
onFinally () {
|
||||||
|
if (this.errorHandler) {
|
||||||
|
this.body.off('error', this.errorHandler)
|
||||||
|
this.errorHandler = null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.endHandler) {
|
||||||
|
this.body.off('end', this.endHandler)
|
||||||
|
this.endHandler = null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: adjust to support H2
|
||||||
|
addHeader (key, value) {
|
||||||
|
processHeader(this, key, value)
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
static [kHTTP1BuildRequest] (origin, opts, handler) {
|
||||||
|
// TODO: Migrate header parsing here, to make Requests
|
||||||
|
// HTTP agnostic
|
||||||
|
return new Request(origin, opts, handler)
|
||||||
|
}
|
||||||
|
|
||||||
|
static [kHTTP2BuildRequest] (origin, opts, handler) {
|
||||||
|
const headers = opts.headers
|
||||||
|
opts = { ...opts, headers: null }
|
||||||
|
|
||||||
|
const request = new Request(origin, opts, handler)
|
||||||
|
|
||||||
|
request.headers = {}
|
||||||
|
|
||||||
|
if (Array.isArray(headers)) {
|
||||||
|
if (headers.length % 2 !== 0) {
|
||||||
|
throw new InvalidArgumentError('headers array must be even')
|
||||||
|
}
|
||||||
|
for (let i = 0; i < headers.length; i += 2) {
|
||||||
|
processHeader(request, headers[i], headers[i + 1], true)
|
||||||
|
}
|
||||||
|
} else if (headers && typeof headers === 'object') {
|
||||||
|
const keys = Object.keys(headers)
|
||||||
|
for (let i = 0; i < keys.length; i++) {
|
||||||
|
const key = keys[i]
|
||||||
|
processHeader(request, key, headers[key], true)
|
||||||
|
}
|
||||||
|
} else if (headers != null) {
|
||||||
|
throw new InvalidArgumentError('headers must be an object or an array')
|
||||||
|
}
|
||||||
|
|
||||||
|
return request
|
||||||
|
}
|
||||||
|
|
||||||
|
static [kHTTP2CopyHeaders] (raw) {
|
||||||
|
const rawHeaders = raw.split('\r\n')
|
||||||
|
const headers = {}
|
||||||
|
|
||||||
|
for (const header of rawHeaders) {
|
||||||
|
const [key, value] = header.split(': ')
|
||||||
|
|
||||||
|
if (value == null || value.length === 0) continue
|
||||||
|
|
||||||
|
if (headers[key]) headers[key] += `,${value}`
|
||||||
|
else headers[key] = value
|
||||||
|
}
|
||||||
|
|
||||||
|
return headers
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function processHeaderValue (key, val, skipAppend) {
|
||||||
|
if (val && typeof val === 'object') {
|
||||||
|
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||||
|
}
|
||||||
|
|
||||||
|
val = val != null ? `${val}` : ''
|
||||||
|
|
||||||
|
if (headerCharRegex.exec(val) !== null) {
|
||||||
|
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return skipAppend ? val : `${key}: ${val}\r\n`
|
||||||
|
}
|
||||||
|
|
||||||
|
function processHeader (request, key, val, skipAppend = false) {
|
||||||
|
if (val && (typeof val === 'object' && !Array.isArray(val))) {
|
||||||
|
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||||
|
} else if (val === undefined) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
request.host === null &&
|
||||||
|
key.length === 4 &&
|
||||||
|
key.toLowerCase() === 'host'
|
||||||
|
) {
|
||||||
|
if (headerCharRegex.exec(val) !== null) {
|
||||||
|
throw new InvalidArgumentError(`invalid ${key} header`)
|
||||||
|
}
|
||||||
|
// Consumed by Client
|
||||||
|
request.host = val
|
||||||
|
} else if (
|
||||||
|
request.contentLength === null &&
|
||||||
|
key.length === 14 &&
|
||||||
|
key.toLowerCase() === 'content-length'
|
||||||
|
) {
|
||||||
|
request.contentLength = parseInt(val, 10)
|
||||||
|
if (!Number.isFinite(request.contentLength)) {
|
||||||
|
throw new InvalidArgumentError('invalid content-length header')
|
||||||
|
}
|
||||||
|
} else if (
|
||||||
|
request.contentType === null &&
|
||||||
|
key.length === 12 &&
|
||||||
|
key.toLowerCase() === 'content-type'
|
||||||
|
) {
|
||||||
|
request.contentType = val
|
||||||
|
if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
|
||||||
|
else request.headers += processHeaderValue(key, val)
|
||||||
|
} else if (
|
||||||
|
key.length === 17 &&
|
||||||
|
key.toLowerCase() === 'transfer-encoding'
|
||||||
|
) {
|
||||||
|
throw new InvalidArgumentError('invalid transfer-encoding header')
|
||||||
|
} else if (
|
||||||
|
key.length === 10 &&
|
||||||
|
key.toLowerCase() === 'connection'
|
||||||
|
) {
|
||||||
|
const value = typeof val === 'string' ? val.toLowerCase() : null
|
||||||
|
if (value !== 'close' && value !== 'keep-alive') {
|
||||||
|
throw new InvalidArgumentError('invalid connection header')
|
||||||
|
} else if (value === 'close') {
|
||||||
|
request.reset = true
|
||||||
|
}
|
||||||
|
} else if (
|
||||||
|
key.length === 10 &&
|
||||||
|
key.toLowerCase() === 'keep-alive'
|
||||||
|
) {
|
||||||
|
throw new InvalidArgumentError('invalid keep-alive header')
|
||||||
|
} else if (
|
||||||
|
key.length === 7 &&
|
||||||
|
key.toLowerCase() === 'upgrade'
|
||||||
|
) {
|
||||||
|
throw new InvalidArgumentError('invalid upgrade header')
|
||||||
|
} else if (
|
||||||
|
key.length === 6 &&
|
||||||
|
key.toLowerCase() === 'expect'
|
||||||
|
) {
|
||||||
|
throw new NotSupportedError('expect header not supported')
|
||||||
|
} else if (tokenRegExp.exec(key) === null) {
|
||||||
|
throw new InvalidArgumentError('invalid header key')
|
||||||
|
} else {
|
||||||
|
if (Array.isArray(val)) {
|
||||||
|
for (let i = 0; i < val.length; i++) {
|
||||||
|
if (skipAppend) {
|
||||||
|
if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}`
|
||||||
|
else request.headers[key] = processHeaderValue(key, val[i], skipAppend)
|
||||||
|
} else {
|
||||||
|
request.headers += processHeaderValue(key, val[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
|
||||||
|
else request.headers += processHeaderValue(key, val)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Request
|
61
node_modules/undici/lib/core/symbols.js
generated
vendored
Normal file
@@ -0,0 +1,61 @@
module.exports = {
  kClose: Symbol('close'),
  kDestroy: Symbol('destroy'),
  kDispatch: Symbol('dispatch'),
  kUrl: Symbol('url'),
  kWriting: Symbol('writing'),
  kResuming: Symbol('resuming'),
  kQueue: Symbol('queue'),
  kConnect: Symbol('connect'),
  kConnecting: Symbol('connecting'),
  kHeadersList: Symbol('headers list'),
  kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'),
  kKeepAliveMaxTimeout: Symbol('max keep alive timeout'),
  kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'),
  kKeepAliveTimeoutValue: Symbol('keep alive timeout'),
  kKeepAlive: Symbol('keep alive'),
  kHeadersTimeout: Symbol('headers timeout'),
  kBodyTimeout: Symbol('body timeout'),
  kServerName: Symbol('server name'),
  kLocalAddress: Symbol('local address'),
  kHost: Symbol('host'),
  kNoRef: Symbol('no ref'),
  kBodyUsed: Symbol('used'),
  kRunning: Symbol('running'),
  kBlocking: Symbol('blocking'),
  kPending: Symbol('pending'),
  kSize: Symbol('size'),
  kBusy: Symbol('busy'),
  kQueued: Symbol('queued'),
  kFree: Symbol('free'),
  kConnected: Symbol('connected'),
  kClosed: Symbol('closed'),
  kNeedDrain: Symbol('need drain'),
  kReset: Symbol('reset'),
  kDestroyed: Symbol.for('nodejs.stream.destroyed'),
  kMaxHeadersSize: Symbol('max headers size'),
  kRunningIdx: Symbol('running index'),
  kPendingIdx: Symbol('pending index'),
  kError: Symbol('error'),
  kClients: Symbol('clients'),
  kClient: Symbol('client'),
  kParser: Symbol('parser'),
  kOnDestroyed: Symbol('destroy callbacks'),
  kPipelining: Symbol('pipelining'),
  kSocket: Symbol('socket'),
  kHostHeader: Symbol('host header'),
  kConnector: Symbol('connector'),
  kStrictContentLength: Symbol('strict content length'),
  kMaxRedirections: Symbol('maxRedirections'),
  kMaxRequests: Symbol('maxRequestsPerClient'),
  kProxy: Symbol('proxy agent options'),
  kCounter: Symbol('socket request counter'),
  kInterceptors: Symbol('dispatch interceptors'),
  kMaxResponseSize: Symbol('max response size'),
  kHTTP2Session: Symbol('http2Session'),
  kHTTP2SessionState: Symbol('http2Session state'),
  kHTTP2BuildRequest: Symbol('http2 build request'),
  kHTTP1BuildRequest: Symbol('http1 build request'),
  kHTTP2CopyHeaders: Symbol('http2 copy headers'),
  kHTTPConnVersion: Symbol('http connection version')
}
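A brief illustration (not from the vendored source) of why these symbol keys exist: they hold internal client state without exposing string-keyed properties. The require path is an assumption.

const { kUrl, kRunning } = require('./node_modules/undici/lib/core/symbols')

// A stand-in object using the symbols as private property keys.
const fakeClient = {
  [kUrl]: new URL('http://localhost:3000'),
  [kRunning]: 0
}
console.log(fakeClient[kUrl].origin, fakeClient[kRunning]) // http://localhost:3000 0
console.log(Object.keys(fakeClient)) // [] - symbol keys stay off the string-key surface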
503
node_modules/undici/lib/core/util.js
generated
vendored
Normal file
@@ -0,0 +1,503 @@
|
|||||||
|
'use strict'
|
||||||
|
|
||||||
|
const assert = require('assert')
|
||||||
|
const { kDestroyed, kBodyUsed } = require('./symbols')
|
||||||
|
const { IncomingMessage } = require('http')
|
||||||
|
const stream = require('stream')
|
||||||
|
const net = require('net')
|
||||||
|
const { InvalidArgumentError } = require('./errors')
|
||||||
|
const { Blob } = require('buffer')
|
||||||
|
const nodeUtil = require('util')
|
||||||
|
const { stringify } = require('querystring')
|
||||||
|
|
||||||
|
const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
|
||||||
|
|
||||||
|
function nop () {}
|
||||||
|
|
||||||
|
function isStream (obj) {
|
||||||
|
return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function'
|
||||||
|
}
|
||||||
|
|
||||||
|
// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
|
||||||
|
function isBlobLike (object) {
|
||||||
|
return (Blob && object instanceof Blob) || (
|
||||||
|
object &&
|
||||||
|
typeof object === 'object' &&
|
||||||
|
(typeof object.stream === 'function' ||
|
||||||
|
typeof object.arrayBuffer === 'function') &&
|
||||||
|
/^(Blob|File)$/.test(object[Symbol.toStringTag])
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildURL (url, queryParams) {
|
||||||
|
if (url.includes('?') || url.includes('#')) {
|
||||||
|
throw new Error('Query params cannot be passed when url already contains "?" or "#".')
|
||||||
|
}
|
||||||
|
|
||||||
|
const stringified = stringify(queryParams)
|
||||||
|
|
||||||
|
if (stringified) {
|
||||||
|
url += '?' + stringified
|
||||||
|
}
|
||||||
|
|
||||||
|
return url
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseURL (url) {
|
||||||
|
if (typeof url === 'string') {
|
||||||
|
url = new URL(url)
|
||||||
|
|
||||||
|
if (!/^https?:/.test(url.origin || url.protocol)) {
|
||||||
|
throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
|
||||||
|
}
|
||||||
|
|
||||||
|
return url
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!url || typeof url !== 'object') {
|
||||||
|
throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!/^https?:/.test(url.origin || url.protocol)) {
|
||||||
|
throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!(url instanceof URL)) {
|
||||||
|
if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {
|
||||||
|
throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (url.path != null && typeof url.path !== 'string') {
|
||||||
|
throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (url.pathname != null && typeof url.pathname !== 'string') {
|
||||||
|
throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (url.hostname != null && typeof url.hostname !== 'string') {
|
||||||
|
throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (url.origin != null && typeof url.origin !== 'string') {
|
||||||
|
throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
|
||||||
|
}
|
||||||
|
|
||||||
|
const port = url.port != null
|
||||||
|
? url.port
|
||||||
|
: (url.protocol === 'https:' ? 443 : 80)
|
||||||
|
let origin = url.origin != null
|
||||||
|
? url.origin
|
||||||
|
: `${url.protocol}//${url.hostname}:${port}`
|
||||||
|
let path = url.path != null
|
||||||
|
? url.path
|
||||||
|
: `${url.pathname || ''}${url.search || ''}`
|
||||||
|
|
||||||
|
if (origin.endsWith('/')) {
|
||||||
|
origin = origin.substring(0, origin.length - 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (path && !path.startsWith('/')) {
|
||||||
|
path = `/${path}`
|
||||||
|
}
|
||||||
|
// new URL(path, origin) is unsafe when `path` contains an absolute URL
|
||||||
|
// From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:
|
||||||
|
// If first parameter is a relative URL, second param is required, and will be used as the base URL.
|
||||||
|
// If first parameter is an absolute URL, a given second param will be ignored.
|
||||||
|
url = new URL(origin + path)
|
||||||
|
}
|
||||||
|
|
||||||
|
return url
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseOrigin (url) {
|
||||||
|
url = parseURL(url)
|
||||||
|
|
||||||
|
if (url.pathname !== '/' || url.search || url.hash) {
|
||||||
|
throw new InvalidArgumentError('invalid url')
|
||||||
|
}
|
||||||
|
|
||||||
|
return url
|
||||||
|
}
|
||||||
|
|
||||||
|
function getHostname (host) {
|
||||||
|
if (host[0] === '[') {
|
||||||
|
const idx = host.indexOf(']')
|
||||||
|
|
||||||
|
assert(idx !== -1)
|
||||||
|
return host.substr(1, idx - 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
const idx = host.indexOf(':')
|
||||||
|
if (idx === -1) return host
|
||||||
|
|
||||||
|
return host.substr(0, idx)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IP addresses are not valid server names per RFC6066
|
||||||
|
// > Currently, the only server names supported are DNS hostnames
|
||||||
|
function getServerName (host) {
|
||||||
|
if (!host) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.strictEqual(typeof host, 'string')
|
||||||
|
|
||||||
|
const servername = getHostname(host)
|
||||||
|
if (net.isIP(servername)) {
|
||||||
|
return ''
|
||||||
|
}
|
||||||
|
|
||||||
|
return servername
|
||||||
|
}
|
||||||
|
|
||||||
|
function deepClone (obj) {
|
||||||
|
return JSON.parse(JSON.stringify(obj))
|
||||||
|
}
|
||||||
|
|
||||||
|
function isAsyncIterable (obj) {
|
||||||
|
return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function')
|
||||||
|
}
|
||||||
|
|
||||||
|
function isIterable (obj) {
|
||||||
|
return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function'))
|
||||||
|
}
|
||||||
|
|
||||||
|
function bodyLength (body) {
|
||||||
|
if (body == null) {
|
||||||
|
return 0
|
||||||
|
} else if (isStream(body)) {
|
||||||
|
const state = body._readableState
|
||||||
|
return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length)
|
||||||
|
? state.length
|
||||||
|
: null
|
||||||
|
} else if (isBlobLike(body)) {
|
||||||
|
return body.size != null ? body.size : null
|
||||||
|
} else if (isBuffer(body)) {
|
||||||
|
return body.byteLength
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
function isDestroyed (stream) {
|
||||||
|
return !stream || !!(stream.destroyed || stream[kDestroyed])
|
||||||
|
}
|
||||||
|
|
||||||
|
function isReadableAborted (stream) {
|
||||||
|
const state = stream && stream._readableState
|
||||||
|
return isDestroyed(stream) && state && !state.endEmitted
|
||||||
|
}
|
||||||
|
|
||||||
|
function destroy (stream, err) {
|
||||||
|
if (stream == null || !isStream(stream) || isDestroyed(stream)) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof stream.destroy === 'function') {
|
||||||
|
if (Object.getPrototypeOf(stream).constructor === IncomingMessage) {
|
||||||
|
// See: https://github.com/nodejs/node/pull/38505/files
|
||||||
|
stream.socket = null
|
||||||
|
}
|
||||||
|
|
||||||
|
stream.destroy(err)
|
||||||
|
} else if (err) {
|
||||||
|
process.nextTick((stream, err) => {
|
||||||
|
stream.emit('error', err)
|
||||||
|
}, stream, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (stream.destroyed !== true) {
|
||||||
|
stream[kDestroyed] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/
|
||||||
|
function parseKeepAliveTimeout (val) {
|
||||||
|
const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR)
|
||||||
|
return m ? parseInt(m[1], 10) * 1000 : null
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseHeaders (headers, obj = {}) {
|
||||||
|
// For H2 support
|
||||||
|
if (!Array.isArray(headers)) return headers
|
||||||
|
|
||||||
|
for (let i = 0; i < headers.length; i += 2) {
|
||||||
|
const key = headers[i].toString().toLowerCase()
|
||||||
|
let val = obj[key]
|
||||||
|
|
||||||
|
if (!val) {
|
||||||
|
if (Array.isArray(headers[i + 1])) {
|
||||||
|
obj[key] = headers[i + 1]
|
||||||
|
} else {
|
||||||
|
obj[key] = headers[i + 1].toString('utf8')
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (!Array.isArray(val)) {
|
||||||
|
val = [val]
|
||||||
|
obj[key] = val
|
||||||
|
}
|
||||||
|
val.push(headers[i + 1].toString('utf8'))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// See https://github.com/nodejs/node/pull/46528
|
||||||
|
if ('content-length' in obj && 'content-disposition' in obj) {
|
||||||
|
obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1')
|
||||||
|
}
|
||||||
|
|
||||||
|
return obj
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseRawHeaders (headers) {
  const ret = []
  let hasContentLength = false
  let contentDispositionIdx = -1

  for (let n = 0; n < headers.length; n += 2) {
    const key = headers[n + 0].toString()
    const val = headers[n + 1].toString('utf8')

    if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
      ret.push(key, val)
      hasContentLength = true
    } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
      contentDispositionIdx = ret.push(key, val) - 1
    } else {
      ret.push(key, val)
    }
  }

  // See https://github.com/nodejs/node/pull/46528
  if (hasContentLength && contentDispositionIdx !== -1) {
    ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1')
  }

  return ret
}

function isBuffer (buffer) {
  // See, https://github.com/mcollina/undici/pull/319
  return buffer instanceof Uint8Array || Buffer.isBuffer(buffer)
}

function validateHandler (handler, method, upgrade) {
  if (!handler || typeof handler !== 'object') {
    throw new InvalidArgumentError('handler must be an object')
  }

  if (typeof handler.onConnect !== 'function') {
    throw new InvalidArgumentError('invalid onConnect method')
  }

  if (typeof handler.onError !== 'function') {
    throw new InvalidArgumentError('invalid onError method')
  }

  if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) {
    throw new InvalidArgumentError('invalid onBodySent method')
  }

  if (upgrade || method === 'CONNECT') {
    if (typeof handler.onUpgrade !== 'function') {
      throw new InvalidArgumentError('invalid onUpgrade method')
    }
  } else {
    if (typeof handler.onHeaders !== 'function') {
      throw new InvalidArgumentError('invalid onHeaders method')
    }

    if (typeof handler.onData !== 'function') {
      throw new InvalidArgumentError('invalid onData method')
    }

    if (typeof handler.onComplete !== 'function') {
      throw new InvalidArgumentError('invalid onComplete method')
    }
  }
}
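Not part of the vendored file: a minimal handler shape that validateHandler accepts for a regular (non-upgrade) request; every method body here is a placeholder.

// Hypothetical handler: onConnect, onError, onHeaders, onData and onComplete
// are required for a non-upgrade request; onBodySent and onUpgrade are optional.
const handler = {
  onConnect (abort) {},
  onError (err) {},
  onHeaders (statusCode, headers, resume) { return true },
  onData (chunk) { return true },
  onComplete (trailers) {}
}
validateHandler(handler, 'GET', null) // does not throw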
// A body is disturbed if it has been read from and it cannot
// be re-used without losing state or data.
function isDisturbed (body) {
  return !!(body && (
    stream.isDisturbed
      ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed?
      : body[kBodyUsed] ||
        body.readableDidRead ||
        (body._readableState && body._readableState.dataEmitted) ||
        isReadableAborted(body)
  ))
}

function isErrored (body) {
  return !!(body && (
    stream.isErrored
      ? stream.isErrored(body)
      : /state: 'errored'/.test(nodeUtil.inspect(body))
  ))
}

function isReadable (body) {
  return !!(body && (
    stream.isReadable
      ? stream.isReadable(body)
      : /state: 'readable'/.test(nodeUtil.inspect(body))
  ))
}

function getSocketInfo (socket) {
  return {
    localAddress: socket.localAddress,
    localPort: socket.localPort,
    remoteAddress: socket.remoteAddress,
    remotePort: socket.remotePort,
    remoteFamily: socket.remoteFamily,
    timeout: socket.timeout,
    bytesWritten: socket.bytesWritten,
    bytesRead: socket.bytesRead
  }
}

async function * convertIterableToBuffer (iterable) {
  for await (const chunk of iterable) {
    yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)
  }
}

let ReadableStream
function ReadableStreamFrom (iterable) {
  if (!ReadableStream) {
    ReadableStream = require('stream/web').ReadableStream
  }

  if (ReadableStream.from) {
    return ReadableStream.from(convertIterableToBuffer(iterable))
  }

  let iterator
  return new ReadableStream(
    {
      async start () {
        iterator = iterable[Symbol.asyncIterator]()
      },
      async pull (controller) {
        const { done, value } = await iterator.next()
        if (done) {
          queueMicrotask(() => {
            controller.close()
          })
        } else {
          const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)
          controller.enqueue(new Uint8Array(buf))
        }
        return controller.desiredSize > 0
      },
      async cancel (reason) {
        await iterator.return()
      }
    },
    0
  )
}
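Not part of the vendored file: a sketch of wrapping an async iterable of chunks in a web ReadableStream via the helper above.

// Hypothetical example: the generator and its chunks are made up.
async function * chunks () {
  yield 'hello '
  yield Buffer.from('world')
}
const body = ReadableStreamFrom(chunks())
// `body` is a whatwg ReadableStream whose reader yields Uint8Array chunks.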
// The chunk should be a FormData instance and contains
// all the required methods.
function isFormDataLike (object) {
  return (
    object &&
    typeof object === 'object' &&
    typeof object.append === 'function' &&
    typeof object.delete === 'function' &&
    typeof object.get === 'function' &&
    typeof object.getAll === 'function' &&
    typeof object.has === 'function' &&
    typeof object.set === 'function' &&
    object[Symbol.toStringTag] === 'FormData'
  )
}

function throwIfAborted (signal) {
  if (!signal) { return }
  if (typeof signal.throwIfAborted === 'function') {
    signal.throwIfAborted()
  } else {
    if (signal.aborted) {
      // DOMException not available < v17.0.0
      const err = new Error('The operation was aborted')
      err.name = 'AbortError'
      throw err
    }
  }
}
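Not part of the vendored file: a sketch of how throwIfAborted behaves with an already-aborted signal.

// Hypothetical example using a plain AbortController.
const ac = new AbortController()
ac.abort()
try {
  throwIfAborted(ac.signal)
} catch (err) {
  // err.name === 'AbortError'
}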
let events
function addAbortListener (signal, listener) {
  if (typeof Symbol.dispose === 'symbol') {
    if (!events) {
      events = require('events')
    }
    if (typeof events.addAbortListener === 'function' && 'aborted' in signal) {
      return events.addAbortListener(signal, listener)
    }
  }
  if ('addEventListener' in signal) {
    signal.addEventListener('abort', listener, { once: true })
    return () => signal.removeEventListener('abort', listener)
  }
  signal.addListener('abort', listener)
  return () => signal.removeListener('abort', listener)
}
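Not part of the vendored file: a sketch of registering an abort listener through the helper above and later removing it. On runtimes where events.addAbortListener is used, the return value is a disposable rather than a function, so the removal is guarded.

// Hypothetical example.
const controller = new AbortController()
const removeListener = addAbortListener(controller.signal, () => {
  // react to the abort here
})
// later, if the listener is no longer needed:
if (typeof removeListener === 'function') removeListener()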
const hasToWellFormed = !!String.prototype.toWellFormed

/**
 * @param {string} val
 */
function toUSVString (val) {
  if (hasToWellFormed) {
    return `${val}`.toWellFormed()
  } else if (nodeUtil.toUSVString) {
    return nodeUtil.toUSVString(val)
  }

  return `${val}`
}

const kEnumerableProperty = Object.create(null)
kEnumerableProperty.enumerable = true

module.exports = {
  kEnumerableProperty,
  nop,
  isDisturbed,
  isErrored,
  isReadable,
  toUSVString,
  isReadableAborted,
  isBlobLike,
  parseOrigin,
  parseURL,
  getServerName,
  isStream,
  isIterable,
  isAsyncIterable,
  isDestroyed,
  parseRawHeaders,
  parseHeaders,
  parseKeepAliveTimeout,
  destroy,
  bodyLength,
  deepClone,
  ReadableStreamFrom,
  isBuffer,
  validateHandler,
  getSocketInfo,
  isFormDataLike,
  buildURL,
  throwIfAborted,
  addAbortListener,
  nodeMajor,
  nodeMinor,
  nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13)
}
192 node_modules/undici/lib/dispatcher-base.js generated vendored Normal file
@@ -0,0 +1,192 @@
'use strict'

const Dispatcher = require('./dispatcher')
const {
  ClientDestroyedError,
  ClientClosedError,
  InvalidArgumentError
} = require('./core/errors')
const { kDestroy, kClose, kDispatch, kInterceptors } = require('./core/symbols')

const kDestroyed = Symbol('destroyed')
const kClosed = Symbol('closed')
const kOnDestroyed = Symbol('onDestroyed')
const kOnClosed = Symbol('onClosed')
const kInterceptedDispatch = Symbol('Intercepted Dispatch')

class DispatcherBase extends Dispatcher {
  constructor () {
    super()

    this[kDestroyed] = false
    this[kOnDestroyed] = null
    this[kClosed] = false
    this[kOnClosed] = []
  }

  get destroyed () {
    return this[kDestroyed]
  }

  get closed () {
    return this[kClosed]
  }

  get interceptors () {
    return this[kInterceptors]
  }

  set interceptors (newInterceptors) {
    if (newInterceptors) {
      for (let i = newInterceptors.length - 1; i >= 0; i--) {
        const interceptor = this[kInterceptors][i]
        if (typeof interceptor !== 'function') {
          throw new InvalidArgumentError('interceptor must be an function')
        }
      }
    }

    this[kInterceptors] = newInterceptors
  }

  close (callback) {
    if (callback === undefined) {
      return new Promise((resolve, reject) => {
        this.close((err, data) => {
          return err ? reject(err) : resolve(data)
        })
      })
    }

    if (typeof callback !== 'function') {
      throw new InvalidArgumentError('invalid callback')
    }

    if (this[kDestroyed]) {
      queueMicrotask(() => callback(new ClientDestroyedError(), null))
      return
    }

    if (this[kClosed]) {
      if (this[kOnClosed]) {
        this[kOnClosed].push(callback)
      } else {
        queueMicrotask(() => callback(null, null))
      }
      return
    }

    this[kClosed] = true
    this[kOnClosed].push(callback)

    const onClosed = () => {
      const callbacks = this[kOnClosed]
      this[kOnClosed] = null
      for (let i = 0; i < callbacks.length; i++) {
        callbacks[i](null, null)
      }
    }

    // Should not error.
    this[kClose]()
      .then(() => this.destroy())
      .then(() => {
        queueMicrotask(onClosed)
      })
  }

  destroy (err, callback) {
    if (typeof err === 'function') {
      callback = err
      err = null
    }

    if (callback === undefined) {
      return new Promise((resolve, reject) => {
        this.destroy(err, (err, data) => {
          return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data)
        })
      })
    }

    if (typeof callback !== 'function') {
      throw new InvalidArgumentError('invalid callback')
    }

    if (this[kDestroyed]) {
      if (this[kOnDestroyed]) {
        this[kOnDestroyed].push(callback)
      } else {
        queueMicrotask(() => callback(null, null))
      }
      return
    }

    if (!err) {
      err = new ClientDestroyedError()
    }

    this[kDestroyed] = true
    this[kOnDestroyed] = this[kOnDestroyed] || []
    this[kOnDestroyed].push(callback)

    const onDestroyed = () => {
      const callbacks = this[kOnDestroyed]
      this[kOnDestroyed] = null
      for (let i = 0; i < callbacks.length; i++) {
        callbacks[i](null, null)
      }
    }

    // Should not error.
    this[kDestroy](err).then(() => {
      queueMicrotask(onDestroyed)
    })
  }

  [kInterceptedDispatch] (opts, handler) {
    if (!this[kInterceptors] || this[kInterceptors].length === 0) {
      this[kInterceptedDispatch] = this[kDispatch]
      return this[kDispatch](opts, handler)
    }

    let dispatch = this[kDispatch].bind(this)
    for (let i = this[kInterceptors].length - 1; i >= 0; i--) {
      dispatch = this[kInterceptors][i](dispatch)
    }
    this[kInterceptedDispatch] = dispatch
    return dispatch(opts, handler)
  }

  dispatch (opts, handler) {
    if (!handler || typeof handler !== 'object') {
      throw new InvalidArgumentError('handler must be an object')
    }

    try {
      if (!opts || typeof opts !== 'object') {
        throw new InvalidArgumentError('opts must be an object.')
      }

      if (this[kDestroyed] || this[kOnDestroyed]) {
        throw new ClientDestroyedError()
      }

      if (this[kClosed]) {
        throw new ClientClosedError()
      }

      return this[kInterceptedDispatch](opts, handler)
    } catch (err) {
      if (typeof handler.onError !== 'function') {
        throw new InvalidArgumentError('invalid onError method')
      }

      handler.onError(err)

      return false
    }
  }
}

module.exports = DispatcherBase
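Not part of the vendored file: a sketch of the promise form of close() and destroy() that DispatcherBase gives every dispatcher built on it (for example undici's Agent); when no callback is passed, both return a promise.

// Hypothetical shutdown helper.
async function shutdown (dispatcher) {
  // Waits for pending work to finish, then tears the dispatcher down.
  await dispatcher.close()
  // Alternatively, abort in-flight requests immediately:
  // await dispatcher.destroy(new Error('shutting down'))
}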
19 node_modules/undici/lib/dispatcher.js generated vendored Normal file
@@ -0,0 +1,19 @@
'use strict'

const EventEmitter = require('events')

class Dispatcher extends EventEmitter {
  dispatch () {
    throw new Error('not implemented')
  }

  close () {
    throw new Error('not implemented')
  }

  destroy () {
    throw new Error('not implemented')
  }
}

module.exports = Dispatcher
Some files were not shown because too many files have changed in this diff.