Merge pull request 'Update to the npm installation and renaming of secrets.' (#27) from npm-update into main
All checks were successful: Auto Maintenance Cycle / pre-commit Autoupdate (push) succeeded in 30s.

Reviewed-on: #27
Philipp Horstenkamp committed on 2024-05-21 01:05:33 +02:00
Commit 30c57b6a58
40 changed files with 787 additions and 328 deletions


@@ -2,10 +2,8 @@ name: Auto Maintenance Cycle
 on:
   push:
-    paths:
-      - .gitea/workflows/maintenance.yaml
   schedule:
-    - cron: '0 0 * * *'
+    - cron: 0 0 * * *
   workflow_dispatch:
 jobs:
@@ -15,16 +13,11 @@ jobs:
     env:
       SPECIFIC_BRANCH: chore/update-pre-commit
     steps:
-      - name: Check actor
-        run: echo "Actor ${{ github.actor }}"
       - uses: actions/checkout@v4
         with:
-          ref: main
-          token: ${{ secrets.REPO_TOKEN }}
-      - name: Attempt to checkout specific branch
-        uses: actions/checkout@v4
-        with:
-          ref: ${{ env.SPECIFIC_BRANCH }}
-          token: ${{ secrets.REPO_TOKEN }}
-        continue-on-error: true
+          token: ${{ secrets.REPO_TOKEN_SERVER }}
       - uses: actions/setup-python@v4
       - run: pip install pre-commit
         shell: bash
@@ -45,7 +38,7 @@ jobs:
        if: steps.auto-commit-action.outputs.changes_detected == 'true'
        uses: https://git.horstenkamp.eu/Philipp/gitea-act-create-pr@main
        with:
-         token: ${{ secrets.REPO_TOKEN }}
+         token: ${{ secrets.REPO_TOKEN_SERVER }}
          branch: ${{ env.SPECIFIC_BRANCH }}
          title: Updates to the pre-commit action created at ${{ env.CURRENT_DATE }}
          body: Update to the pre-commit action.


@@ -1,7 +1,7 @@
 exclude: ^node_modules/
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v4.6.0
     hooks:
       - id: end-of-file-fixer
         exclude: (.txt$|.ipynb$|README.md$|readme.mde$)
@@ -25,7 +25,7 @@ repos:
         args: [--branch, main]
   - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
-    rev: v2.12.0
+    rev: v2.13.0
     hooks:
       - id: pretty-format-ini
         args: [--autofix]
@@ -35,7 +35,7 @@ repos:
         args: [--autofix]
   - repo: https://github.com/frnmst/md-toc
-    rev: 8.2.2
+    rev: 9.0.0
     hooks:
       - id: md-toc


@@ -42,8 +42,8 @@ async function getAndPostPullRequests() {
     due_date: dueDate || undefined,
     milestone: milestone ? parseInt(milestone, 10) : 0,
   };
-  // Perform the POST request
+  // Perform the POST request
   core.info(`URL for POST request: ${url}`);
   core.info(`POST data being sent: ${JSON.stringify(postData, null, 2)}`);
   const postResponse = await fetch(`${url}?access_token=${token}`, {
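
For context, a minimal, self-contained sketch of the POST call this hunk surrounds. Only the fetch call shape (token passed as an `access_token` query parameter) and the `core.info` logging are taken from the diff; the function name, request options, and error handling are assumptions for illustration.

```js
// Hypothetical reconstruction for illustration only; values are stand-ins.
const core = require('@actions/core');

async function postPullRequest(url, token, postData) {
  core.info(`URL for POST request: ${url}`);
  core.info(`POST data being sent: ${JSON.stringify(postData, null, 2)}`);

  // Gitea-style API call: the token is passed as an access_token query parameter.
  const postResponse = await fetch(`${url}?access_token=${token}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(postData),
  });

  if (!postResponse.ok) {
    core.setFailed(`POST request failed with status ${postResponse.status}`);
  }
  return postResponse;
}
```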

node_modules/.package-lock.json (generated, vendored)

@@ -14,18 +14,18 @@
       }
     },
     "node_modules/@actions/http-client": {
-      "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.0.tgz",
-      "integrity": "sha512-q+epW0trjVUUHboliPb4UF9g2msf+w61b32tAkFEwL/IwP0DQWgbCMM0Hbe3e3WXSKz5VcUXbzJQgy8Hkra/Lg==",
+      "version": "2.2.1",
+      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.1.tgz",
+      "integrity": "sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==",
       "dependencies": {
         "tunnel": "^0.0.6",
         "undici": "^5.25.4"
       }
     },
     "node_modules/@fastify/busboy": {
-      "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.0.tgz",
-      "integrity": "sha512-+KpH+QxZU7O4675t3mnkQKcZZg56u+K/Ct2K+N2AZYNVK8kyeo/bI18tI8aPm3tvNNRyTWfj6s5tnGNlcbQRsA==",
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
+      "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
       "engines": {
         "node": ">=14"
       }
@@ -39,9 +39,9 @@
       }
     },
     "node_modules/undici": {
-      "version": "5.27.2",
-      "resolved": "https://registry.npmjs.org/undici/-/undici-5.27.2.tgz",
-      "integrity": "sha512-iS857PdOEy/y3wlM3yRp+6SNQQ6xU0mmZcwRSriqk+et/cwWAtwmIGf6WkoDN2EK/AMdCO/dfXzIwi+rFMrjjQ==",
+      "version": "5.28.4",
+      "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz",
+      "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==",
       "dependencies": {
         "@fastify/busboy": "^2.0.0"
       },


@@ -502,7 +502,7 @@ class HttpClient {
         if (this._keepAlive && useProxy) {
             agent = this._proxyAgent;
         }
-        if (this._keepAlive && !useProxy) {
+        if (!useProxy) {
            agent = this._agent;
         }
         // if agent is already assigned use that agent.
@@ -534,16 +534,12 @@ class HttpClient {
             agent = tunnelAgent(agentOptions);
             this._proxyAgent = agent;
         }
-        // if reusing agent across request and tunneling agent isn't assigned create a new agent
-        if (this._keepAlive && !agent) {
+        // if tunneling agent isn't assigned create a new agent
+        if (!agent) {
             const options = { keepAlive: this._keepAlive, maxSockets };
             agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
             this._agent = agent;
         }
-        // if not using private agent and tunnel agent isn't setup then use global agent
-        if (!agent) {
-            agent = usingSsl ? https.globalAgent : http.globalAgent;
-        }
         if (usingSsl && this._ignoreSslError) {
             // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
             // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options

File diff suppressed because one or more lines are too long


@@ -1,6 +1,6 @@
 {
   "name": "@actions/http-client",
-  "version": "2.2.0",
+  "version": "2.2.1",
   "description": "Actions Http Client",
   "keywords": [
     "github",


@@ -2,10 +2,10 @@
 <div align="center">
 
-[![Build Status](https://github.com/fastify/busboy/workflows/ci/badge.svg)](https://github.com/fastify/busboy/actions)
+[![Build Status](https://github.com/fastify/busboy/actions/workflows/ci.yml/badge.svg)](https://github.com/fastify/busboy/actions)
 [![Coverage Status](https://coveralls.io/repos/fastify/busboy/badge.svg?branch=master)](https://coveralls.io/r/fastify/busboy?branch=master)
 [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/)
-[![Security Responsible Disclosure](https://img.shields.io/badge/Security-Responsible%20Disclosure-yellow.svg)](https://github.com/nodejs/security-wg/blob/HEAD/processes/responsible_disclosure_template.md)
+[![Security Responsible Disclosure](https://img.shields.io/badge/Security-Responsible%20Disclosure-yellow.svg)](https://github.com/fastify/.github/blob/main/SECURITY.md)
 
 </div>


@@ -78,7 +78,7 @@ Dicer.prototype._write = function (data, encoding, cb) {
   if (this._headerFirst && this._isPreamble) {
     if (!this._part) {
       this._part = new PartStream(this._partOpts)
-      if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() }
+      if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() }
     }
     const r = this._hparser.push(data)
     if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }
@@ -135,7 +135,7 @@ Dicer.prototype._oninfo = function (isMatch, data, start, end) {
      }
    }
    if (this._dashes === 2) {
-      if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) }
+      if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) }
      this.reset()
      this._finished = true
      // no more parts will be added
@@ -153,7 +153,13 @@ Dicer.prototype._oninfo = function (isMatch, data, start, end) {
      this._part._read = function (n) {
        self._unpause()
      }
-      if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() }
+      if (this._isPreamble && this.listenerCount('preamble') !== 0) {
+        this.emit('preamble', this._part)
+      } else if (this._isPreamble !== true && this.listenerCount('part') !== 0) {
+        this.emit('part', this._part)
+      } else {
+        this._ignore()
+      }
      if (!this._isPreamble) { this._inHeader = true }
    }
    if (data && start < end && !this._ignoreData) {


@@ -163,7 +163,7 @@ function Multipart (boy, cfg) {
         ++nfiles
 
-        if (!boy._events.file) {
+        if (boy.listenerCount('file') === 0) {
           self.parser._ignore()
           return
         }


@@ -96,7 +96,7 @@ const decoders = {
     if (textDecoders.has(this.toString())) {
       try {
         return textDecoders.get(this).decode(data)
-      } catch (e) { }
+      } catch {}
     }
     return typeof data === 'string'
       ? data
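
The hunk above swaps an unused `catch (e)` binding for an optional catch binding. A small illustrative sketch of the same pattern follows; the decoder and fallback here are assumptions, not busboy's code.

```js
// Illustration of the `catch {}` pattern; not taken from the library.
const decoder = new TextDecoder('utf-8');

function safeDecode(data) {
  try {
    return decoder.decode(data);
  } catch {} // the caught error is intentionally unused and discarded
  return typeof data === 'string' ? data : String(data);
}
```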


@@ -1,6 +1,6 @@
 {
   "name": "@fastify/busboy",
-  "version": "2.1.0",
+  "version": "2.1.1",
   "private": false,
   "author": "Brian White <mscdex@mscdex.net>",
   "contributors": [
@@ -43,7 +43,7 @@
     "standard": "^17.0.0",
     "tap": "^16.3.8",
     "tinybench": "^2.5.1",
-    "tsd": "^0.29.0",
+    "tsd": "^0.30.0",
     "typescript": "^5.0.2"
   },
   "keywords": [
@@ -55,7 +55,7 @@
   "license": "MIT",
   "repository": {
     "type": "git",
-    "url": "https://github.com/fastify/busboy.git"
+    "url": "git+https://github.com/fastify/busboy.git"
   },
   "tsd": {
     "directory": "test/types",


@@ -33,7 +33,7 @@ Returns: `Client`
 * **autoSelectFamily**: `boolean` (optional) - Default: depends on local Node version, on Node 18.13.0 and above is `false`. Enables a family autodetection algorithm that loosely implements section 5 of [RFC 8305](https://tools.ietf.org/html/rfc8305#section-5). See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details. This option is ignored if not supported by the current Node version.
 * **autoSelectFamilyAttemptTimeout**: `number` - Default: depends on local Node version, on Node 18.13.0 and above is `250`. The amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the `autoSelectFamily` option. See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details.
 * **allowH2**: `boolean` - Default: `false`. Enables support for H2 if the server has assigned bigger priority to it through ALPN negotiation.
-* **maxConcurrentStreams**: `number` - Default: `100`. Dictates the maximum number of concurrent streams for a single H2 session. It can be overriden by a SETTINGS remote frame.
+* **maxConcurrentStreams**: `number` - Default: `100`. Dictates the maximum number of concurrent streams for a single H2 session. It can be overridden by a SETTINGS remote frame.
 
 #### Parameter: `ConnectOptions`


@@ -35,8 +35,7 @@ const mockPool = mockAgent.get('http://localhost:3000')
 
 ### `MockPool.intercept(options)`
 
-This method defines the interception rules for matching against requests for a MockPool or MockPool. We can intercept multiple times on a single instance, but each intercept is only used once.
-For example if you expect to make 2 requests inside a test, you need to call `intercept()` twice. Assuming you use `disableNetConnect()` you will get `MockNotMatchedError` on the second request when you only call `intercept()` once.
+This method defines the interception rules for matching against requests for a MockPool or MockPool. We can intercept multiple times on a single instance, but each intercept is only used once. For example if you expect to make 2 requests inside a test, you need to call `intercept()` twice. Assuming you use `disableNetConnect()` you will get `MockNotMatchedError` on the second request when you only call `intercept()` once.
 
 When defining interception rules, all the rules must pass for a request to be intercepted. If a request is not intercepted, a real request will be attempted.
@@ -54,11 +53,11 @@ Returns: `MockInterceptor` corresponding to the input options.
 
 ### Parameter: `MockPoolInterceptOptions`
 
-* **path** `string | RegExp | (path: string) => boolean` - a matcher for the HTTP request path.
+* **path** `string | RegExp | (path: string) => boolean` - a matcher for the HTTP request path. When a `RegExp` or callback is used, it will match against the request path including all query parameters in alphabetical order. When a `string` is provided, the query parameters can be conveniently specified through the `MockPoolInterceptOptions.query` setting.
 * **method** `string | RegExp | (method: string) => boolean` - (optional) - a matcher for the HTTP request method. Defaults to `GET`.
 * **body** `string | RegExp | (body: string) => boolean` - (optional) - a matcher for the HTTP request body.
 * **headers** `Record<string, string | RegExp | (body: string) => boolean`> - (optional) - a matcher for the HTTP request headers. To be intercepted, a request must match all defined headers. Extra headers not defined here may (or may not) be included in the request and do not affect the interception in any way.
-* **query** `Record<string, any> | null` - (optional) - a matcher for the HTTP request query string params.
+* **query** `Record<string, any> | null` - (optional) - a matcher for the HTTP request query string params. Only applies when a `string` was provided for `MockPoolInterceptOptions.path`.
 
 ### Return: `MockInterceptor`
@@ -458,6 +457,41 @@ const result3 = await request('http://localhost:3000/foo')
 // Will not match and make attempt a real request
 ```
 
+#### Example - Mocked request with path callback
+
+```js
+import { MockAgent, setGlobalDispatcher, request } from 'undici'
+import querystring from 'querystring'
+
+const mockAgent = new MockAgent()
+setGlobalDispatcher(mockAgent)
+
+const mockPool = mockAgent.get('http://localhost:3000')
+
+const matchPath = requestPath => {
+  const [pathname, search] = requestPath.split('?')
+  const requestQuery = querystring.parse(search)
+
+  if (!pathname.startsWith('/foo')) {
+    return false
+  }
+
+  if (!Object.keys(requestQuery).includes('foo') || requestQuery.foo !== 'bar') {
+    return false
+  }
+
+  return true
+}
+
+mockPool.intercept({
+  path: matchPath,
+  method: 'GET'
+}).reply(200, 'foo')
+
+const result = await request('http://localhost:3000/foo?foo=bar')
+// Will match and return mocked data
+```
+
 ### `MockPool.close()`
 
 Closes the mock pool and de-registers from associated MockAgent.

node_modules/undici/docs/api/RetryHandler.md (generated, vendored, new file)

@ -0,0 +1,108 @@
# Class: RetryHandler
Extends: `undici.DispatcherHandlers`
A handler class that implements the retry logic for a request.
## `new RetryHandler(dispatchOptions, retryHandlers, [retryOptions])`
Arguments:
- **options** `Dispatch.DispatchOptions & RetryOptions` (required) - It is an intersection of `Dispatcher.DispatchOptions` and `RetryOptions`.
- **retryHandlers** `RetryHandlers` (required) - Object containing the `dispatch` to be used on every retry, and `handler` for handling the `dispatch` lifecycle.
Returns: `retryHandler`
### Parameter: `Dispatch.DispatchOptions & RetryOptions`
Extends: [`Dispatch.DispatchOptions`](Dispatcher.md#parameter-dispatchoptions).
#### `RetryOptions`
- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => void` (optional) - Function to be called after every retry. It should pass error if no more retries should be performed.
- **maxRetries** `number` (optional) - Maximum number of retries. Default: `5`
- **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds)
- **minTimeout** `number` (optional) - Minimum number of milliseconds to wait before retrying. Default: `500` (half a second)
- **timeoutFactor** `number` (optional) - Factor to multiply the timeout by for each retry attempt. Default: `2`
- **retryAfter** `boolean` (optional) - It enables automatic retry after the `Retry-After` header is received. Default: `true`
-
- **methods** `string[]` (optional) - Array of HTTP methods to retry. Default: `['GET', 'PUT', 'HEAD', 'OPTIONS', 'DELETE']`
- **statusCodes** `number[]` (optional) - Array of HTTP status codes to retry. Default: `[429, 500, 502, 503, 504]`
- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN','ENETUNREACH', 'EHOSTDOWN',
**`RetryContext`**
- `state`: `RetryState` - Current retry state. It can be mutated.
- `opts`: `Dispatch.DispatchOptions & RetryOptions` - Options passed to the retry handler.
### Parameter `RetryHandlers`
- **dispatch** `(options: Dispatch.DispatchOptions, handlers: Dispatch.DispatchHandlers) => Promise<Dispatch.DispatchResponse>` (required) - Dispatch function to be called after every retry.
- **handler** Extends [`Dispatch.DispatchHandlers`](Dispatcher.md#dispatcherdispatchoptions-handler) (required) - Handler function to be called after the request is successful or the retries are exhausted.
Examples:
```js
const client = new Client(`http://localhost:${server.address().port}`);
const chunks = [];
const handler = new RetryHandler(
{
...dispatchOptions,
retryOptions: {
// custom retry function
retry: function (err, state, callback) {
counter++;
if (err.code && err.code === "UND_ERR_DESTROYED") {
callback(err);
return;
}
if (err.statusCode === 206) {
callback(err);
return;
}
setTimeout(() => callback(null), 1000);
},
},
},
{
dispatch: (...args) => {
return client.dispatch(...args);
},
handler: {
onConnect() {},
onBodySent() {},
onHeaders(status, _rawHeaders, resume, _statusMessage) {
// do something with headers
},
onData(chunk) {
chunks.push(chunk);
return true;
},
onComplete() {},
onError() {
// handle error properly
},
},
}
);
```
#### Example - Basic RetryHandler with defaults
```js
const client = new Client(`http://localhost:${server.address().port}`);
const handler = new RetryHandler(dispatchOptions, {
dispatch: client.dispatch.bind(client),
handler: {
onConnect() {},
onBodySent() {},
onHeaders(status, _rawHeaders, resume, _statusMessage) {},
onData(chunk) {},
onComplete() {},
onError(err) {},
},
});
```

node_modules/undici/index.js (generated, vendored)

@@ -15,6 +15,7 @@ const MockAgent = require('./lib/mock/mock-agent')
 const MockPool = require('./lib/mock/mock-pool')
 const mockErrors = require('./lib/mock/mock-errors')
 const ProxyAgent = require('./lib/proxy-agent')
+const RetryHandler = require('./lib/handler/RetryHandler')
 const { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global')
 const DecoratorHandler = require('./lib/handler/DecoratorHandler')
 const RedirectHandler = require('./lib/handler/RedirectHandler')
@@ -36,6 +37,7 @@ module.exports.Pool = Pool
 module.exports.BalancedPool = BalancedPool
 module.exports.Agent = Agent
 module.exports.ProxyAgent = ProxyAgent
+module.exports.RetryHandler = RetryHandler
 module.exports.DecoratorHandler = DecoratorHandler
 module.exports.RedirectHandler = RedirectHandler


@@ -177,3 +177,4 @@ function request (opts, callback) {
 }
 
 module.exports = request
+module.exports.RequestHandler = RequestHandler


@@ -16,6 +16,8 @@ const kBody = Symbol('kBody')
 const kAbort = Symbol('abort')
 const kContentType = Symbol('kContentType')
 
+const noop = () => {}
+
 module.exports = class BodyReadable extends Readable {
   constructor ({
     resume,
@@ -149,37 +151,50 @@ module.exports = class BodyReadable extends Readable {
     return this[kBody]
   }
 
-  async dump (opts) {
+  dump (opts) {
     let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144
     const signal = opts && opts.signal
-    const abortFn = () => {
-      this.destroy()
-    }
-    let signalListenerCleanup
+
     if (signal) {
-      if (typeof signal !== 'object' || !('aborted' in signal)) {
-        throw new InvalidArgumentError('signal must be an AbortSignal')
-      }
-      util.throwIfAborted(signal)
-      signalListenerCleanup = util.addAbortListener(signal, abortFn)
-    }
-    try {
-      for await (const chunk of this) {
-        util.throwIfAborted(signal)
-        limit -= Buffer.byteLength(chunk)
-        if (limit < 0) {
-          return
-        }
+      try {
+        if (typeof signal !== 'object' || !('aborted' in signal)) {
+          throw new InvalidArgumentError('signal must be an AbortSignal')
+        }
+
+        util.throwIfAborted(signal)
+      } catch (err) {
+        return Promise.reject(err)
       }
-    } catch {
-      util.throwIfAborted(signal)
-    } finally {
-      if (typeof signalListenerCleanup === 'function') {
-        signalListenerCleanup()
-      } else if (signalListenerCleanup) {
-        signalListenerCleanup[Symbol.dispose]()
-      }
     }
+
+    if (this.closed) {
+      return Promise.resolve(null)
+    }
+
+    return new Promise((resolve, reject) => {
+      const signalListenerCleanup = signal
+        ? util.addAbortListener(signal, () => {
+          this.destroy()
+        })
+        : noop
+
+      this
+        .on('close', function () {
+          signalListenerCleanup()
+          if (signal && signal.aborted) {
+            reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' }))
+          } else {
+            resolve(null)
+          }
+        })
+        .on('error', noop)
+        .on('data', function (chunk) {
+          limit -= chunk.length
+          if (limit <= 0) {
+            this.destroy()
+          }
+        })
+        .resume()
+    })
   }
 }
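
A short, hedged usage sketch of the reworked `dump()`: it now always returns a promise (rejecting early on an invalid signal) instead of draining the body with `for await`. Only the `limit`/`signal` options come from the diff; the surrounding function is illustrative.

```js
// Hypothetical caller; `body` stands for an undici BodyReadable as in the hunk above.
async function discardBody(body) {
  const controller = new AbortController();
  // Drain and throw away up to 1024 bytes, destroying the stream if the signal fires.
  await body.dump({ limit: 1024, signal: controller.signal });
}
```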


@@ -1,5 +1,5 @@
 'use strict'
 
 module.exports = {
-  kConstruct: Symbol('constructable')
+  kConstruct: require('../core/symbols').kConstruct
 }

node_modules/undici/lib/client.js (generated, vendored)

@@ -917,11 +917,9 @@ class Parser {
        socket[kReset] = true
      }
 
-      let pause
-      try {
-        pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false
-      } catch (err) {
-        util.destroy(socket, err)
+      const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false
+
+      if (request.aborted) {
        return -1
      }
@@ -968,13 +966,8 @@
      this.bytesRead += buf.length
 
-      try {
-        if (request.onData(buf) === false) {
-          return constants.ERROR.PAUSED
-        }
-      } catch (err) {
-        util.destroy(socket, err)
-        return -1
+      if (request.onData(buf) === false) {
+        return constants.ERROR.PAUSED
      }
    }
@@ -1015,11 +1008,7 @@
        return -1
      }
 
-      try {
-        request.onComplete(headers)
-      } catch (err) {
-        errorRequest(client, request, err)
-      }
+      request.onComplete(headers)
 
      client[kQueue][client[kRunningIdx]++] = null
@@ -1183,7 +1172,7 @@ async function connect (client) {
    const idx = hostname.indexOf(']')
    assert(idx !== -1)
-    const ip = hostname.substr(1, idx - 1)
+    const ip = hostname.substring(1, idx)
 
    assert(net.isIP(ip))
    hostname = ip
@@ -1682,6 +1671,7 @@ function writeH2 (client, session, request) {
    return false
  }
 
+  /** @type {import('node:http2').ClientHttp2Stream} */
  let stream
  const h2State = client[kHTTP2SessionState]
@@ -1777,14 +1767,10 @@
  const shouldEndStream = method === 'GET' || method === 'HEAD'
 
  if (expectContinue) {
    headers[HTTP2_HEADER_EXPECT] = '100-continue'
-    /**
-     * @type {import('node:http2').ClientHttp2Stream}
-     */
    stream = session.request(headers, { endStream: shouldEndStream, signal })
    stream.once('continue', writeBodyH2)
  } else {
-    /** @type {import('node:http2').ClientHttp2Stream} */
    stream = session.request(headers, {
      endStream: shouldEndStream,
      signal
@@ -1796,7 +1782,9 @@
  ++h2State.openStreams
 
  stream.once('response', headers => {
-    if (request.onHeaders(Number(headers[HTTP2_HEADER_STATUS]), headers, stream.resume.bind(stream), '') === false) {
+    const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers
+
+    if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) {
      stream.pause()
    }
  })
@@ -1806,13 +1794,17 @@
  })
 
  stream.on('data', (chunk) => {
-    if (request.onData(chunk) === false) stream.pause()
+    if (request.onData(chunk) === false) {
+      stream.pause()
+    }
  })
 
  stream.once('close', () => {
    h2State.openStreams -= 1
    // TODO(HTTP/2): unref only if current streams count is 0
-    if (h2State.openStreams === 0) session.unref()
+    if (h2State.openStreams === 0) {
+      session.unref()
+    }
  })
 
  stream.once('error', function (err) {
@@ -1972,7 +1964,11 @@ function writeStream ({ h2stream, body, client, request, socket, contentLength,
    }
  }
 
  const onAbort = function () {
-    onFinished(new RequestAbortedError())
+    if (finished) {
+      return
+    }
+    const err = new RequestAbortedError()
+    queueMicrotask(() => onFinished(err))
  }
 
  const onFinished = function (err) {
    if (finished) {
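
The final hunk defers delivery of the abort error with `queueMicrotask`. A standalone, hedged sketch of that deferral pattern follows; the names below are illustrative, not undici internals.

```js
// Illustration only: the abort error is handed to the callback on a microtask
// instead of synchronously from inside the 'abort' event.
function makeAbortHandler(onFinished) {
  let finished = false;
  const onAbort = () => {
    if (finished) return;
    const err = new Error('Request aborted'); // stand-in for undici's RequestAbortedError
    queueMicrotask(() => onFinished(err));
  };
  return { onAbort, markFinished: () => { finished = true; } };
}

const { onAbort } = makeAbortHandler(err => console.error(err.message));
onAbort(); // "Request aborted" is reported on the next microtask
```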


@@ -193,6 +193,19 @@ class ResponseExceededMaxSizeError extends UndiciError {
   }
 }
 
+class RequestRetryError extends UndiciError {
+  constructor (message, code, { headers, data }) {
+    super(message)
+    Error.captureStackTrace(this, RequestRetryError)
+    this.name = 'RequestRetryError'
+    this.message = message || 'Request retry error'
+    this.code = 'UND_ERR_REQ_RETRY'
+    this.statusCode = code
+    this.data = data
+    this.headers = headers
+  }
+}
+
 module.exports = {
   HTTPParserError,
   UndiciError,
@@ -212,5 +225,6 @@ module.exports = {
   NotSupportedError,
   ResponseContentLengthMismatchError,
   BalancedPoolMissingUpstreamError,
-  ResponseExceededMaxSizeError
+  ResponseExceededMaxSizeError,
+  RequestRetryError
 }
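
Based only on the constructor added above, here is a hedged sketch of how calling code might recognise this error; the retry flow that actually produces it is not part of this diff.

```js
// Illustrative consumer-side guard for the error shape introduced above.
function isRequestRetryError(err) {
  return Boolean(err) && err.name === 'RequestRetryError' && err.code === 'UND_ERR_REQ_RETRY';
}

// Example handling, assuming `err` came from a failed, retried request.
function describeRetryFailure(err) {
  if (!isRequestRetryError(err)) return 'not a retry error';
  const retryAfter = err.headers && err.headers['retry-after'];
  return `gave up with status ${err.statusCode}, Retry-After: ${retryAfter || 'n/a'}`;
}
```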


@@ -230,9 +230,9 @@
   onBodySent (chunk) {
     if (this[kHandler].onBodySent) {
       try {
-        this[kHandler].onBodySent(chunk)
+        return this[kHandler].onBodySent(chunk)
       } catch (err) {
-        this.onError(err)
+        this.abort(err)
       }
     }
   }
@@ -244,9 +244,9 @@
     if (this[kHandler].onRequestSent) {
       try {
-        this[kHandler].onRequestSent()
+        return this[kHandler].onRequestSent()
       } catch (err) {
-        this.onError(err)
+        this.abort(err)
       }
     }
   }
@@ -271,14 +271,23 @@
       channels.headers.publish({ request: this, response: { statusCode, headers, statusText } })
     }
 
-    return this[kHandler].onHeaders(statusCode, headers, resume, statusText)
+    try {
+      return this[kHandler].onHeaders(statusCode, headers, resume, statusText)
+    } catch (err) {
+      this.abort(err)
+    }
   }
 
   onData (chunk) {
     assert(!this.aborted)
     assert(!this.completed)
 
-    return this[kHandler].onData(chunk)
+    try {
+      return this[kHandler].onData(chunk)
+    } catch (err) {
+      this.abort(err)
+      return false
+    }
   }
 
   onUpgrade (statusCode, headers, socket) {
@@ -297,7 +306,13 @@
     if (channels.trailers.hasSubscribers) {
       channels.trailers.publish({ request: this, trailers })
     }
 
-    return this[kHandler].onComplete(trailers)
+    try {
+      return this[kHandler].onComplete(trailers)
+    } catch (err) {
+      // TODO (fix): This might be a bad idea?
+      this.onError(err)
+    }
   }
 
   onError (error) {
@@ -311,6 +326,7 @@
       return
     }
 
     this.aborted = true
+
     return this[kHandler].onError(error)
   }


@@ -57,5 +57,7 @@ module.exports = {
   kHTTP2BuildRequest: Symbol('http2 build request'),
   kHTTP1BuildRequest: Symbol('http1 build request'),
   kHTTP2CopyHeaders: Symbol('http2 copy headers'),
-  kHTTPConnVersion: Symbol('http connection version')
+  kHTTPConnVersion: Symbol('http connection version'),
+  kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),
+  kConstruct: Symbol('constructable')
 }

node_modules/undici/lib/core/util.js (generated, vendored)

@@ -9,6 +9,7 @@ const { InvalidArgumentError } = require('./errors')
 const { Blob } = require('buffer')
 const nodeUtil = require('util')
 const { stringify } = require('querystring')
+const { headerNameLowerCasedRecord } = require('./constants')
 
 const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
@@ -125,13 +126,13 @@ function getHostname (host) {
     const idx = host.indexOf(']')
     assert(idx !== -1)
-    return host.substr(1, idx - 1)
+    return host.substring(1, idx)
   }
 
   const idx = host.indexOf(':')
   if (idx === -1) return host
 
-  return host.substr(0, idx)
+  return host.substring(0, idx)
 }
 
 // IP addresses are not valid server names per RFC6066
@@ -218,6 +219,15 @@ function parseKeepAliveTimeout (val) {
   return m ? parseInt(m[1], 10) * 1000 : null
 }
 
+/**
+ * Retrieves a header name and returns its lowercase value.
+ * @param {string | Buffer} value Header name
+ * @returns {string}
+ */
+function headerNameToString (value) {
+  return headerNameLowerCasedRecord[value] || value.toLowerCase()
+}
+
 function parseHeaders (headers, obj = {}) {
   // For H2 support
   if (!Array.isArray(headers)) return headers
@@ -228,7 +238,7 @@ function parseHeaders (headers, obj = {}) {
     if (!val) {
       if (Array.isArray(headers[i + 1])) {
-        obj[key] = headers[i + 1]
+        obj[key] = headers[i + 1].map(x => x.toString('utf8'))
       } else {
         obj[key] = headers[i + 1].toString('utf8')
       }
@@ -431,16 +441,7 @@ function throwIfAborted (signal) {
   }
 }
 
-let events
 function addAbortListener (signal, listener) {
-  if (typeof Symbol.dispose === 'symbol') {
-    if (!events) {
-      events = require('events')
-    }
-    if (typeof events.addAbortListener === 'function' && 'aborted' in signal) {
-      return events.addAbortListener(signal, listener)
-    }
-  }
   if ('addEventListener' in signal) {
     signal.addEventListener('abort', listener, { once: true })
     return () => signal.removeEventListener('abort', listener)
@@ -464,6 +465,21 @@ function toUSVString (val) {
   return `${val}`
 }
 
+// Parsed accordingly to RFC 9110
+// https://www.rfc-editor.org/rfc/rfc9110#field.content-range
+function parseRangeHeader (range) {
+  if (range == null || range === '') return { start: 0, end: null, size: null }
+
+  const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null
+  return m
+    ? {
+        start: parseInt(m[1]),
+        end: m[2] ? parseInt(m[2]) : null,
+        size: m[3] ? parseInt(m[3]) : null
+      }
+    : null
+}
+
 const kEnumerableProperty = Object.create(null)
 kEnumerableProperty.enumerable = true
@@ -483,6 +499,7 @@ module.exports = {
   isIterable,
   isAsyncIterable,
   isDestroyed,
+  headerNameToString,
   parseRawHeaders,
   parseHeaders,
   parseKeepAliveTimeout,
@@ -497,7 +514,9 @@ module.exports = {
   buildURL,
   throwIfAborted,
   addAbortListener,
+  parseRangeHeader,
   nodeMajor,
   nodeMinor,
-  nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13)
+  nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13),
+  safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE']
 }


@@ -119,17 +119,14 @@
  * @param {boolean} excludeFragment
  */
 function URLSerializer (url, excludeFragment = false) {
-  const href = url.href
-
   if (!excludeFragment) {
-    return href
+    return url.href
   }
 
-  const hash = href.lastIndexOf('#')
-  if (hash === -1) {
-    return href
-  }
+  const href = url.href
+  const hashLength = url.hash.length
 
-  return href.slice(0, hash)
+  return hashLength === 0 ? href : href.substring(0, href.length - hashLength)
 }
 
 // https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points


@@ -2,7 +2,7 @@
 'use strict'
 
-const { kHeadersList } = require('../core/symbols')
+const { kHeadersList, kConstruct } = require('../core/symbols')
 const { kGuard } = require('./symbols')
 const { kEnumerableProperty } = require('../core/util')
 const {
@@ -16,6 +16,13 @@ const assert = require('assert')
 const kHeadersMap = Symbol('headers map')
 const kHeadersSortedMap = Symbol('headers map sorted')
 
+/**
+ * @param {number} code
+ */
+function isHTTPWhiteSpaceCharCode (code) {
+  return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020
+}
+
 /**
  * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize
  * @param {string} potentialValue
@@ -24,12 +31,12 @@ function headerValueNormalize (potentialValue) {
   // To normalize a byte sequence potentialValue, remove
   // any leading and trailing HTTP whitespace bytes from
   // potentialValue.
+  let i = 0; let j = potentialValue.length
 
-  // Trimming the end with `.replace()` and a RegExp is typically subject to
-  // ReDoS. This is safer and faster.
-  let i = potentialValue.length
-  while (/[\r\n\t ]/.test(potentialValue.charAt(--i)));
-  return potentialValue.slice(0, i + 1).replace(/^[\r\n\t ]+/, '')
+  while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j
+  while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i
+
+  return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j)
 }
 
 function fill (headers, object) {
@ -38,7 +45,8 @@ function fill (headers, object) {
// 1. If object is a sequence, then for each header in object: // 1. If object is a sequence, then for each header in object:
// Note: webidl conversion to array has already been done. // Note: webidl conversion to array has already been done.
if (Array.isArray(object)) { if (Array.isArray(object)) {
for (const header of object) { for (let i = 0; i < object.length; ++i) {
const header = object[i]
// 1. If header does not contain exactly two items, then throw a TypeError. // 1. If header does not contain exactly two items, then throw a TypeError.
if (header.length !== 2) { if (header.length !== 2) {
throw webidl.errors.exception({ throw webidl.errors.exception({
@ -48,15 +56,16 @@ function fill (headers, object) {
} }
// 2. Append (headers first item, headers second item) to headers. // 2. Append (headers first item, headers second item) to headers.
headers.append(header[0], header[1]) appendHeader(headers, header[0], header[1])
} }
} else if (typeof object === 'object' && object !== null) { } else if (typeof object === 'object' && object !== null) {
// Note: null should throw // Note: null should throw
// 2. Otherwise, object is a record, then for each key → value in object, // 2. Otherwise, object is a record, then for each key → value in object,
// append (key, value) to headers // append (key, value) to headers
for (const [key, value] of Object.entries(object)) { const keys = Object.keys(object)
headers.append(key, value) for (let i = 0; i < keys.length; ++i) {
appendHeader(headers, keys[i], object[keys[i]])
} }
} else { } else {
throw webidl.errors.conversionFailed({ throw webidl.errors.conversionFailed({
@ -67,6 +76,50 @@ function fill (headers, object) {
} }
} }
/**
* @see https://fetch.spec.whatwg.org/#concept-headers-append
*/
function appendHeader (headers, name, value) {
// 1. Normalize value.
value = headerValueNormalize(value)
// 2. If name is not a header name or value is not a
// header value, then throw a TypeError.
if (!isValidHeaderName(name)) {
throw webidl.errors.invalidArgument({
prefix: 'Headers.append',
value: name,
type: 'header name'
})
} else if (!isValidHeaderValue(value)) {
throw webidl.errors.invalidArgument({
prefix: 'Headers.append',
value,
type: 'header value'
})
}
// 3. If headerss guard is "immutable", then throw a TypeError.
// 4. Otherwise, if headerss guard is "request" and name is a
// forbidden header name, return.
// Note: undici does not implement forbidden header names
if (headers[kGuard] === 'immutable') {
throw new TypeError('immutable')
} else if (headers[kGuard] === 'request-no-cors') {
// 5. Otherwise, if headerss guard is "request-no-cors":
// TODO
}
// 6. Otherwise, if headerss guard is "response" and name is a
// forbidden response-header name, return.
// 7. Append (name, value) to headerss header list.
return headers[kHeadersList].append(name, value)
// 8. If headerss guard is "request-no-cors", then remove
// privileged no-CORS request headers from headers
}
class HeadersList { class HeadersList {
/** @type {[string, string][]|null} */ /** @type {[string, string][]|null} */
cookies = null cookies = null
@ -75,7 +128,7 @@ class HeadersList {
if (init instanceof HeadersList) { if (init instanceof HeadersList) {
this[kHeadersMap] = new Map(init[kHeadersMap]) this[kHeadersMap] = new Map(init[kHeadersMap])
this[kHeadersSortedMap] = init[kHeadersSortedMap] this[kHeadersSortedMap] = init[kHeadersSortedMap]
this.cookies = init.cookies this.cookies = init.cookies === null ? null : [...init.cookies]
} else { } else {
this[kHeadersMap] = new Map(init) this[kHeadersMap] = new Map(init)
this[kHeadersSortedMap] = null this[kHeadersSortedMap] = null
@ -137,7 +190,7 @@ class HeadersList {
// the first such header to value and remove the // the first such header to value and remove the
// others. // others.
// 2. Otherwise, append header (name, value) to list. // 2. Otherwise, append header (name, value) to list.
return this[kHeadersMap].set(lowercaseName, { name, value }) this[kHeadersMap].set(lowercaseName, { name, value })
} }
// https://fetch.spec.whatwg.org/#concept-header-list-delete // https://fetch.spec.whatwg.org/#concept-header-list-delete
@ -150,20 +203,18 @@ class HeadersList {
this.cookies = null this.cookies = null
} }
return this[kHeadersMap].delete(name) this[kHeadersMap].delete(name)
} }
// https://fetch.spec.whatwg.org/#concept-header-list-get // https://fetch.spec.whatwg.org/#concept-header-list-get
get (name) { get (name) {
// 1. If list does not contain name, then return null. const value = this[kHeadersMap].get(name.toLowerCase())
if (!this.contains(name)) {
return null
}
// 1. If list does not contain name, then return null.
// 2. Return the values of all headers in list whose name // 2. Return the values of all headers in list whose name
// is a byte-case-insensitive match for name, // is a byte-case-insensitive match for name,
// separated from each other by 0x2C 0x20, in order. // separated from each other by 0x2C 0x20, in order.
return this[kHeadersMap].get(name.toLowerCase())?.value ?? null return value === undefined ? null : value.value
} }
* [Symbol.iterator] () { * [Symbol.iterator] () {
@ -189,6 +240,9 @@ class HeadersList {
// https://fetch.spec.whatwg.org/#headers-class // https://fetch.spec.whatwg.org/#headers-class
class Headers { class Headers {
constructor (init = undefined) { constructor (init = undefined) {
if (init === kConstruct) {
return
}
this[kHeadersList] = new HeadersList() this[kHeadersList] = new HeadersList()
// The new Headers(init) constructor steps are: // The new Headers(init) constructor steps are:
@ -212,43 +266,7 @@ class Headers {
name = webidl.converters.ByteString(name) name = webidl.converters.ByteString(name)
value = webidl.converters.ByteString(value) value = webidl.converters.ByteString(value)
// 1. Normalize value. return appendHeader(this, name, value)
value = headerValueNormalize(value)
// 2. If name is not a header name or value is not a
// header value, then throw a TypeError.
if (!isValidHeaderName(name)) {
throw webidl.errors.invalidArgument({
prefix: 'Headers.append',
value: name,
type: 'header name'
})
} else if (!isValidHeaderValue(value)) {
throw webidl.errors.invalidArgument({
prefix: 'Headers.append',
value,
type: 'header value'
})
}
// 3. If headerss guard is "immutable", then throw a TypeError.
// 4. Otherwise, if headerss guard is "request" and name is a
// forbidden header name, return.
// Note: undici does not implement forbidden header names
if (this[kGuard] === 'immutable') {
throw new TypeError('immutable')
} else if (this[kGuard] === 'request-no-cors') {
// 5. Otherwise, if headerss guard is "request-no-cors":
// TODO
}
// 6. Otherwise, if headerss guard is "response" and name is a
// forbidden response-header name, return.
// 7. Append (name, value) to headerss header list.
// 8. If headerss guard is "request-no-cors", then remove
// privileged no-CORS request headers from headers
return this[kHeadersList].append(name, value)
} }
// https://fetch.spec.whatwg.org/#dom-headers-delete // https://fetch.spec.whatwg.org/#dom-headers-delete
@ -293,7 +311,7 @@ class Headers {
// 7. Delete name from thiss header list. // 7. Delete name from thiss header list.
// 8. If thiss guard is "request-no-cors", then remove // 8. If thiss guard is "request-no-cors", then remove
// privileged no-CORS request headers from this. // privileged no-CORS request headers from this.
return this[kHeadersList].delete(name) this[kHeadersList].delete(name)
} }
// https://fetch.spec.whatwg.org/#dom-headers-get // https://fetch.spec.whatwg.org/#dom-headers-get
@ -386,7 +404,7 @@ class Headers {
// 7. Set (name, value) in thiss header list. // 7. Set (name, value) in thiss header list.
// 8. If thiss guard is "request-no-cors", then remove // 8. If thiss guard is "request-no-cors", then remove
// privileged no-CORS request headers from this // privileged no-CORS request headers from this
return this[kHeadersList].set(name, value) this[kHeadersList].set(name, value)
} }
// https://fetch.spec.whatwg.org/#dom-headers-getsetcookie // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie
@ -422,7 +440,8 @@ class Headers {
const cookies = this[kHeadersList].cookies const cookies = this[kHeadersList].cookies
// 3. For each name of names: // 3. For each name of names:
for (const [name, value] of names) { for (let i = 0; i < names.length; ++i) {
const [name, value] = names[i]
// 1. If name is `set-cookie`, then: // 1. If name is `set-cookie`, then:
if (name === 'set-cookie') { if (name === 'set-cookie') {
// 1. Let values be a list of all values of headers in list whose name // 1. Let values be a list of all values of headers in list whose name
@ -430,8 +449,8 @@ class Headers {
// 2. For each value of values: // 2. For each value of values:
// 1. Append (name, value) to headers. // 1. Append (name, value) to headers.
for (const value of cookies) { for (let j = 0; j < cookies.length; ++j) {
headers.push([name, value]) headers.push([name, cookies[j]])
} }
} else { } else {
// 2. Otherwise: // 2. Otherwise:
@ -455,6 +474,12 @@ class Headers {
keys () { keys () {
webidl.brandCheck(this, Headers) webidl.brandCheck(this, Headers)
if (this[kGuard] === 'immutable') {
const value = this[kHeadersSortedMap]
return makeIterator(() => value, 'Headers',
'key')
}
return makeIterator( return makeIterator(
() => [...this[kHeadersSortedMap].values()], () => [...this[kHeadersSortedMap].values()],
'Headers', 'Headers',
@ -465,6 +490,12 @@ class Headers {
values () { values () {
webidl.brandCheck(this, Headers) webidl.brandCheck(this, Headers)
if (this[kGuard] === 'immutable') {
const value = this[kHeadersSortedMap]
return makeIterator(() => value, 'Headers',
'value')
}
return makeIterator( return makeIterator(
() => [...this[kHeadersSortedMap].values()], () => [...this[kHeadersSortedMap].values()],
'Headers', 'Headers',
@ -475,6 +506,12 @@ class Headers {
entries () { entries () {
webidl.brandCheck(this, Headers) webidl.brandCheck(this, Headers)
if (this[kGuard] === 'immutable') {
const value = this[kHeadersSortedMap]
return makeIterator(() => value, 'Headers',
'key+value')
}
return makeIterator( return makeIterator(
() => [...this[kHeadersSortedMap].values()], () => [...this[kHeadersSortedMap].values()],
'Headers', 'Headers',


@@ -286,7 +286,7 @@ function finalizeAndReportTiming (response, initiatorType = 'other') {
   }
 
   // 8. If response's timing allow passed flag is not set, then:
-  if (!timingInfo.timingAllowPassed) {
+  if (!response.timingAllowPassed) {
     // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo.
     timingInfo = createOpaqueTimingInfo({
       startTime: timingInfo.startTime
@@ -1203,6 +1203,9 @@ function httpRedirectFetch (fetchParams, response) {
     // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name
     request.headersList.delete('authorization')
 
+    // https://fetch.spec.whatwg.org/#authentication-entries
+    request.headersList.delete('proxy-authorization', true)
+
     // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement.
     request.headersList.delete('cookie')
     request.headersList.delete('host')
@@ -1957,7 +1960,7 @@ async function httpNetworkFetch (
         path: url.pathname + url.search,
         origin: url.origin,
         method: request.method,
-        body: fetchParams.controller.dispatcher.isMockActive ? request.body && request.body.source : body,
+        body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body,
         headers: request.headersList.entries,
         maxRedirections: 0,
         upgrade: request.mode === 'websocket' ? 'websocket' : undefined
@@ -2002,7 +2005,7 @@ async function httpNetworkFetch (
            location = val
          }
 
-          headers.append(key, val)
+          headers[kHeadersList].append(key, val)
        }
      } else {
        const keys = Object.keys(headersList)
@@ -2016,7 +2019,7 @@ async function httpNetworkFetch (
            location = val
          }
 
-          headers.append(key, val)
+          headers[kHeadersList].append(key, val)
        }
      }
@@ -2120,7 +2123,7 @@ async function httpNetworkFetch (
        const key = headersList[n + 0].toString('latin1')
        const val = headersList[n + 1].toString('latin1')
 
-        headers.append(key, val)
+        headers[kHeadersList].append(key, val)
      }
 
      resolve({


@ -10,7 +10,8 @@ const {
isValidHTTPToken, isValidHTTPToken,
sameOrigin, sameOrigin,
normalizeMethod, normalizeMethod,
makePolicyContainer makePolicyContainer,
normalizeMethodRecord
} = require('./util') } = require('./util')
const { const {
forbiddenMethodsSet, forbiddenMethodsSet,
@ -27,13 +28,12 @@ const { kHeaders, kSignal, kState, kGuard, kRealm } = require('./symbols')
const { webidl } = require('./webidl') const { webidl } = require('./webidl')
const { getGlobalOrigin } = require('./global') const { getGlobalOrigin } = require('./global')
const { URLSerializer } = require('./dataURL') const { URLSerializer } = require('./dataURL')
const { kHeadersList } = require('../core/symbols') const { kHeadersList, kConstruct } = require('../core/symbols')
const assert = require('assert') const assert = require('assert')
const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = require('events') const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = require('events')
let TransformStream = globalThis.TransformStream let TransformStream = globalThis.TransformStream
const kInit = Symbol('init')
const kAbortController = Symbol('abortController') const kAbortController = Symbol('abortController')
const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => {
@ -44,7 +44,7 @@ const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => {
class Request { class Request {
// https://fetch.spec.whatwg.org/#dom-request // https://fetch.spec.whatwg.org/#dom-request
constructor (input, init = {}) { constructor (input, init = {}) {
if (input === kInit) { if (input === kConstruct) {
return return
} }
@ -183,8 +183,10 @@ class Request {
urlList: [...request.urlList] urlList: [...request.urlList]
}) })
const initHasKey = Object.keys(init).length !== 0
// 13. If init is not empty, then: // 13. If init is not empty, then:
if (Object.keys(init).length > 0) { if (initHasKey) {
// 1. If requests mode is "navigate", then set it to "same-origin". // 1. If requests mode is "navigate", then set it to "same-origin".
if (request.mode === 'navigate') { if (request.mode === 'navigate') {
request.mode = 'same-origin' request.mode = 'same-origin'
@ -299,7 +301,7 @@ class Request {
} }
// 23. If init["integrity"] exists, then set requests integrity metadata to it. // 23. If init["integrity"] exists, then set requests integrity metadata to it.
if (init.integrity !== undefined && init.integrity != null) { if (init.integrity != null) {
request.integrity = String(init.integrity) request.integrity = String(init.integrity)
} }
@ -315,16 +317,16 @@ class Request {
// 2. If method is not a method or method is a forbidden method, then // 2. If method is not a method or method is a forbidden method, then
// throw a TypeError. // throw a TypeError.
if (!isValidHTTPToken(init.method)) { if (!isValidHTTPToken(method)) {
throw TypeError(`'${init.method}' is not a valid HTTP method.`) throw new TypeError(`'${method}' is not a valid HTTP method.`)
} }
if (forbiddenMethodsSet.has(method.toUpperCase())) { if (forbiddenMethodsSet.has(method.toUpperCase())) {
throw TypeError(`'${init.method}' HTTP method is unsupported.`) throw new TypeError(`'${method}' HTTP method is unsupported.`)
} }
// 3. Normalize method. // 3. Normalize method.
method = normalizeMethod(init.method) method = normalizeMethodRecord[method] ?? normalizeMethod(method)
// 4. Set request's method to method. // 4. Set request's method to method.
request.method = method request.method = method
@ -395,7 +397,7 @@ class Request {
// 30. Set this's headers to a new Headers object with this's relevant // 30. Set this's headers to a new Headers object with this's relevant
// Realm, whose header list is request's header list and guard is // Realm, whose header list is request's header list and guard is
// "request". // "request".
this[kHeaders] = new Headers() this[kHeaders] = new Headers(kConstruct)
this[kHeaders][kHeadersList] = request.headersList this[kHeaders][kHeadersList] = request.headersList
this[kHeaders][kGuard] = 'request' this[kHeaders][kGuard] = 'request'
this[kHeaders][kRealm] = this[kRealm] this[kHeaders][kRealm] = this[kRealm]
@ -415,25 +417,25 @@ class Request {
} }
// 32. If init is not empty, then: // 32. If init is not empty, then:
if (Object.keys(init).length !== 0) { if (initHasKey) {
/** @type {HeadersList} */
const headersList = this[kHeaders][kHeadersList]
// 1. Let headers be a copy of this's headers and its associated header // 1. Let headers be a copy of this's headers and its associated header
// list. // list.
let headers = new Headers(this[kHeaders])
// 2. If init["headers"] exists, then set headers to init["headers"]. // 2. If init["headers"] exists, then set headers to init["headers"].
if (init.headers !== undefined) { const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList)
headers = init.headers
}
// 3. Empty this's headers's header list. // 3. Empty this's headers's header list.
this[kHeaders][kHeadersList].clear() headersList.clear()
// 4. If headers is a Headers object, then for each header in its header // 4. If headers is a Headers object, then for each header in its header
// list, append header's name/header's value to this's headers. // list, append header's name/header's value to this's headers.
if (headers.constructor.name === 'Headers') { if (headers instanceof HeadersList) {
for (const [key, val] of headers) { for (const [key, val] of headers) {
this[kHeaders].append(key, val) headersList.append(key, val)
} }
// Note: Copy the `set-cookie` meta-data.
headersList.cookies = headers.cookies
} else { } else {
// 5. Otherwise, fill this's headers with headers. // 5. Otherwise, fill this's headers with headers.
fillHeaders(this[kHeaders], headers) fillHeaders(this[kHeaders], headers)
@ -722,10 +724,10 @@ class Request {
// 3. Let clonedRequestObject be the result of creating a Request object, // 3. Let clonedRequestObject be the result of creating a Request object,
// given clonedRequest, this's headers's guard, and this's relevant Realm. // given clonedRequest, this's headers's guard, and this's relevant Realm.
const clonedRequestObject = new Request(kInit) const clonedRequestObject = new Request(kConstruct)
clonedRequestObject[kState] = clonedRequest clonedRequestObject[kState] = clonedRequest
clonedRequestObject[kRealm] = this[kRealm] clonedRequestObject[kRealm] = this[kRealm]
clonedRequestObject[kHeaders] = new Headers() clonedRequestObject[kHeaders] = new Headers(kConstruct)
clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList
clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard] clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard]
clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm] clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm]
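
A minimal sketch of the user-facing behaviour the Request changes above touch: lower-case methods are normalized through the new lookup table, and clone() reuses the underlying header list via the kConstruct fast path instead of re-validating each header. This assumes undici is installed; the URL is a placeholder.

    const { Request } = require('undici')

    // 'get' is normalized to 'GET' via the method lookup table.
    const req = new Request('https://example.invalid/', { method: 'get', headers: { 'x-demo': '1' } })
    console.log(req.method) // 'GET'

    // clone() builds the copy with the internal constructor fast path and
    // shares the cloned header list directly.
    const copy = req.clone()
    console.log(copy.headers.get('x-demo')) // '1'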

View File

@ -23,7 +23,7 @@ const { webidl } = require('./webidl')
const { FormData } = require('./formdata') const { FormData } = require('./formdata')
const { getGlobalOrigin } = require('./global') const { getGlobalOrigin } = require('./global')
const { URLSerializer } = require('./dataURL') const { URLSerializer } = require('./dataURL')
const { kHeadersList } = require('../core/symbols') const { kHeadersList, kConstruct } = require('../core/symbols')
const assert = require('assert') const assert = require('assert')
const { types } = require('util') const { types } = require('util')
@ -144,7 +144,7 @@ class Response {
// 2. Set this's headers to a new Headers object with this's relevant // 2. Set this's headers to a new Headers object with this's relevant
// Realm, whose header list is this's response's header list and guard // Realm, whose header list is this's response's header list and guard
// is "response". // is "response".
this[kHeaders] = new Headers() this[kHeaders] = new Headers(kConstruct)
this[kHeaders][kGuard] = 'response' this[kHeaders][kGuard] = 'response'
this[kHeaders][kHeadersList] = this[kState].headersList this[kHeaders][kHeadersList] = this[kState].headersList
this[kHeaders][kRealm] = this[kRealm] this[kHeaders][kRealm] = this[kRealm]
@ -514,11 +514,7 @@ webidl.converters.XMLHttpRequestBodyInit = function (V) {
return webidl.converters.Blob(V, { strict: false }) return webidl.converters.Blob(V, { strict: false })
} }
if ( if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) {
types.isAnyArrayBuffer(V) ||
types.isTypedArray(V) ||
types.isDataView(V)
) {
return webidl.converters.BufferSource(V) return webidl.converters.BufferSource(V)
} }
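
The webidl change above narrows the XMLHttpRequestBodyInit check to plain ArrayBuffers, typed arrays, and DataViews. A short sketch of the accepted inputs, assuming undici's Response export:

    const { Response } = require('undici')

    // A typed array is accepted as a body and read back as UTF-8 text.
    new Response(new Uint8Array([104, 105])).text().then((t) => console.log(t)) // 'hi'

    // A DataView over the same bytes works as well.
    new Response(new DataView(new Uint8Array([104, 105]).buffer)).text().then((t) => console.log(t)) // 'hi'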

242
node_modules/undici/lib/fetch/util.js generated vendored
View File

@ -7,14 +7,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = require('../core/util')
const assert = require('assert') const assert = require('assert')
const { isUint8Array } = require('util/types') const { isUint8Array } = require('util/types')
let supportedHashes = []
// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
/** @type {import('crypto')|undefined} */ /** @type {import('crypto')|undefined} */
let crypto let crypto
try { try {
crypto = require('crypto') crypto = require('crypto')
const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
/* c8 ignore next 3 */
} catch { } catch {
} }
function responseURL (response) { function responseURL (response) {
@ -103,52 +107,57 @@ function isValidReasonPhrase (statusText) {
return true return true
} }
function isTokenChar (c) { /**
return !( * @see https://tools.ietf.org/html/rfc7230#section-3.2.6
c >= 0x7f || * @param {number} c
c <= 0x20 || */
c === '(' || function isTokenCharCode (c) {
c === ')' || switch (c) {
c === '<' || case 0x22:
c === '>' || case 0x28:
c === '@' || case 0x29:
c === ',' || case 0x2c:
c === ';' || case 0x2f:
c === ':' || case 0x3a:
c === '\\' || case 0x3b:
c === '"' || case 0x3c:
c === '/' || case 0x3d:
c === '[' || case 0x3e:
c === ']' || case 0x3f:
c === '?' || case 0x40:
c === '=' || case 0x5b:
c === '{' || case 0x5c:
c === '}' case 0x5d:
) case 0x7b:
case 0x7d:
// DQUOTE and "(),/:;<=>?@[\]{}"
return false
default:
// VCHAR %x21-7E
return c >= 0x21 && c <= 0x7e
}
} }
// See RFC 7230, Section 3.2.6. /**
// https://github.com/chromium/chromium/blob/d7da0240cae77824d1eda25745c4022757499131/third_party/blink/renderer/platform/network/http_parsers.cc#L321 * @param {string} characters
*/
function isValidHTTPToken (characters) { function isValidHTTPToken (characters) {
if (!characters || typeof characters !== 'string') { if (characters.length === 0) {
return false return false
} }
for (let i = 0; i < characters.length; ++i) { for (let i = 0; i < characters.length; ++i) {
const c = characters.charCodeAt(i) if (!isTokenCharCode(characters.charCodeAt(i))) {
if (c > 0x7f || !isTokenChar(c)) {
return false return false
} }
} }
return true return true
} }
// https://fetch.spec.whatwg.org/#header-name /**
// https://github.com/chromium/chromium/blob/b3d37e6f94f87d59e44662d6078f6a12de845d17/net/http/http_util.cc#L342 * @see https://fetch.spec.whatwg.org/#header-name
* @param {string} potentialValue
*/
function isValidHeaderName (potentialValue) { function isValidHeaderName (potentialValue) {
if (potentialValue.length === 0) {
return false
}
return isValidHTTPToken(potentialValue) return isValidHTTPToken(potentialValue)
} }
@ -537,66 +546,56 @@ function bytesMatch (bytes, metadataList) {
return true return true
} }
// 3. If parsedMetadata is the empty set, return true. // 3. If response is not eligible for integrity validation, return false.
// TODO
// 4. If parsedMetadata is the empty set, return true.
if (parsedMetadata.length === 0) { if (parsedMetadata.length === 0) {
return true return true
} }
// 4. Let metadata be the result of getting the strongest // 5. Let metadata be the result of getting the strongest
// metadata from parsedMetadata. // metadata from parsedMetadata.
const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo)) const strongest = getStrongestMetadata(parsedMetadata)
// get the strongest algorithm const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
const strongest = list[0].algo
// get all entries that use the strongest algorithm; ignore weaker
const metadata = list.filter((item) => item.algo === strongest)
// 5. For each item in metadata: // 6. For each item in metadata:
for (const item of metadata) { for (const item of metadata) {
// 1. Let algorithm be the alg component of item. // 1. Let algorithm be the alg component of item.
const algorithm = item.algo const algorithm = item.algo
// 2. Let expectedValue be the val component of item. // 2. Let expectedValue be the val component of item.
let expectedValue = item.hash const expectedValue = item.hash
// See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
// "be liberal with padding". This is annoying, and it's not even in the spec. // "be liberal with padding". This is annoying, and it's not even in the spec.
if (expectedValue.endsWith('==')) {
expectedValue = expectedValue.slice(0, -2)
}
// 3. Let actualValue be the result of applying algorithm to bytes. // 3. Let actualValue be the result of applying algorithm to bytes.
let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
if (actualValue.endsWith('==')) { if (actualValue[actualValue.length - 1] === '=') {
actualValue = actualValue.slice(0, -2) if (actualValue[actualValue.length - 2] === '=') {
actualValue = actualValue.slice(0, -2)
} else {
actualValue = actualValue.slice(0, -1)
}
} }
// 4. If actualValue is a case-sensitive match for expectedValue, // 4. If actualValue is a case-sensitive match for expectedValue,
// return true. // return true.
if (actualValue === expectedValue) { if (compareBase64Mixed(actualValue, expectedValue)) {
return true
}
let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
if (actualBase64URL.endsWith('==')) {
actualBase64URL = actualBase64URL.slice(0, -2)
}
if (actualBase64URL === expectedValue) {
return true return true
} }
} }
// 6. Return false. // 7. Return false.
return false return false
} }
// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
// https://www.w3.org/TR/CSP2/#source-list-syntax // https://www.w3.org/TR/CSP2/#source-list-syntax
// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
/** /**
* @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
@ -610,8 +609,6 @@ function parseMetadata (metadata) {
// 2. Let empty be equal to true. // 2. Let empty be equal to true.
let empty = true let empty = true
const supportedHashes = crypto.getHashes()
// 3. For each token returned by splitting metadata on spaces: // 3. For each token returned by splitting metadata on spaces:
for (const token of metadata.split(' ')) { for (const token of metadata.split(' ')) {
// 1. Set empty to false. // 1. Set empty to false.
@ -621,7 +618,11 @@ function parseMetadata (metadata) {
const parsedToken = parseHashWithOptions.exec(token) const parsedToken = parseHashWithOptions.exec(token)
// 3. If token does not parse, continue to the next token. // 3. If token does not parse, continue to the next token.
if (parsedToken === null || parsedToken.groups === undefined) { if (
parsedToken === null ||
parsedToken.groups === undefined ||
parsedToken.groups.algo === undefined
) {
// Note: Chromium blocks the request at this point, but Firefox // Note: Chromium blocks the request at this point, but Firefox
// gives a warning that an invalid integrity was given. The // gives a warning that an invalid integrity was given. The
// correct behavior is to ignore these, and subsequently not // correct behavior is to ignore these, and subsequently not
@ -630,11 +631,11 @@ function parseMetadata (metadata) {
} }
// 4. Let algorithm be the hash-algo component of token. // 4. Let algorithm be the hash-algo component of token.
const algorithm = parsedToken.groups.algo const algorithm = parsedToken.groups.algo.toLowerCase()
// 5. If algorithm is a hash function recognized by the user // 5. If algorithm is a hash function recognized by the user
// agent, add the parsed token to result. // agent, add the parsed token to result.
if (supportedHashes.includes(algorithm.toLowerCase())) { if (supportedHashes.includes(algorithm)) {
result.push(parsedToken.groups) result.push(parsedToken.groups)
} }
} }
@ -647,6 +648,82 @@ function parseMetadata (metadata) {
return result return result
} }
/**
* @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
*/
function getStrongestMetadata (metadataList) {
// Let algorithm be the algo component of the first item in metadataList.
// Can be sha256
let algorithm = metadataList[0].algo
// If the algorithm is sha512, then it is the strongest
// and we can return immediately
if (algorithm[3] === '5') {
return algorithm
}
for (let i = 1; i < metadataList.length; ++i) {
const metadata = metadataList[i]
// If the algorithm is sha512, then it is the strongest
// and we can break the loop immediately
if (metadata.algo[3] === '5') {
algorithm = 'sha512'
break
// If the algorithm is sha384, then a potential sha256 or sha384 is ignored
} else if (algorithm[3] === '3') {
continue
// algorithm is sha256, check if algorithm is sha384 and if so, set it as
// the strongest
} else if (metadata.algo[3] === '3') {
algorithm = 'sha384'
}
}
return algorithm
}
function filterMetadataListByAlgorithm (metadataList, algorithm) {
if (metadataList.length === 1) {
return metadataList
}
let pos = 0
for (let i = 0; i < metadataList.length; ++i) {
if (metadataList[i].algo === algorithm) {
metadataList[pos++] = metadataList[i]
}
}
metadataList.length = pos
return metadataList
}
/**
* Compares two base64 strings, allowing for base64url
* in the second string.
*
* @param {string} actualValue always base64
* @param {string} expectedValue base64 or base64url
* @returns {boolean}
*/
function compareBase64Mixed (actualValue, expectedValue) {
if (actualValue.length !== expectedValue.length) {
return false
}
for (let i = 0; i < actualValue.length; ++i) {
if (actualValue[i] !== expectedValue[i]) {
if (
(actualValue[i] === '+' && expectedValue[i] === '-') ||
(actualValue[i] === '/' && expectedValue[i] === '_')
) {
continue
}
return false
}
}
return true
}
// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
// TODO // TODO
@ -693,11 +770,30 @@ function isCancelled (fetchParams) {
fetchParams.controller.state === 'terminated' fetchParams.controller.state === 'terminated'
} }
// https://fetch.spec.whatwg.org/#concept-method-normalize const normalizeMethodRecord = {
delete: 'DELETE',
DELETE: 'DELETE',
get: 'GET',
GET: 'GET',
head: 'HEAD',
HEAD: 'HEAD',
options: 'OPTIONS',
OPTIONS: 'OPTIONS',
post: 'POST',
POST: 'POST',
put: 'PUT',
PUT: 'PUT'
}
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(normalizeMethodRecord, null)
/**
* @see https://fetch.spec.whatwg.org/#concept-method-normalize
* @param {string} method
*/
function normalizeMethod (method) { function normalizeMethod (method) {
return /^(DELETE|GET|HEAD|OPTIONS|POST|PUT)$/i.test(method) return normalizeMethodRecord[method.toLowerCase()] ?? method
? method.toUpperCase()
: method
} }
// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string // https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string
@ -1042,5 +1138,7 @@ module.exports = {
urlIsLocal, urlIsLocal,
urlHasHttpsScheme, urlHasHttpsScheme,
urlIsHttpHttpsScheme, urlIsHttpHttpsScheme,
readAllBytes readAllBytes,
normalizeMethodRecord,
parseMetadata
} }
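
The integrity changes above compare the computed base64 digest against metadata that may be base64url-encoded (compareBase64Mixed) and keep only the strongest listed hash. A standalone sketch of why the '+'/'-' and '/'/'_' tolerance is needed, using only Node's crypto (assumes a Node version whose digest() supports the 'base64url' encoding):

    const crypto = require('crypto')

    const bytes = Buffer.from('hello world')
    const b64 = crypto.createHash('sha512').update(bytes).digest('base64')
    const b64url = crypto.createHash('sha512').update(bytes).digest('base64url')

    // Same digest; only '+' -> '-', '/' -> '_' and the padding differ,
    // which is exactly what the mixed comparison allows for.
    console.log(b64)
    console.log(b64url)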

View File

@ -427,12 +427,10 @@ webidl.converters.ByteString = function (V) {
// 2. If the value of any element of x is greater than // 2. If the value of any element of x is greater than
// 255, then throw a TypeError. // 255, then throw a TypeError.
for (let index = 0; index < x.length; index++) { for (let index = 0; index < x.length; index++) {
const charCode = x.charCodeAt(index) if (x.charCodeAt(index) > 255) {
if (charCode > 255) {
throw new TypeError( throw new TypeError(
'Cannot convert argument to a ByteString because the character at ' + 'Cannot convert argument to a ByteString because the character at ' +
`index ${index} has a value of ${charCode} which is greater than 255.` `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.`
) )
} }
} }
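
For context on the ByteString converter above: any character with a code above 255 should make the conversion throw. A rough sketch, assuming undici's Headers init goes through this converter for record-style values:

    const { Headers } = require('undici')

    try {
      // '€' is U+20AC (8364 > 255), so the ByteString conversion rejects it.
      new Headers({ 'x-demo': '€' })
    } catch (err) {
      console.log(err instanceof TypeError) // true
    }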

View File

@ -184,12 +184,17 @@ function parseLocation (statusCode, headers) {
// https://tools.ietf.org/html/rfc7231#section-6.4.4 // https://tools.ietf.org/html/rfc7231#section-6.4.4
function shouldRemoveHeader (header, removeContent, unknownOrigin) { function shouldRemoveHeader (header, removeContent, unknownOrigin) {
return ( if (header.length === 4) {
(header.length === 4 && header.toString().toLowerCase() === 'host') || return util.headerNameToString(header) === 'host'
(removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || }
(unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || if (removeContent && util.headerNameToString(header).startsWith('content-')) {
(unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') return true
) }
if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
const name = util.headerNameToString(header)
return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
}
return false
} }
// https://tools.ietf.org/html/rfc7231#section-6.4 // https://tools.ietf.org/html/rfc7231#section-6.4
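
The redirect hunk above strips sensitive headers when following a redirect to an unknown origin, now including proxy-authorization. A simplified standalone illustration of that rule (not the internal util.headerNameToString path):

    // Simplified: which request headers survive a cross-origin redirect.
    const sensitive = ['authorization', 'cookie', 'proxy-authorization']

    function stripOnCrossOriginRedirect (headers) {
      return Object.fromEntries(
        Object.entries(headers).filter(([name]) => !sensitive.includes(name.toLowerCase()))
      )
    }

    console.log(stripOnCrossOriginRedirect({
      Authorization: 'Bearer abc',
      Accept: 'application/json'
    })) // { Accept: 'application/json' }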

View File

@ -65,6 +65,9 @@ class ProxyAgent extends DispatcherBase {
this[kProxyTls] = opts.proxyTls this[kProxyTls] = opts.proxyTls
this[kProxyHeaders] = opts.headers || {} this[kProxyHeaders] = opts.headers || {}
const resolvedUrl = new URL(opts.uri)
const { origin, port, host, username, password } = resolvedUrl
if (opts.auth && opts.token) { if (opts.auth && opts.token) {
throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token') throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')
} else if (opts.auth) { } else if (opts.auth) {
@ -72,11 +75,10 @@ class ProxyAgent extends DispatcherBase {
this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}` this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}`
} else if (opts.token) { } else if (opts.token) {
this[kProxyHeaders]['proxy-authorization'] = opts.token this[kProxyHeaders]['proxy-authorization'] = opts.token
} else if (username && password) {
this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}`
} }
const resolvedUrl = new URL(opts.uri)
const { origin, port, host } = resolvedUrl
const connect = buildConnector({ ...opts.proxyTls }) const connect = buildConnector({ ...opts.proxyTls })
this[kConnectEndpoint] = buildConnector({ ...opts.requestTls }) this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })
this[kClient] = clientFactory(resolvedUrl, { connect }) this[kClient] = clientFactory(resolvedUrl, { connect })
@ -100,7 +102,7 @@ class ProxyAgent extends DispatcherBase {
}) })
if (statusCode !== 200) { if (statusCode !== 200) {
socket.on('error', () => {}).destroy() socket.on('error', () => {}).destroy()
callback(new RequestAbortedError('Proxy response !== 200 when HTTP Tunneling')) callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`))
} }
if (opts.protocol !== 'https:') { if (opts.protocol !== 'https:') {
callback(null, socket) callback(null, socket)
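
The ProxyAgent change above turns credentials embedded in the proxy URI into a basic proxy-authorization header. A minimal usage sketch; the proxy host and credentials are placeholders:

    const { ProxyAgent, setGlobalDispatcher } = require('undici')

    // 'user:secret' in the URI becomes
    // 'proxy-authorization: Basic <base64(user:secret)>' on the tunnel request.
    setGlobalDispatcher(new ProxyAgent('http://user:secret@proxy.example.invalid:8080'))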

7
node_modules/undici/package.json generated vendored
View File

@ -1,6 +1,6 @@
{ {
"name": "undici", "name": "undici",
"version": "5.27.2", "version": "5.28.4",
"description": "An HTTP/1.1 client, written from scratch for Node.js", "description": "An HTTP/1.1 client, written from scratch for Node.js",
"homepage": "https://undici.nodejs.org", "homepage": "https://undici.nodejs.org",
"bugs": { "bugs": {
@ -115,15 +115,16 @@
"husky": "^8.0.1", "husky": "^8.0.1",
"import-fresh": "^3.3.0", "import-fresh": "^3.3.0",
"jest": "^29.0.2", "jest": "^29.0.2",
"jsdom": "^22.1.0", "jsdom": "^23.0.0",
"jsfuzz": "^1.0.15", "jsfuzz": "^1.0.15",
"mocha": "^10.0.0", "mocha": "^10.0.0",
"mockttp": "^3.9.2",
"p-timeout": "^3.2.0", "p-timeout": "^3.2.0",
"pre-commit": "^1.2.2", "pre-commit": "^1.2.2",
"proxy": "^1.0.2", "proxy": "^1.0.2",
"proxyquire": "^2.1.3", "proxyquire": "^2.1.3",
"semver": "^7.5.4", "semver": "^7.5.4",
"sinon": "^16.1.0", "sinon": "^17.0.1",
"snazzy": "^9.0.0", "snazzy": "^9.0.0",
"standard": "^17.0.0", "standard": "^17.0.0",
"table": "^6.8.0", "table": "^6.8.0",

View File

@ -77,7 +77,7 @@ export declare namespace Client {
*/ */
allowH2?: boolean; allowH2?: boolean;
/** /**
* @description Dictates the maximum number of concurrent streams for a single H2 session. It can be overriden by a SETTINGS remote frame. * @description Dictates the maximum number of concurrent streams for a single H2 session. It can be overridden by a SETTINGS remote frame.
* @default 100 * @default 100
*/ */
maxConcurrentStreams?: number maxConcurrentStreams?: number

View File

@ -211,7 +211,7 @@ declare namespace Dispatcher {
/** Invoked when request is upgraded either due to a `Upgrade` header or `CONNECT` method. */ /** Invoked when request is upgraded either due to a `Upgrade` header or `CONNECT` method. */
onUpgrade?(statusCode: number, headers: Buffer[] | string[] | null, socket: Duplex): void; onUpgrade?(statusCode: number, headers: Buffer[] | string[] | null, socket: Duplex): void;
/** Invoked when statusCode and headers have been received. May be invoked multiple times due to 1xx informational headers. */ /** Invoked when statusCode and headers have been received. May be invoked multiple times due to 1xx informational headers. */
onHeaders?(statusCode: number, headers: Buffer[] | string[] | null, resume: () => void): boolean; onHeaders?(statusCode: number, headers: Buffer[] | string[] | null, resume: () => void, statusText: string): boolean;
/** Invoked when response payload data is received. */ /** Invoked when response payload data is received. */
onData?(chunk: Buffer): boolean; onData?(chunk: Buffer): boolean;
/** Invoked when response payload and trailers have been received and the request has completed. */ /** Invoked when response payload and trailers have been received and the request has completed. */

View File

@ -108,7 +108,7 @@ export interface RequestInit {
body?: BodyInit body?: BodyInit
redirect?: RequestRedirect redirect?: RequestRedirect
integrity?: string integrity?: string
signal?: AbortSignal signal?: AbortSignal | null
credentials?: RequestCredentials credentials?: RequestCredentials
mode?: RequestMode mode?: RequestMode
referrer?: string referrer?: string

View File

@ -14,6 +14,7 @@ import MockPool from'./mock-pool'
import MockAgent from'./mock-agent' import MockAgent from'./mock-agent'
import mockErrors from'./mock-errors' import mockErrors from'./mock-errors'
import ProxyAgent from'./proxy-agent' import ProxyAgent from'./proxy-agent'
import RetryHandler from'./retry-handler'
import { request, pipeline, stream, connect, upgrade } from './api' import { request, pipeline, stream, connect, upgrade } from './api'
export * from './cookies' export * from './cookies'
@ -27,7 +28,7 @@ export * from './content-type'
export * from './cache' export * from './cache'
export { Interceptable } from './mock-interceptor' export { Interceptable } from './mock-interceptor'
export { Dispatcher, BalancedPool, Pool, Client, buildConnector, errors, Agent, request, stream, pipeline, connect, upgrade, setGlobalDispatcher, getGlobalDispatcher, setGlobalOrigin, getGlobalOrigin, MockClient, MockPool, MockAgent, mockErrors, ProxyAgent, RedirectHandler, DecoratorHandler } export { Dispatcher, BalancedPool, Pool, Client, buildConnector, errors, Agent, request, stream, pipeline, connect, upgrade, setGlobalDispatcher, getGlobalDispatcher, setGlobalOrigin, getGlobalOrigin, MockClient, MockPool, MockAgent, mockErrors, ProxyAgent, RedirectHandler, DecoratorHandler, RetryHandler }
export default Undici export default Undici
declare namespace Undici { declare namespace Undici {
@ -35,6 +36,7 @@ declare namespace Undici {
var Pool: typeof import('./pool').default; var Pool: typeof import('./pool').default;
var RedirectHandler: typeof import ('./handlers').RedirectHandler var RedirectHandler: typeof import ('./handlers').RedirectHandler
var DecoratorHandler: typeof import ('./handlers').DecoratorHandler var DecoratorHandler: typeof import ('./handlers').DecoratorHandler
var RetryHandler: typeof import ('./retry-handler').default
var createRedirectInterceptor: typeof import ('./interceptors').createRedirectInterceptor var createRedirectInterceptor: typeof import ('./interceptors').createRedirectInterceptor
var BalancedPool: typeof import('./balanced-pool').default; var BalancedPool: typeof import('./balanced-pool').default;
var Client: typeof import('./client').default; var Client: typeof import('./client').default;

116
node_modules/undici/types/retry-handler.d.ts generated vendored Normal file
View File

@ -0,0 +1,116 @@
import Dispatcher from "./dispatcher";
export default RetryHandler;
declare class RetryHandler implements Dispatcher.DispatchHandlers {
constructor(
options: Dispatcher.DispatchOptions & {
retryOptions?: RetryHandler.RetryOptions;
},
retryHandlers: RetryHandler.RetryHandlers
);
}
declare namespace RetryHandler {
export type RetryState = { counter: number; currentTimeout: number };
export type RetryContext = {
state: RetryState;
opts: Dispatcher.DispatchOptions & {
retryOptions?: RetryHandler.RetryOptions;
};
}
export type OnRetryCallback = (result?: Error | null) => void;
export type RetryCallback = (
err: Error,
context: {
state: RetryState;
opts: Dispatcher.DispatchOptions & {
retryOptions?: RetryHandler.RetryOptions;
};
},
callback: OnRetryCallback
) => number | null;
export interface RetryOptions {
/**
* Callback to be invoked on every retry iteration.
* It receives the error, current state of the retry object and the options object
* passed when instantiating the retry handler.
*
* @type {RetryCallback}
* @memberof RetryOptions
*/
retry?: RetryCallback;
/**
* Maximum number of retries to allow.
*
* @type {number}
* @memberof RetryOptions
* @default 5
*/
maxRetries?: number;
/**
* Max number of milliseconds allow between retries
*
* @type {number}
* @memberof RetryOptions
* @default 30000
*/
maxTimeout?: number;
/**
* Initial number of milliseconds to wait before retrying for the first time.
*
* @type {number}
* @memberof RetryOptions
* @default 500
*/
minTimeout?: number;
/**
* Factor to multiply the timeout factor between retries.
*
* @type {number}
* @memberof RetryOptions
* @default 2
*/
timeoutFactor?: number;
/**
* It enables to automatically infer timeout between retries based on the `Retry-After` header.
*
* @type {boolean}
* @memberof RetryOptions
* @default true
*/
retryAfter?: boolean;
/**
* HTTP methods to retry.
*
* @type {Dispatcher.HttpMethod[]}
* @memberof RetryOptions
* @default ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'],
*/
methods?: Dispatcher.HttpMethod[];
/**
* Error codes to be retried. e.g. `ECONNRESET`, `ENOTFOUND`, `ETIMEDOUT`, `ECONNREFUSED`, etc.
*
* @type {string[]}
* @default ['ECONNRESET','ECONNREFUSED','ENOTFOUND','ENETDOWN','ENETUNREACH','EHOSTDOWN','EHOSTUNREACH','EPIPE']
*/
errorCodes?: string[];
/**
* HTTP status codes to be retried.
*
* @type {number[]}
* @memberof RetryOptions
* @default [500, 502, 503, 504, 429],
*/
statusCodes?: number[];
}
export interface RetryHandlers {
dispatch: Dispatcher["dispatch"];
handler: Dispatcher.DispatchHandlers;
}
}
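
A minimal usage sketch against the RetryHandler typings above; the origin, path, and retry options are placeholders, and the handler callbacks are kept deliberately small:

    const { Client, RetryHandler } = require('undici')

    const client = new Client('http://localhost:3000')

    const handler = new RetryHandler(
      {
        method: 'GET',
        path: '/',
        retryOptions: { maxRetries: 3, minTimeout: 500, timeoutFactor: 2 }
      },
      {
        dispatch: client.dispatch.bind(client),
        handler: {
          onConnect () {},
          onHeaders (statusCode, headers, resume, statusText) { return true },
          onData (chunk) { return true },
          onComplete () { client.close() },
          onError (err) { console.error(err) }
        }
      }
    )

    client.dispatch({ method: 'GET', path: '/' }, handler)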

18
package-lock.json generated
View File

@ -21,18 +21,18 @@
} }
}, },
"node_modules/@actions/http-client": { "node_modules/@actions/http-client": {
"version": "2.2.0", "version": "2.2.1",
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.0.tgz", "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.1.tgz",
"integrity": "sha512-q+epW0trjVUUHboliPb4UF9g2msf+w61b32tAkFEwL/IwP0DQWgbCMM0Hbe3e3WXSKz5VcUXbzJQgy8Hkra/Lg==", "integrity": "sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==",
"dependencies": { "dependencies": {
"tunnel": "^0.0.6", "tunnel": "^0.0.6",
"undici": "^5.25.4" "undici": "^5.25.4"
} }
}, },
"node_modules/@fastify/busboy": { "node_modules/@fastify/busboy": {
"version": "2.1.0", "version": "2.1.1",
"resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.0.tgz", "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
"integrity": "sha512-+KpH+QxZU7O4675t3mnkQKcZZg56u+K/Ct2K+N2AZYNVK8kyeo/bI18tI8aPm3tvNNRyTWfj6s5tnGNlcbQRsA==", "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
"engines": { "engines": {
"node": ">=14" "node": ">=14"
} }
@ -46,9 +46,9 @@
} }
}, },
"node_modules/undici": { "node_modules/undici": {
"version": "5.27.2", "version": "5.28.4",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.27.2.tgz", "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz",
"integrity": "sha512-iS857PdOEy/y3wlM3yRp+6SNQQ6xU0mmZcwRSriqk+et/cwWAtwmIGf6WkoDN2EK/AMdCO/dfXzIwi+rFMrjjQ==", "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==",
"dependencies": { "dependencies": {
"@fastify/busboy": "^2.0.0" "@fastify/busboy": "^2.0.0"
}, },