fix(handlers): create burst handler for interaction callbacks (#8996)

* fix(handlers): create burst handler for interaction callbacks

* docs: use remarks instead of info block

Co-Authored-By: Almeida <almeidx@pm.me>

* refactor: move code duplication to shared handler

Co-authored-by: Jiralite <33201955+Jiralite@users.noreply.github.com>

* Update packages/rest/src/lib/handlers/BurstHandler.ts

---------

Co-authored-by: Almeida <almeidx@pm.me>
Co-authored-by: Jiralite <33201955+Jiralite@users.noreply.github.com>
Co-authored-by: Vlad Frangu <kingdgrizzle@gmail.com>
Co-authored-by: Aura Román <kyradiscord@gmail.com>
This commit is contained in:
ckohen
2023-03-30 10:22:04 -07:00
committed by GitHub
parent 984bd55b43
commit db8df104c5
9 changed files with 511 additions and 124 deletions

View File

@@ -0,0 +1,139 @@
/* eslint-disable id-length */
/* eslint-disable promise/prefer-await-to-then */
import { performance } from 'node:perf_hooks';
import { MockAgent, setGlobalDispatcher } from 'undici';
import type { Interceptable, MockInterceptor } from 'undici/types/mock-interceptor';
import { beforeEach, afterEach, test, expect, vitest } from 'vitest';
import { DiscordAPIError, HTTPError, RateLimitError, REST, BurstHandlerMajorIdKey } from '../src/index.js';
import { BurstHandler } from '../src/lib/handlers/BurstHandler.js';
import { genPath } from './util.js';
// Handler-map key for the interaction callback route; the `Global(...)` hash with the
// burst major-id marks the bucket served by a BurstHandler rather than a SequentialHandler.
const callbackKey = `Global(POST:/interactions/:id/:token/callback):${BurstHandlerMajorIdKey}`;
// Matches any interaction callback path with a 17-19 digit snowflake id and any token.
const callbackPath = new RegExp(genPath('/interactions/[0-9]{17,19}/.+/callback'));
const api = new REST();
// Fresh mock agent/pool per test so intercepts cannot leak between tests.
let mockAgent: MockAgent;
let mockPool: Interceptable;
beforeEach(() => {
mockAgent = new MockAgent();
// Disallow real network traffic — only explicitly intercepted requests may resolve.
mockAgent.disableNetConnect();
setGlobalDispatcher(mockAgent);
mockPool = mockAgent.get('https://discord.com');
api.setAgent(mockAgent);
});
afterEach(async () => {
await mockAgent.close();
});
// @discordjs/rest uses the `content-type` header to detect whether to parse
// the response as JSON or as an ArrayBuffer.
const responseOptions: MockInterceptor.MockResponseOptions = {
headers: {
'content-type': 'application/json',
},
};
test('Interaction callback creates burst handler', async () => {
  mockPool.intercept({ method: 'POST', path: callbackPath }).reply(200);

  // No handler exists for the callback route before the first request is made
  expect(api.requestManager.handlers.get(callbackKey)).toBe(undefined);

  const response = await api.post('/interactions/1234567890123456789/totallyarealtoken/callback', {
    auth: false,
    body: { type: 4, data: { content: 'Reply' } },
  });
  expect(response).toBeInstanceOf(Uint8Array);

  // The first request lazily created a BurstHandler keyed on the callback route
  expect(api.requestManager.handlers.get(callbackKey)).toBeInstanceOf(BurstHandler);
});
test('Requests are handled in bursts', async () => {
  mockPool.intercept({ path: callbackPath, method: 'POST' }).reply(200).delay(100).times(3);

  // The responses carry no useful data, so record the completion time of each request instead
  const timedPost = async (route: `/${string}`, content: string) => {
    await api.post(route, {
      auth: false,
      body: { type: 4, data: { content } },
    });
    return performance.now();
  };

  const [first, second, third] = await Promise.all([
    timedPost('/interactions/1234567890123456789/totallyarealtoken/callback', 'Reply1'),
    timedPost('/interactions/2345678901234567890/anotherveryrealtoken/callback', 'Reply2'),
    timedPost('/interactions/3456789012345678901/nowaytheresanotherone/callback', 'Reply3'),
  ]);

  // All three completed together — they ran concurrently instead of queueing sequentially
  expect(second - first).toBeLessThan(10);
  expect(third - first).toBeLessThan(10);
});
test('Handle 404', async () => {
  mockPool
    .intercept({ path: callbackPath, method: 'POST' })
    .reply(404, { message: 'Unknown interaction', code: 10_062 }, responseOptions);

  const request = api.post('/interactions/1234567890123456788/definitelynotarealinteraction/callback', {
    auth: false,
    body: { type: 4, data: { content: 'Malicious' } },
  });

  // The 404 surfaces as a DiscordAPIError carrying the API's own error message
  await expect(request).rejects.toBeInstanceOf(DiscordAPIError);
  await expect(request).rejects.toThrowError('Unknown interaction');
});
test('Handle unexpected 429', async () => {
  // Whether the next intercepted request should be answered with a 429.
  // Scoped to this test (it was previously a module-level `let`, which no other test uses).
  let unexpected429 = true;
  mockPool
    .intercept({
      path: callbackPath,
      method: 'POST',
    })
    .reply(() => {
      if (unexpected429) {
        unexpected429 = false;
        return {
          statusCode: 429,
          data: '',
          responseOptions: {
            headers: {
              'retry-after': '1',
              via: '1.1 google',
            },
          },
        };
      }

      return {
        statusCode: 200,
        data: { test: true },
        responseOptions,
      };
    })
    .times(2);

  const previous = performance.now();
  // The handler should absorb the unexpected 429, sleep out retry-after, and retry transparently
  expect(
    await api.post('/interactions/1234567890123456789/totallyarealtoken/callback', {
      auth: false,
      body: { type: 4, data: { content: 'Reply' } },
    }),
  ).toStrictEqual({ test: true });
  // retry-after was 1s, so at least that long must have elapsed before success
  expect(performance.now()).toBeGreaterThanOrEqual(previous + 1_000);
});

View File

@@ -1,15 +1,17 @@
import { Buffer } from 'node:buffer'; import { Buffer, File as NativeFile } from 'node:buffer';
import { URLSearchParams } from 'node:url'; import { URLSearchParams } from 'node:url';
import { DiscordSnowflake } from '@sapphire/snowflake'; import { DiscordSnowflake } from '@sapphire/snowflake';
import type { Snowflake } from 'discord-api-types/v10'; import type { Snowflake } from 'discord-api-types/v10';
import { Routes } from 'discord-api-types/v10'; import { Routes } from 'discord-api-types/v10';
import type { FormData } from 'undici'; import type { FormData } from 'undici';
import { File, MockAgent, setGlobalDispatcher } from 'undici'; import { File as UndiciFile, MockAgent, setGlobalDispatcher } from 'undici';
import type { Interceptable, MockInterceptor } from 'undici/types/mock-interceptor'; import type { Interceptable, MockInterceptor } from 'undici/types/mock-interceptor';
import { beforeEach, afterEach, test, expect } from 'vitest'; import { beforeEach, afterEach, test, expect } from 'vitest';
import { REST } from '../src/index.js'; import { REST } from '../src/index.js';
import { genPath } from './util.js'; import { genPath } from './util.js';
const File = NativeFile ?? UndiciFile;
const newSnowflake: Snowflake = DiscordSnowflake.generate().toString(); const newSnowflake: Snowflake = DiscordSnowflake.generate().toString();
const api = new REST().setToken('A-Very-Fake-Token'); const api = new REST().setToken('A-Very-Fake-Token');

View File

@@ -7,9 +7,16 @@ import { lazy } from '@discordjs/util';
import { DiscordSnowflake } from '@sapphire/snowflake'; import { DiscordSnowflake } from '@sapphire/snowflake';
import { FormData, type RequestInit, type BodyInit, type Dispatcher, type Agent } from 'undici'; import { FormData, type RequestInit, type BodyInit, type Dispatcher, type Agent } from 'undici';
import type { RESTOptions, RestEvents, RequestOptions } from './REST.js'; import type { RESTOptions, RestEvents, RequestOptions } from './REST.js';
import { BurstHandler } from './handlers/BurstHandler.js';
import type { IHandler } from './handlers/IHandler.js'; import type { IHandler } from './handlers/IHandler.js';
import { SequentialHandler } from './handlers/SequentialHandler.js'; import { SequentialHandler } from './handlers/SequentialHandler.js';
import { DefaultRestOptions, DefaultUserAgent, OverwrittenMimeTypes, RESTEvents } from './utils/constants.js'; import {
BurstHandlerMajorIdKey,
DefaultRestOptions,
DefaultUserAgent,
OverwrittenMimeTypes,
RESTEvents,
} from './utils/constants.js';
import { resolveBody } from './utils/utils.js'; import { resolveBody } from './utils/utils.js';
// Make this a lazy dynamic import as file-type is a pure ESM package // Make this a lazy dynamic import as file-type is a pure ESM package
@@ -351,7 +358,10 @@ export class RequestManager extends EventEmitter {
*/ */
private createHandler(hash: string, majorParameter: string) { private createHandler(hash: string, majorParameter: string) {
// Create the async request queue to handle requests // Create the async request queue to handle requests
const queue = new SequentialHandler(this, hash, majorParameter); const queue =
majorParameter === BurstHandlerMajorIdKey
? new BurstHandler(this, hash, majorParameter)
: new SequentialHandler(this, hash, majorParameter);
// Save the queue based on its id // Save the queue based on its id
this.handlers.set(queue.id, queue); this.handlers.set(queue.id, queue);
@@ -499,6 +509,14 @@ export class RequestManager extends EventEmitter {
* @internal * @internal
*/ */
private static generateRouteData(endpoint: RouteLike, method: RequestMethod): RouteData { private static generateRouteData(endpoint: RouteLike, method: RequestMethod): RouteData {
if (endpoint.startsWith('/interactions/') && endpoint.endsWith('/callback')) {
return {
majorParameter: BurstHandlerMajorIdKey,
bucketRoute: '/interactions/:id/:token/callback',
original: endpoint,
};
}
const majorIdMatch = /^\/(?:channels|guilds|webhooks)\/(\d{17,19})/.exec(endpoint); const majorIdMatch = /^\/(?:channels|guilds|webhooks)\/(\d{17,19})/.exec(endpoint);
// Get the major id for this route - global otherwise // Get the major id for this route - global otherwise

View File

@@ -0,0 +1,146 @@
import { setTimeout as sleep } from 'node:timers/promises';
import type { Dispatcher } from 'undici';
import type { RequestOptions } from '../REST.js';
import type { HandlerRequestData, RequestManager, RouteData } from '../RequestManager.js';
import { RESTEvents } from '../utils/constants.js';
import { onRateLimit, parseHeader } from '../utils/utils.js';
import type { IHandler } from './IHandler.js';
import { handleErrors, incrementInvalidCount, makeNetworkRequest } from './Shared.js';
/**
* The structure used to handle burst requests for a given bucket.
* Burst requests have no ratelimit handling but allow for pre- and post-processing
* of data in the same manner as sequentially queued requests.
*
* @remarks
* This queue may still emit a rate limit error if an unexpected 429 is hit
*/
export class BurstHandler implements IHandler {
  /**
   * {@inheritdoc IHandler.id}
   */
  public readonly id: string;

  /**
   * {@inheritDoc IHandler.inactive}
   */
  public inactive = false;

  /**
   * @param manager - The request manager
   * @param hash - The hash that this RequestHandler handles
   * @param majorParameter - The major parameter for this handler
   */
  public constructor(
    private readonly manager: RequestManager,
    private readonly hash: string,
    private readonly majorParameter: string,
  ) {
    this.id = `${hash}:${majorParameter}`;
  }

  /**
   * Emits a debug message
   *
   * @param message - The message to debug
   */
  private debug(message: string) {
    this.manager.emit(RESTEvents.Debug, `[REST ${this.id}] ${message}`);
  }

  /**
   * {@inheritDoc IHandler.queueRequest}
   */
  public async queueRequest(
    routeId: RouteData,
    url: string,
    options: RequestOptions,
    requestData: HandlerRequestData,
  ): Promise<Dispatcher.ResponseData> {
    // Burst requests need no queueing — go straight to the request loop
    return this.runRequest(routeId, url, options, requestData);
  }

  /**
   * The method that actually makes the request to the API, and updates info about the bucket accordingly
   *
   * @param routeId - The generalized API route with literal ids for major parameters
   * @param url - The fully resolved URL to make the request to
   * @param options - The fetch options needed to make the request
   * @param requestData - Extra data from the user's request needed for errors and additional processing
   * @param retries - The number of retries this request has already attempted (recursion)
   */
  private async runRequest(
    routeId: RouteData,
    url: string,
    options: RequestOptions,
    requestData: HandlerRequestData,
    retries = 0,
  ): Promise<Dispatcher.ResponseData> {
    const method = options.method ?? 'get';
    const response = await makeNetworkRequest(this.manager, routeId, url, options, requestData, retries);

    // A null response means the shared layer determined the request should be retried
    if (response === null) {
      // eslint-disable-next-line no-param-reassign
      return this.runRequest(routeId, url, options, requestData, ++retries);
    }

    const status = response.statusCode;
    // Amount of time in milliseconds until we should retry if rate limited (globally or otherwise)
    let retryAfter = 0;
    const retryHeader = parseHeader(response.headers['retry-after']);
    if (retryHeader) retryAfter = Number(retryHeader) * 1_000 + this.manager.options.offset;

    // 401/403/429 all count against the invalid request limit
    if (status === 401 || status === 403 || status === 429) {
      incrementInvalidCount(this.manager);
    }

    // Successful responses pass straight through
    if (status >= 200 && status < 300) {
      return response;
    }

    if (status === 429) {
      // Unexpected ratelimit
      const isGlobal = response.headers['x-ratelimit-global'] !== undefined;
      await onRateLimit(this.manager, {
        timeToReset: retryAfter,
        limit: Number.POSITIVE_INFINITY,
        method,
        hash: this.hash,
        url,
        route: routeId.bucketRoute,
        majorParameter: this.majorParameter,
        global: isGlobal,
      });
      this.debug(
        [
          'Encountered unexpected 429 rate limit',
          ` Global : ${isGlobal}`,
          ` Method : ${method}`,
          ` URL : ${url}`,
          ` Bucket : ${routeId.bucketRoute}`,
          ` Major parameter: ${routeId.majorParameter}`,
          ` Hash : ${this.hash}`,
          ` Limit : ${Number.POSITIVE_INFINITY}`,
          ` Retry After : ${retryAfter}ms`,
          ` Sublimit : None`,
        ].join('\n'),
      );

      // We are bypassing all other limits, but an encountered limit should be respected (it's probably a non-punished rate limit anyways)
      await sleep(retryAfter);

      // Since this is not a server side issue, the next request should pass, so we don't bump the retries counter
      return this.runRequest(routeId, url, options, requestData, retries);
    }

    // Everything else (other 4xx / 5xx) goes through the shared error handler
    const handled = await handleErrors(this.manager, response, method, url, requestData, retries);
    if (handled === null) {
      // eslint-disable-next-line no-param-reassign
      return this.runRequest(routeId, url, options, requestData, ++retries);
    }

    return handled;
  }
}

View File

@@ -26,3 +26,9 @@ export interface IHandler {
requestData: HandlerRequestData, requestData: HandlerRequestData,
): Promise<Dispatcher.ResponseData>; ): Promise<Dispatcher.ResponseData>;
} }
/**
 * A minimal `AbortSignal` shape covering only the members the handlers use.
 *
 * @remarks
 * Node.js's bundled types for `AbortSignal` are incomplete, so user-supplied
 * signals are cast to this shape before abort listeners are attached.
 * @internal
 */
export interface PolyFillAbortSignal {
// Whether the signal has already been aborted
readonly aborted: boolean;
addEventListener(type: 'abort', listener: () => void): void;
removeEventListener(type: 'abort', listener: () => void): void;
}

View File

@@ -1,25 +1,12 @@
import { setTimeout, clearTimeout } from 'node:timers';
import { setTimeout as sleep } from 'node:timers/promises'; import { setTimeout as sleep } from 'node:timers/promises';
import { AsyncQueue } from '@sapphire/async-queue'; import { AsyncQueue } from '@sapphire/async-queue';
import { request, type Dispatcher } from 'undici'; import type { Dispatcher } from 'undici';
import type { RateLimitData, RequestOptions } from '../REST'; import type { RateLimitData, RequestOptions } from '../REST';
import type { HandlerRequestData, RequestManager, RouteData } from '../RequestManager'; import type { HandlerRequestData, RequestManager, RouteData } from '../RequestManager';
import { DiscordAPIError, type DiscordErrorData, type OAuthErrorData } from '../errors/DiscordAPIError.js';
import { HTTPError } from '../errors/HTTPError.js';
import { RateLimitError } from '../errors/RateLimitError.js';
import { RESTEvents } from '../utils/constants.js'; import { RESTEvents } from '../utils/constants.js';
import { hasSublimit, parseHeader, parseResponse, shouldRetry } from '../utils/utils.js'; import { hasSublimit, onRateLimit, parseHeader } from '../utils/utils.js';
import type { IHandler } from './IHandler.js'; import type { IHandler } from './IHandler.js';
import { handleErrors, incrementInvalidCount, makeNetworkRequest } from './Shared.js';
/**
* Invalid request limiting is done on a per-IP basis, not a per-token basis.
* The best we can do is track invalid counts process-wide (on the theory that
* users could have multiple bots run from one process) rather than per-bot.
* Therefore, store these at file scope here rather than in the client's
* RESTManager object.
*/
let invalidCount = 0;
let invalidCountResetTime: number | null = null;
const enum QueueType { const enum QueueType {
Standard, Standard,
@@ -27,7 +14,7 @@ const enum QueueType {
} }
/** /**
* The structure used to handle requests for a given bucket * The structure used to handle sequential requests for a given bucket
*/ */
export class SequentialHandler implements IHandler { export class SequentialHandler implements IHandler {
/** /**
@@ -141,22 +128,6 @@ export class SequentialHandler implements IHandler {
this.manager.globalDelay = null; this.manager.globalDelay = null;
} }
/*
* Determines whether the request should be queued or whether a RateLimitError should be thrown
*/
private async onRateLimit(rateLimitData: RateLimitData) {
const { options } = this.manager;
if (!options.rejectOnRateLimit) return;
const shouldThrow =
typeof options.rejectOnRateLimit === 'function'
? await options.rejectOnRateLimit(rateLimitData)
: options.rejectOnRateLimit.some((route) => rateLimitData.route.startsWith(route.toLowerCase()));
if (shouldThrow) {
throw new RateLimitError(rateLimitData);
}
}
/** /**
* {@inheritDoc IHandler.queueRequest} * {@inheritDoc IHandler.queueRequest}
*/ */
@@ -269,7 +240,7 @@ export class SequentialHandler implements IHandler {
// Let library users know they have hit a rate limit // Let library users know they have hit a rate limit
this.manager.emit(RESTEvents.RateLimited, rateLimitData); this.manager.emit(RESTEvents.RateLimited, rateLimitData);
// Determine whether a RateLimitError should be thrown // Determine whether a RateLimitError should be thrown
await this.onRateLimit(rateLimitData); await onRateLimit(this.manager, rateLimitData);
// When not erroring, emit debug for what is happening // When not erroring, emit debug for what is happening
if (isGlobal) { if (isGlobal) {
this.debug(`Global rate limit hit, blocking all requests for ${timeout}ms`); this.debug(`Global rate limit hit, blocking all requests for ${timeout}ms`);
@@ -291,47 +262,12 @@ export class SequentialHandler implements IHandler {
const method = options.method ?? 'get'; const method = options.method ?? 'get';
const controller = new AbortController(); const res = await makeNetworkRequest(this.manager, routeId, url, options, requestData, retries);
const timeout = setTimeout(() => controller.abort(), this.manager.options.timeout).unref();
if (requestData.signal) {
// The type polyfill is required because Node.js's types are incomplete.
const signal = requestData.signal as PolyFillAbortSignal;
// If the user signal was aborted, abort the controller, else abort the local signal.
// The reason why we don't re-use the user's signal, is because users may use the same signal for multiple
// requests, and we do not want to cause unexpected side-effects.
if (signal.aborted) controller.abort();
else signal.addEventListener('abort', () => controller.abort());
}
let res: Dispatcher.ResponseData; // Retry requested
try { if (res === null) {
res = await request(url, { ...options, signal: controller.signal }); // eslint-disable-next-line no-param-reassign
} catch (error: unknown) { return this.runRequest(routeId, url, options, requestData, ++retries);
if (!(error instanceof Error)) throw error;
// Retry the specified number of times if needed
if (shouldRetry(error) && retries !== this.manager.options.retries) {
// eslint-disable-next-line no-param-reassign
return await this.runRequest(routeId, url, options, requestData, ++retries);
}
throw error;
} finally {
clearTimeout(timeout);
}
if (this.manager.listenerCount(RESTEvents.Response)) {
this.manager.emit(
RESTEvents.Response,
{
method,
path: routeId.original,
route: routeId.bucketRoute,
options,
data: requestData,
retries,
},
{ ...res },
);
} }
const status = res.statusCode; const status = res.statusCode;
@@ -388,23 +324,7 @@ export class SequentialHandler implements IHandler {
// Count the invalid requests // Count the invalid requests
if (status === 401 || status === 403 || status === 429) { if (status === 401 || status === 403 || status === 429) {
if (!invalidCountResetTime || invalidCountResetTime < Date.now()) { incrementInvalidCount(this.manager);
invalidCountResetTime = Date.now() + 1_000 * 60 * 10;
invalidCount = 0;
}
invalidCount++;
const emitInvalid =
this.manager.options.invalidRequestWarningInterval > 0 &&
invalidCount % this.manager.options.invalidRequestWarningInterval === 0;
if (emitInvalid) {
// Let library users know periodically about invalid requests
this.manager.emit(RESTEvents.InvalidRequestWarning, {
count: invalidCount,
remainingTime: invalidCountResetTime - Date.now(),
});
}
} }
if (status >= 200 && status < 300) { if (status >= 200 && status < 300) {
@@ -425,7 +345,7 @@ export class SequentialHandler implements IHandler {
timeout = this.timeToReset; timeout = this.timeToReset;
} }
await this.onRateLimit({ await onRateLimit(this.manager, {
timeToReset: timeout, timeToReset: timeout,
limit, limit,
method, method,
@@ -475,36 +395,14 @@ export class SequentialHandler implements IHandler {
// Since this is not a server side issue, the next request should pass, so we don't bump the retries counter // Since this is not a server side issue, the next request should pass, so we don't bump the retries counter
return this.runRequest(routeId, url, options, requestData, retries); return this.runRequest(routeId, url, options, requestData, retries);
} else if (status >= 500 && status < 600) { } else {
// Retry the specified number of times for possible server side issues const handled = await handleErrors(this.manager, res, method, url, requestData, retries);
if (retries !== this.manager.options.retries) { if (handled === null) {
// eslint-disable-next-line no-param-reassign // eslint-disable-next-line no-param-reassign
return this.runRequest(routeId, url, options, requestData, ++retries); return this.runRequest(routeId, url, options, requestData, ++retries);
} }
// We are out of retries, throw an error return handled;
throw new HTTPError(status, method, url, requestData);
} else {
// Handle possible malformed requests
if (status >= 400 && status < 500) {
// If we receive this status code, it means the token we had is no longer valid.
if (status === 401 && requestData.auth) {
this.manager.setToken(null!);
}
// The request will not succeed for some reason, parse the error returned from the api
const data = (await parseResponse(res)) as DiscordErrorData | OAuthErrorData;
// throw the API error
throw new DiscordAPIError(data, 'code' in data ? data.code : data.error, status, method, url, requestData);
}
return res;
} }
} }
} }
interface PolyFillAbortSignal {
readonly aborted: boolean;
addEventListener(type: 'abort', listener: () => void): void;
removeEventListener(type: 'abort', listener: () => void): void;
}

View File

@@ -0,0 +1,157 @@
import { setTimeout, clearTimeout } from 'node:timers';
import { request, type Dispatcher } from 'undici';
import type { RequestOptions } from '../REST.js';
import type { HandlerRequestData, RequestManager, RouteData } from '../RequestManager.js';
import type { DiscordErrorData, OAuthErrorData } from '../errors/DiscordAPIError.js';
import { DiscordAPIError } from '../errors/DiscordAPIError.js';
import { HTTPError } from '../errors/HTTPError.js';
import { RESTEvents } from '../utils/constants.js';
import { parseResponse, shouldRetry } from '../utils/utils.js';
import type { PolyFillAbortSignal } from './IHandler.js';
/**
* Invalid request limiting is done on a per-IP basis, not a per-token basis.
* The best we can do is track invalid counts process-wide (on the theory that
* users could have multiple bots run from one process) rather than per-bot.
* Therefore, store these at file scope here rather than in the client's
* RESTManager object.
*/
let invalidCount = 0;
let invalidCountResetTime: number | null = null;

/**
 * Increment the invalid request count and emit warning if necessary
 *
 * @internal
 */
export function incrementInvalidCount(manager: RequestManager) {
  // Start a fresh 10-minute counting window when none is active or the old one expired
  if (invalidCountResetTime === null || invalidCountResetTime < Date.now()) {
    invalidCountResetTime = Date.now() + 1_000 * 60 * 10;
    invalidCount = 0;
  }

  invalidCount += 1;

  const { invalidRequestWarningInterval } = manager.options;
  if (invalidRequestWarningInterval > 0 && invalidCount % invalidRequestWarningInterval === 0) {
    // Let library users know periodically about invalid requests
    manager.emit(RESTEvents.InvalidRequestWarning, {
      count: invalidCount,
      remainingTime: invalidCountResetTime - Date.now(),
    });
  }
}
/**
* Performs the actual network request for a request handler
*
* @param manager - The manager that holds options and emits informational events
* @param routeId - The generalized api route with literal ids for major parameters
* @param url - The fully resolved url to make the request to
* @param options - The fetch options needed to make the request
* @param requestData - Extra data from the user's request needed for errors and additional processing
* @param retries - The number of retries this request has already attempted (recursion occurs on the handler)
* @returns The respond from the network or `null` when the request should be retried
* @internal
*/
export async function makeNetworkRequest(
  manager: RequestManager,
  routeId: RouteData,
  url: string,
  options: RequestOptions,
  requestData: HandlerRequestData,
  retries: number,
) {
  // Abort the request ourselves once the configured timeout elapses
  const abortController = new AbortController();
  const timeoutHandle = setTimeout(() => abortController.abort(), manager.options.timeout).unref();

  if (requestData.signal) {
    // The type polyfill is required because Node.js's types are incomplete.
    const userSignal = requestData.signal as PolyFillAbortSignal;
    // Mirror the user's signal onto our local controller rather than re-using it:
    // users may share one signal across several requests, and we must not attach
    // side-effects to their signal.
    if (userSignal.aborted) {
      abortController.abort();
    } else {
      userSignal.addEventListener('abort', () => abortController.abort());
    }
  }

  let response: Dispatcher.ResponseData;
  try {
    response = await request(url, { ...options, signal: abortController.signal });
  } catch (error: unknown) {
    if (!(error instanceof Error)) throw error;

    // Recoverable network errors are signalled back as null, which makes the
    // calling handler retry, until the retry budget is exhausted
    if (shouldRetry(error) && retries !== manager.options.retries) {
      return null;
    }

    throw error;
  } finally {
    clearTimeout(timeoutHandle);
  }

  if (manager.listenerCount(RESTEvents.Response)) {
    const requestInfo = {
      method: options.method ?? 'get',
      path: routeId.original,
      route: routeId.bucketRoute,
      options,
      data: requestData,
      retries,
    };
    // Shallow-copy the response so listeners cannot mutate the one we return
    manager.emit(RESTEvents.Response, requestInfo, { ...response });
  }

  return response;
}
/**
* Handles 5xx and 4xx errors (not 429's) conventionally. 429's should be handled before calling this function
*
* @param manager - The manager that holds options and emits informational events
* @param res - The response received from {@link makeNetworkRequest}
* @param method - The method used to make the request
* @param url - The fully resolved url to make the request to
* @param requestData - Extra data from the user's request needed for errors and additional processing
* @param retries - The number of retries this request has already attempted (recursion occurs on the handler)
* @returns - The response if the status code is not handled or null to request a retry
*/
export async function handleErrors(
  manager: RequestManager,
  res: Dispatcher.ResponseData,
  method: string,
  url: string,
  requestData: HandlerRequestData,
  retries: number,
) {
  const status = res.statusCode;

  // 5xx: possible server side issue — ask the handler to retry while budget remains
  if (status >= 500 && status < 600) {
    if (retries !== manager.options.retries) {
      return null;
    }

    // We are out of retries, throw an error
    throw new HTTPError(status, method, url, requestData);
  }

  // 4xx (other than 429, which callers handle first): the request is malformed or unauthorized
  if (status >= 400 && status < 500) {
    // A 401 on an authenticated request means the token we had is no longer valid
    if (status === 401 && requestData.auth) {
      manager.setToken(null!);
    }

    // The request will not succeed for some reason, parse the error returned from the api
    const data = (await parseResponse(res)) as DiscordErrorData | OAuthErrorData;
    // throw the API error
    throw new DiscordAPIError(data, 'code' in data ? data.code : data.error, status, method, url, requestData);
  }

  // Any other status is passed back to the handler unchanged
  return res;
}

View File

@@ -55,3 +55,5 @@ export const OverwrittenMimeTypes = {
// https://github.com/discordjs/discord.js/issues/8557 // https://github.com/discordjs/discord.js/issues/8557
'image/apng': 'image/png', 'image/apng': 'image/png',
} as const satisfies Readonly<Record<string, string>>; } as const satisfies Readonly<Record<string, string>>;
export const BurstHandlerMajorIdKey = 'burst';

View File

@@ -3,8 +3,9 @@ import { URLSearchParams } from 'node:url';
import { types } from 'node:util'; import { types } from 'node:util';
import type { RESTPatchAPIChannelJSONBody } from 'discord-api-types/v10'; import type { RESTPatchAPIChannelJSONBody } from 'discord-api-types/v10';
import { FormData, type Dispatcher, type RequestInit } from 'undici'; import { FormData, type Dispatcher, type RequestInit } from 'undici';
import type { RequestOptions } from '../REST.js'; import type { RateLimitData, RequestOptions } from '../REST.js';
import { RequestMethod } from '../RequestManager.js'; import { type RequestManager, RequestMethod } from '../RequestManager.js';
import { RateLimitError } from '../errors/RateLimitError.js';
export function parseHeader(header: string[] | string | undefined): string | undefined { export function parseHeader(header: string[] | string | undefined): string | undefined {
if (header === undefined || typeof header === 'string') { if (header === undefined || typeof header === 'string') {
@@ -148,3 +149,21 @@ export function shouldRetry(error: Error | NodeJS.ErrnoException) {
// Downlevel ECONNRESET to retry as it may be recoverable // Downlevel ECONNRESET to retry as it may be recoverable
return ('code' in error && error.code === 'ECONNRESET') || error.message.includes('ECONNRESET'); return ('code' in error && error.code === 'ECONNRESET') || error.message.includes('ECONNRESET');
} }
/**
 * Determines whether the request should be queued or whether a RateLimitError should be thrown
 *
 * @internal
 */
export async function onRateLimit(manager: RequestManager, rateLimitData: RateLimitData) {
  const { rejectOnRateLimit } = manager.options;
  // When the option is unset, rate limits are always waited out rather than thrown
  if (!rejectOnRateLimit) return;

  // The option is either a user predicate or a list of route prefixes to reject on
  const shouldThrow =
    typeof rejectOnRateLimit === 'function'
      ? await rejectOnRateLimit(rateLimitData)
      : rejectOnRateLimit.some((route) => rateLimitData.route.startsWith(route.toLowerCase()));

  if (shouldThrow) {
    throw new RateLimitError(rateLimitData);
  }
}