feat: @discordjs/ws (#8260)

Co-authored-by: Parbez <imranbarbhuiya.fsd@gmail.com>
This commit is contained in:
DD
2022-07-22 20:13:47 +03:00
committed by GitHub
parent 830c670c61
commit 748d7271c4
37 changed files with 3659 additions and 2612 deletions

View File

@@ -6,5 +6,6 @@
"source.fixAll": true,
"source.organizeImports": false
},
"unocss.root": "./packages/website"
"unocss.root": "./packages/website",
"typescript.tsdk": "node_modules/typescript/lib"
}

View File

@@ -45,6 +45,12 @@ runs:
files: ./packages/website/coverage/cobertura-coverage.xml
flags: website
- name: Upload WS Coverage
uses: codecov/codecov-action@v3
with:
files: ./packages/ws/coverage/cobertura-coverage.xml
flags: ws
- name: Upload Utilities Coverage
uses: codecov/codecov-action@v3
with:

View File

@@ -0,0 +1,5 @@
{
"name": "ws",
"org": "discordjs",
"packagePath": "packages/ws"
}

View File

@@ -0,0 +1,3 @@
{
"extends": "../../.eslintrc.json"
}

27
packages/ws/.gitignore vendored Normal file
View File

@@ -0,0 +1,27 @@
# Packages
node_modules/
# Log files
logs/
*.log
npm-debug.log*
# Runtime data
pids
*.pid
*.seed
# Env
.env
# Dist
dist/
typings/
docs/**/*
!docs/index.json
!docs/README.md
# Miscellaneous
.tmp/
coverage/
tsconfig.tsbuildinfo

View File

@@ -0,0 +1,9 @@
# Autogenerated
CHANGELOG.md
.turbo
dist/
docs/**/*
!docs/index.yml
!docs/README.md
coverage/
tsup.config.*.mjs

View File

@@ -0,0 +1 @@
// Re-use the repository-level Prettier configuration for this package.
const rootPrettierConfig = require('../../.prettierrc.json');
module.exports = rootPrettierConfig;

191
packages/ws/LICENSE Normal file
View File

@@ -0,0 +1,191 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
Copyright 2022 Noel Buechler
Copyright 2022 Charlotte Cristea
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

124
packages/ws/README.md Normal file
View File

@@ -0,0 +1,124 @@
<div align="center">
<br />
<p>
<a href="https://discord.js.org"><img src="https://discord.js.org/static/logo.svg" width="546" alt="discord.js" /></a>
</p>
<br />
<p>
<a href="https://discord.gg/djs"><img src="https://img.shields.io/discord/222078108977594368?color=5865F2&logo=discord&logoColor=white" alt="Discord server" /></a>
<a href="https://www.npmjs.com/package/@discordjs/ws"><img src="https://img.shields.io/npm/v/@discordjs/ws.svg?maxAge=3600" alt="npm version" /></a>
<a href="https://www.npmjs.com/package/@discordjs/ws"><img src="https://img.shields.io/npm/dt/@discordjs/ws.svg?maxAge=3600" alt="npm downloads" /></a>
<a href="https://github.com/discordjs/discord.js/actions"><img src="https://github.com/discordjs/discord.js/actions/workflows/test.yml/badge.svg" alt="Build status" /></a>
<a href="https://codecov.io/gh/discordjs/discord.js" ><img src="https://codecov.io/gh/discordjs/discord.js/branch/main/graph/badge.svg?precision=2&flag=ws" alt="Code coverage" /></a>
</p>
</div>
## About
`@discordjs/ws` is a powerful wrapper around Discord's gateway.
## Installation
**Node.js 16.9.0 or newer is required.**
```sh-session
npm install @discordjs/ws
yarn add @discordjs/ws
pnpm add @discordjs/ws
```
## Example usage
```ts
import { WebSocketManager, WebSocketShardEvents } from '@discordjs/ws';
import { REST } from '@discordjs/rest';
const rest = new REST().setToken(process.env.DISCORD_TOKEN);
// This example will spawn Discord's recommended shard count, all under the current process.
const manager = new WebSocketManager({
token: process.env.DISCORD_TOKEN,
intents: 0, // for no intents
rest,
});
await manager.connect();
```
### Specify shards
```ts
// Spawn 4 shards
const manager = new WebSocketManager({
token: process.env.DISCORD_TOKEN,
intents: 0,
rest,
shardCount: 4,
});
// The manager also supports being responsible for only a subset of your shards:
// Your bot will run 8 shards overall
// This manager will only take care of 0, 2, 4, and 6
const manager = new WebSocketManager({
token: process.env.DISCORD_TOKEN,
intents: 0,
rest,
shardCount: 8,
shardIds: [0, 2, 4, 6],
});
// Alternatively, if your shards are consecutive, you can pass in a range
const manager = new WebSocketManager({
token: process.env.DISCORD_TOKEN,
intents: 0,
rest,
shardCount: 8,
shardIds: {
start: 0,
end: 4,
},
});
```
### Specify `worker_threads`
You can also have the shards spawn in worker threads:
```ts
import { WebSocketManager, WebSocketShardEvents, WorkerShardingStrategy } from '@discordjs/ws';
const manager = new WebSocketManager({
token: process.env.DISCORD_TOKEN,
intents: 0,
rest,
shardCount: 6,
});
// This will cause 3 workers to spawn, 2 shards per each
manager.setStrategy(new WorkerShardingStrategy(manager, { shardsPerWorker: 2 }));
// Or maybe you want all your shards under a single worker
manager.setStrategy(new WorkerShardingStrategy(manager, { shardsPerWorker: 'all' }));
```
## Links
- [Website](https://discord.js.org/) ([source](https://github.com/discordjs/discord.js/tree/main/packages/website))
- [Documentation](https://discord.js.org/#/docs/ws)
- [Guide](https://discordjs.guide/) ([source](https://github.com/discordjs/guide))
See also the [Update Guide](https://discordjs.guide/additional-info/changes-in-v13.html), including updated and removed items in the library.
- [discord.js Discord server](https://discord.gg/djs)
- [Discord API Discord server](https://discord.gg/discord-api)
- [GitHub](https://github.com/discordjs/discord.js/tree/main/packages/ws)
- [npm](https://www.npmjs.com/package/@discordjs/ws)
- [Related libraries](https://discord.com/developers/docs/topics/community-resources#libraries)
## Contributing
Before creating an issue, please ensure that it hasn't already been reported/suggested, and double-check the
[documentation](https://discord.js.org/#/docs/ws).
See [the contribution guide](https://github.com/discordjs/discord.js/blob/main/.github/CONTRIBUTING.md) if you'd like to submit a PR.
## Help
If you don't understand something in the documentation, you are experiencing problems, or you just need a gentle
nudge in the right direction, please don't hesitate to join our official [discord.js Server](https://discord.gg/djs).

View File

@@ -0,0 +1,83 @@
// Unit test for WorkerContextFetchingStrategy: verifies that session info
// written via updateSessionInfo can be read back via retrieveSessionInfo,
// with the worker's parent port mocked to answer session-info requests.
import { REST } from '@discordjs/rest';
import { MockAgent, Interceptable } from 'undici';
import { beforeEach, test, vi, expect } from 'vitest';
// NOTE(review): the "Recieve" spelling below matches the identifiers exported
// from ../../src — do not correct it here without also renaming the exports.
import {
managerToFetchingStrategyOptions,
WorkerContextFetchingStrategy,
WorkerRecievePayload,
WorkerSendPayload,
WebSocketManager,
WorkerSendPayloadOp,
WorkerRecievePayloadOp,
} from '../../src';
// Fresh mock agent per test so intercepted routes don't leak between tests.
let mockAgent: MockAgent;
let mockPool: Interceptable;
beforeEach(() => {
mockAgent = new MockAgent();
mockAgent.disableNetConnect();
mockPool = mockAgent.get('https://discord.com');
});
// Canned session payload: both what the test stores and what the mocked
// parent port echoes back for retrieval.
const session = {
shardId: 0,
shardCount: 1,
sequence: 123,
sessionId: 'abc',
};
// Mock node:worker_threads so the strategy sees isMainThread: false (i.e.
// believes it is running inside a worker) and so RetrieveSessionInfo
// messages posted to the parent port are answered with `session` above.
vi.mock('node:worker_threads', async () => {
const { EventEmitter }: typeof import('node:events') = await vi.importActual('node:events');
class MockParentPort extends EventEmitter {
public postMessage(message: WorkerRecievePayload) {
if (message.op === WorkerRecievePayloadOp.RetrieveSessionInfo) {
// Echo a SessionInfoResponse carrying the same nonce so the
// strategy's pending retrieval promise resolves.
const response: WorkerSendPayload = {
op: WorkerSendPayloadOp.SessionInfoResponse,
nonce: message.nonce,
session,
};
this.emit('message', response);
}
}
}
return {
parentPort: new MockParentPort(),
isMainThread: false,
};
});
test('session info', async () => {
const rest = new REST().setAgent(mockAgent).setToken('A-Very-Fake-Token');
const manager = new WebSocketManager({ token: 'A-Very-Fake-Token', intents: 0, rest });
// Stub GET /gateway/bot so managerToFetchingStrategyOptions can resolve
// gateway information without network access.
mockPool
.intercept({
path: '/api/v10/gateway/bot',
method: 'GET',
})
.reply(() => ({
data: {
shards: 1,
session_start_limit: {
max_concurrency: 3,
reset_after: 60,
remaining: 3,
total: 3,
},
url: 'wss://gateway.discord.gg',
},
statusCode: 200,
responseOptions: {
headers: {
'content-type': 'application/json',
},
},
}));
const strategy = new WorkerContextFetchingStrategy(await managerToFetchingStrategyOptions(manager));
// Store, then read back through the mocked parent port round-trip.
strategy.updateSessionInfo(0, session);
expect(await strategy.retrieveSessionInfo(0)).toEqual(session);
});

View File

@@ -0,0 +1,195 @@
// Integration-style test for WorkerShardingStrategy: spawns a mocked Worker,
// connects, sends a gateway payload, exercises the session-info round-trip,
// and destroys the manager — asserting the strategy drove the worker
// correctly at each step.
import { REST } from '@discordjs/rest';
import {
GatewayDispatchEvents,
GatewayDispatchPayload,
GatewayOpcodes,
GatewaySendPayload,
} from 'discord-api-types/v10';
import { MockAgent, Interceptable } from 'undici';
import { beforeEach, test, vi, expect, afterEach } from 'vitest';
// NOTE(review): the "Recieve" spelling below matches the identifiers exported
// from ../../src — do not correct it here without also renaming the exports.
import {
WorkerRecievePayload,
WorkerSendPayload,
WebSocketManager,
WorkerSendPayloadOp,
WorkerRecievePayloadOp,
WorkerShardingStrategy,
WebSocketShardEvents,
SessionInfo,
} from '../../src';
let mockAgent: MockAgent;
let mockPool: Interceptable;
// Module-level spies so the hoisted vi.mock factory below can close over them.
const mockConstructor = vi.fn();
const mockSend = vi.fn();
const mockTerminate = vi.fn();
// Dispatch payload the mocked worker emits in response to RequestGuildMembers.
const memberChunkData: GatewayDispatchPayload = {
op: GatewayOpcodes.Dispatch,
s: 123,
t: GatewayDispatchEvents.GuildMembersChunk,
d: {
guild_id: '123',
members: [],
},
};
// Fallback session used when the manager has no stored session to return.
const sessionInfo: SessionInfo = {
shardId: 0,
shardCount: 2,
sequence: 123,
sessionId: 'abc',
};
// Replace node:worker_threads with a MockWorker that acknowledges
// connect/destroy, simulates a dispatch + session-info fetch on send,
// and records every call through the module-level spies above.
vi.mock('node:worker_threads', async () => {
const { EventEmitter }: typeof import('node:events') = await vi.importActual('node:events');
class MockWorker extends EventEmitter {
public constructor(...args: any[]) {
super();
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
mockConstructor(...args);
// need to delay this by an event loop cycle to allow the strategy to attach a listener
setImmediate(() => this.emit('online'));
}
public postMessage(message: WorkerSendPayload) {
switch (message.op) {
case WorkerSendPayloadOp.Connect: {
// Acknowledge the connect so the strategy's connect() resolves.
const response: WorkerRecievePayload = {
op: WorkerRecievePayloadOp.Connected,
shardId: message.shardId,
};
this.emit('message', response);
break;
}
case WorkerSendPayloadOp.Destroy: {
// Acknowledge the destroy so the strategy's destroy() resolves.
const response: WorkerRecievePayload = {
op: WorkerRecievePayloadOp.Destroyed,
shardId: message.shardId,
};
this.emit('message', response);
break;
}
case WorkerSendPayloadOp.Send: {
if (message.payload.op === GatewayOpcodes.RequestGuildMembers) {
// Simulate the shard dispatching a GuildMembersChunk event...
const response: WorkerRecievePayload = {
op: WorkerRecievePayloadOp.Event,
shardId: message.shardId,
event: WebSocketShardEvents.Dispatch,
data: memberChunkData,
};
this.emit('message', response);
// Fetch session info
const sessionFetch: WorkerRecievePayload = {
op: WorkerRecievePayloadOp.RetrieveSessionInfo,
shardId: message.shardId,
nonce: Math.random(),
};
this.emit('message', sessionFetch);
}
mockSend(message.shardId, message.payload);
break;
}
case WorkerSendPayloadOp.SessionInfoResponse: {
// If the manager had no stored session, substitute the canned one,
// then push back an UpdateSessionInfo with the sequence bumped by 1.
message.session ??= sessionInfo;
const session: WorkerRecievePayload = {
op: WorkerRecievePayloadOp.UpdateSessionInfo,
shardId: message.session.shardId,
session: { ...message.session, sequence: message.session.sequence + 1 },
};
this.emit('message', session);
break;
}
}
}
public terminate = mockTerminate;
}
return {
Worker: MockWorker,
};
});
beforeEach(() => {
mockAgent = new MockAgent();
mockAgent.disableNetConnect();
mockPool = mockAgent.get('https://discord.com');
});
// Reset the worker spies between tests so call counts start clean.
afterEach(() => {
mockConstructor.mockRestore();
mockSend.mockRestore();
mockTerminate.mockRestore();
});
test('spawn, connect, send a message, session info, and destroy', async () => {
const rest = new REST().setAgent(mockAgent).setToken('A-Very-Fake-Token');
const mockRetrieveSessionInfo = vi.fn();
const mockUpdateSessionInfo = vi.fn();
const manager = new WebSocketManager({
token: 'A-Very-Fake-Token',
intents: 0,
rest,
shardIds: [0, 1],
retrieveSessionInfo: mockRetrieveSessionInfo,
updateSessionInfo: mockUpdateSessionInfo,
});
const managerEmitSpy = vi.spyOn(manager, 'emit');
// Stub GET /gateway/bot so connect() can resolve gateway info offline.
mockPool
.intercept({
path: '/api/v10/gateway/bot',
method: 'GET',
})
.reply(() => ({
data: {
shards: 1,
session_start_limit: {
max_concurrency: 3,
reset_after: 60,
remaining: 3,
total: 3,
},
url: 'wss://gateway.discord.gg',
},
statusCode: 200,
responseOptions: {
headers: {
'content-type': 'application/json',
},
},
}));
// shardsPerWorker: 'all' -> a single worker owns both shards [0, 1].
const strategy = new WorkerShardingStrategy(manager, { shardsPerWorker: 'all' });
manager.setStrategy(strategy);
await manager.connect();
expect(mockConstructor).toHaveBeenCalledWith(
expect.stringContaining('worker.cjs'),
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
expect.objectContaining({ workerData: expect.objectContaining({ shardIds: [0, 1] }) }),
);
const payload: GatewaySendPayload = { op: GatewayOpcodes.RequestGuildMembers, d: { guild_id: '123', limit: 0 } };
await manager.send(0, payload);
// The strategy forwarded the payload to the worker for shard 0...
expect(mockSend).toHaveBeenCalledWith(0, payload);
// ...and re-emitted the worker's dispatch, tagged with the shard id.
expect(managerEmitSpy).toHaveBeenCalledWith(WebSocketShardEvents.Dispatch, {
...memberChunkData,
shardId: 0,
});
expect(mockRetrieveSessionInfo).toHaveBeenCalledWith(0);
// The mocked worker bumps the sequence by one on the round-trip.
expect(mockUpdateSessionInfo).toHaveBeenCalledWith(0, { ...sessionInfo, sequence: sessionInfo.sequence + 1 });
await manager.destroy({ reason: 'souji is a soft boi :3' });
expect(mockTerminate).toHaveBeenCalled();
});

View File

@@ -0,0 +1,46 @@
// Unit test for IdentifyThrottler: with max_concurrency = 2, two identifies
// per window proceed immediately and the third must wait. The sleep from
// node:timers/promises is mocked so "waiting" is observable as a call count
// instead of real delay.
import { setTimeout as sleep } from 'node:timers/promises';
import { expect, Mock, test, vi } from 'vitest';
import { IdentifyThrottler, WebSocketManager } from '../../src';
vi.mock('node:timers/promises', () => ({
setTimeout: vi.fn(),
}));
// Minimal manager stub: the throttler only needs fetchGatewayInformation.
const fetchGatewayInformation = vi.fn();
const manager = {
fetchGatewayInformation,
} as unknown as WebSocketManager;
const throttler = new IdentifyThrottler(manager);
// Freeze time so the throttler's window math is deterministic.
vi.useFakeTimers();
const NOW = vi.fn().mockReturnValue(Date.now());
global.Date.now = NOW;
test('wait for identify', async () => {
fetchGatewayInformation.mockReturnValue({
session_start_limit: {
max_concurrency: 2,
},
});
// First call should never wait
await throttler.waitForIdentify();
expect(sleep).not.toHaveBeenCalled();
// Second call still won't wait because max_concurrency is 2
await throttler.waitForIdentify();
expect(sleep).not.toHaveBeenCalled();
// Third call should wait
await throttler.waitForIdentify();
expect(sleep).toHaveBeenCalled();
(sleep as Mock).mockRestore();
// Fourth call shouldn't wait, because our max_concurrency is 2 and we waited for a reset
await throttler.waitForIdentify();
expect(sleep).not.toHaveBeenCalled();
});

View File

@@ -0,0 +1,197 @@
// Tests for WebSocketManager: gateway-info caching (hit, forced refetch,
// expiry), shard-count derivation from the various shardIds/shardCount
// option shapes, and delegation of lifecycle calls to a custom strategy.
import { REST } from '@discordjs/rest';
import { APIGatewayBotInfo, GatewayOpcodes, GatewaySendPayload } from 'discord-api-types/v10';
import { MockAgent, Interceptable } from 'undici';
import { beforeEach, describe, expect, test, vi } from 'vitest';
import { IShardingStrategy, WebSocketManager } from '../../src';
vi.useFakeTimers();
let mockAgent: MockAgent;
let mockPool: Interceptable;
beforeEach(() => {
mockAgent = new MockAgent();
mockAgent.disableNetConnect();
mockPool = mockAgent.get('https://discord.com');
});
// Control Date.now so the cache-expiry branch can be forced (see the
// NOW.mockReturnValue(Infinity) below).
const NOW = vi.fn().mockReturnValue(Date.now());
global.Date.now = NOW;
test('fetch gateway information', async () => {
const rest = new REST().setAgent(mockAgent).setToken('A-Very-Fake-Token');
const manager = new WebSocketManager({ token: 'A-Very-Fake-Token', intents: 0, rest });
const data: APIGatewayBotInfo = {
shards: 1,
session_start_limit: {
max_concurrency: 3,
reset_after: 60,
remaining: 3,
total: 3,
},
url: 'wss://gateway.discord.gg',
};
// Spy reply handler: call counts tell us whether the HTTP route was hit
// or the cached value was served.
const fetch = vi.fn(() => ({
data,
statusCode: 200,
responseOptions: {
headers: {
'content-type': 'application/json',
},
},
}));
mockPool
.intercept({
path: '/api/v10/gateway/bot',
method: 'GET',
})
.reply(fetch);
// Cold fetch: hits the route once.
const initial = await manager.fetchGatewayInformation();
expect(initial).toEqual(data);
expect(fetch).toHaveBeenCalledOnce();
fetch.mockRestore();
// Warm fetch: served from cache, route not hit.
const cached = await manager.fetchGatewayInformation();
expect(cached).toEqual(data);
expect(fetch).not.toHaveBeenCalled();
fetch.mockRestore();
mockPool
.intercept({
path: '/api/v10/gateway/bot',
method: 'GET',
})
.reply(fetch);
// force = true bypasses the cache.
const forced = await manager.fetchGatewayInformation(true);
expect(forced).toEqual(data);
expect(fetch).toHaveBeenCalledOnce();
fetch.mockRestore();
mockPool
.intercept({
path: '/api/v10/gateway/bot',
method: 'GET',
})
.reply(fetch);
// Jump "now" past any expiry so the cached entry is considered stale.
NOW.mockReturnValue(Infinity);
const cacheExpired = await manager.fetchGatewayInformation();
expect(cacheExpired).toEqual(data);
expect(fetch).toHaveBeenCalledOnce();
});
describe('get shard count', () => {
test('with shard count', async () => {
const rest = new REST().setAgent(mockAgent).setToken('A-Very-Fake-Token');
const manager = new WebSocketManager({ token: 'A-Very-Fake-Token', intents: 0, rest, shardCount: 2 });
expect(await manager.getShardCount()).toBe(2);
});
test('with shard ids array', async () => {
const rest = new REST().setAgent(mockAgent).setToken('A-Very-Fake-Token');
const shardIds = [5, 9];
const manager = new WebSocketManager({ token: 'A-Very-Fake-Token', intents: 0, rest, shardIds });
// Count is derived from the highest id (ids are zero-based).
expect(await manager.getShardCount()).toBe(shardIds.at(-1)! + 1);
});
test('with shard id range', async () => {
const rest = new REST().setAgent(mockAgent).setToken('A-Very-Fake-Token');
const shardIds = { start: 5, end: 9 };
const manager = new WebSocketManager({ token: 'A-Very-Fake-Token', intents: 0, rest, shardIds });
expect(await manager.getShardCount()).toBe(shardIds.end + 1);
});
});
test('update shard count', async () => {
const rest = new REST().setAgent(mockAgent).setToken('A-Very-Fake-Token');
const manager = new WebSocketManager({ token: 'A-Very-Fake-Token', intents: 0, rest, shardCount: 2 });
const data: APIGatewayBotInfo = {
shards: 1,
session_start_limit: {
max_concurrency: 3,
reset_after: 60,
remaining: 3,
total: 3,
},
url: 'wss://gateway.discord.gg',
};
const fetch = vi.fn(() => ({
data,
statusCode: 200,
responseOptions: {
headers: {
'content-type': 'application/json',
},
},
}));
mockPool
.intercept({
path: '/api/v10/gateway/bot',
method: 'GET',
})
.reply(fetch);
// Reading the configured count does not need gateway info.
expect(await manager.getShardCount()).toBe(2);
expect(fetch).not.toHaveBeenCalled();
fetch.mockRestore();
mockPool
.intercept({
path: '/api/v10/gateway/bot',
method: 'GET',
})
.reply(fetch);
// Updating the count re-fetches gateway information.
await manager.updateShardCount(3);
expect(await manager.getShardCount()).toBe(3);
expect(fetch).toHaveBeenCalled();
});
test('it handles passing in both shardIds and shardCount', async () => {
const rest = new REST().setAgent(mockAgent).setToken('A-Very-Fake-Token');
const shardIds = { start: 2, end: 3 };
const manager = new WebSocketManager({ token: 'A-Very-Fake-Token', intents: 0, rest, shardIds, shardCount: 4 });
// Explicit shardCount wins for the count; shardIds still bound the ids.
expect(await manager.getShardCount()).toBe(4);
expect(await manager.getShardIds()).toStrictEqual([2, 3]);
});
test('strategies', async () => {
// Verifies the manager delegates spawn/connect/destroy/send to the
// configured strategy with the expected arguments.
class MockStrategy implements IShardingStrategy {
public spawn = vi.fn();
public connect = vi.fn();
public destroy = vi.fn();
public send = vi.fn();
}
const rest = new REST().setAgent(mockAgent).setToken('A-Very-Fake-Token');
const shardIds = [0, 1, 2];
const manager = new WebSocketManager({ token: 'A-Very-Fake-Token', intents: 0, rest, shardIds });
const strategy = new MockStrategy();
manager.setStrategy(strategy);
await manager.connect();
expect(strategy.spawn).toHaveBeenCalledWith(shardIds);
expect(strategy.connect).toHaveBeenCalled();
const destroyOptions = { reason: ':3' };
await manager.destroy(destroyOptions);
expect(strategy.destroy).toHaveBeenCalledWith(destroyOptions);
const send: GatewaySendPayload = { op: GatewayOpcodes.RequestGuildMembers, d: { guild_id: '1234', limit: 0 } };
await manager.send(0, send);
expect(strategy.send).toHaveBeenCalledWith(0, send);
});

View File

@@ -0,0 +1,376 @@
/**
* Config file for API Extractor. For more info, please visit: https://api-extractor.com
*/
{
"$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json",
/**
* Optionally specifies another JSON config file that this file extends from. This provides a way for
* standard settings to be shared across multiple projects.
*
* If the path starts with "./" or "../", the path is resolved relative to the folder of the file that contains
* the "extends" field. Otherwise, the first path segment is interpreted as an NPM package name, and will be
* resolved using NodeJS require().
*
* SUPPORTED TOKENS: none
* DEFAULT VALUE: ""
*/
// "extends": "./shared/api-extractor-base.json"
// "extends": "my-package/include/api-extractor-base.json"
/**
* Determines the "<projectFolder>" token that can be used with other config file settings. The project folder
* typically contains the tsconfig.json and package.json config files, but the path is user-defined.
*
* The path is resolved relative to the folder of the config file that contains the setting.
*
* The default value for "projectFolder" is the token "<lookup>", which means the folder is determined by traversing
* parent folders, starting from the folder containing api-extractor.json, and stopping at the first folder
* that contains a tsconfig.json file. If a tsconfig.json file cannot be found in this way, then an error
* will be reported.
*
* SUPPORTED TOKENS: <lookup>
* DEFAULT VALUE: "<lookup>"
*/
// "projectFolder": "..",
/**
* (REQUIRED) Specifies the .d.ts file to be used as the starting point for analysis. API Extractor
* analyzes the symbols exported by this module.
*
* The file extension must be ".d.ts" and not ".ts".
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
*/
"mainEntryPointFilePath": "<projectFolder>/dist/index.d.ts",
/**
* A list of NPM package names whose exports should be treated as part of this package.
*
* For example, suppose that Webpack is used to generate a distributed bundle for the project "library1",
* and another NPM package "library2" is embedded in this bundle. Some types from library2 may become part
* of the exported API for library1, but by default API Extractor would generate a .d.ts rollup that explicitly
* imports library2. To avoid this, we can specify:
*
* "bundledPackages": [ "library2" ],
*
* This would direct API Extractor to embed those types directly in the .d.ts rollup, as if they had been
* local files for library1.
*/
"bundledPackages": [],
/**
* Determines how the TypeScript compiler engine will be invoked by API Extractor.
*/
"compiler": {
/**
* Specifies the path to the tsconfig.json file to be used by API Extractor when analyzing the project.
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* Note: This setting will be ignored if "overrideTsconfig" is used.
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<projectFolder>/tsconfig.json"
*/
// "tsconfigFilePath": "<projectFolder>/tsconfig.json",
/**
* Provides a compiler configuration that will be used instead of reading the tsconfig.json file from disk.
* The object must conform to the TypeScript tsconfig schema:
*
* http://json.schemastore.org/tsconfig
*
* If omitted, then the tsconfig.json file will be read from the "projectFolder".
*
* DEFAULT VALUE: no overrideTsconfig section
*/
// "overrideTsconfig": {
// . . .
// }
/**
* This option causes the compiler to be invoked with the --skipLibCheck option. This option is not recommended
* and may cause API Extractor to produce incomplete or incorrect declarations, but it may be required when
* dependencies contain declarations that are incompatible with the TypeScript engine that API Extractor uses
* for its analysis. Where possible, the underlying issue should be fixed rather than relying on skipLibCheck.
*
* DEFAULT VALUE: false
*/
// "skipLibCheck": true,
},
/**
* Configures how the API report file (*.api.md) will be generated.
*/
"apiReport": {
/**
* (REQUIRED) Whether to generate an API report.
*/
"enabled": false
/**
* The filename for the API report files. It will be combined with "reportFolder" or "reportTempFolder" to produce
* a full file path.
*
* The file extension should be ".api.md", and the string should not contain a path separator such as "\" or "/".
*
* SUPPORTED TOKENS: <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<unscopedPackageName>.api.md"
*/
// "reportFileName": "<unscopedPackageName>.api.md",
/**
* Specifies the folder where the API report file is written. The file name portion is determined by
* the "reportFileName" setting.
*
* The API report file is normally tracked by Git. Changes to it can be used to trigger a branch policy,
* e.g. for an API review.
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<projectFolder>/temp/"
*/
// "reportFolder": "<projectFolder>/temp/",
/**
* Specifies the folder where the temporary report file is written. The file name portion is determined by
* the "reportFileName" setting.
*
* After the temporary file is written to disk, it is compared with the file in the "reportFolder".
* If they are different, a production build will fail.
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<projectFolder>/temp/"
*/
// "reportTempFolder": "<projectFolder>/temp/"
},
/**
* Configures how the doc model file (*.api.json) will be generated.
*/
"docModel": {
/**
* (REQUIRED) Whether to generate a doc model file.
*/
"enabled": true,
/**
* The output path for the doc model file. The file extension should be ".api.json".
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<projectFolder>/temp/<unscopedPackageName>.api.json"
*/
"apiJsonFilePath": "<projectFolder>/docs/docs.api.json"
},
/**
* Configures how the .d.ts rollup file will be generated.
*/
"dtsRollup": {
/**
* (REQUIRED) Whether to generate the .d.ts rollup file.
*/
"enabled": false
/**
* Specifies the output path for a .d.ts rollup file to be generated without any trimming.
* This file will include all declarations that are exported by the main entry point.
*
* If the path is an empty string, then this file will not be written.
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<projectFolder>/dist/<unscopedPackageName>.d.ts"
*/
// "untrimmedFilePath": "<projectFolder>/dist/<unscopedPackageName>.d.ts",
/**
* Specifies the output path for a .d.ts rollup file to be generated with trimming for an "alpha" release.
* This file will include only declarations that are marked as "@public", "@beta", or "@alpha".
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: ""
*/
// "alphaTrimmedFilePath": "<projectFolder>/dist/<unscopedPackageName>-alpha.d.ts",
/**
* Specifies the output path for a .d.ts rollup file to be generated with trimming for a "beta" release.
* This file will include only declarations that are marked as "@public" or "@beta".
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: ""
*/
// "betaTrimmedFilePath": "<projectFolder>/dist/<unscopedPackageName>-beta.d.ts",
/**
* Specifies the output path for a .d.ts rollup file to be generated with trimming for a "public" release.
* This file will include only declarations that are marked as "@public".
*
* If the path is an empty string, then this file will not be written.
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: ""
*/
// "publicTrimmedFilePath": "<projectFolder>/dist/<unscopedPackageName>-public.d.ts",
/**
* When a declaration is trimmed, by default it will be replaced by a code comment such as
* "Excluded from this release type: exampleMember". Set "omitTrimmingComments" to true to remove the
* declaration completely.
*
* DEFAULT VALUE: false
*/
// "omitTrimmingComments": true
},
/**
* Configures how the tsdoc-metadata.json file will be generated.
*/
"tsdocMetadata": {
/**
* Whether to generate the tsdoc-metadata.json file.
*
* DEFAULT VALUE: true
*/
// "enabled": true,
/**
* Specifies where the TSDoc metadata file should be written.
*
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
* prepend a folder token such as "<projectFolder>".
*
* The default value is "<lookup>", which causes the path to be automatically inferred from the "tsdocMetadata",
* "typings" or "main" fields of the project's package.json. If none of these fields are set, the lookup
* falls back to "tsdoc-metadata.json" in the package folder.
*
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
* DEFAULT VALUE: "<lookup>"
*/
// "tsdocMetadataFilePath": "<projectFolder>/dist/tsdoc-metadata.json"
},
/**
* Specifies what type of newlines API Extractor should use when writing output files. By default, the output files
* will be written with Windows-style newlines. To use POSIX-style newlines, specify "lf" instead.
* To use the OS's default newline kind, specify "os".
*
* DEFAULT VALUE: "crlf"
*/
"newlineKind": "lf",
/**
* Configures how API Extractor reports error and warning messages produced during analysis.
*
* There are three sources of messages: compiler messages, API Extractor messages, and TSDoc messages.
*/
"messages": {
/**
* Configures handling of diagnostic messages reported by the TypeScript compiler engine while analyzing
* the input .d.ts files.
*
* TypeScript message identifiers start with "TS" followed by an integer. For example: "TS2551"
*
* DEFAULT VALUE: A single "default" entry with logLevel=warning.
*/
"compilerMessageReporting": {
/**
* Configures the default routing for messages that don't match an explicit rule in this table.
*/
"default": {
/**
* Specifies whether the message should be written to the tool's output log. Note that
* the "addToApiReportFile" property may supersede this option.
*
* Possible values: "error", "warning", "none"
*
* Errors cause the build to fail and return a nonzero exit code. Warnings cause a production build to fail
* and return a nonzero exit code. For a non-production build (e.g. when "api-extractor run" includes
* the "--local" option), the warning is displayed but the build will not fail.
*
* DEFAULT VALUE: "warning"
*/
"logLevel": "warning"
/**
* When addToApiReportFile is true: If API Extractor is configured to write an API report file (.api.md),
* then the message will be written inside that file; otherwise, the message is instead logged according to
* the "logLevel" option.
*
* DEFAULT VALUE: false
*/
// "addToApiReportFile": false
}
// "TS2551": {
// "logLevel": "warning",
// "addToApiReportFile": true
// },
//
// . . .
},
/**
* Configures handling of messages reported by API Extractor during its analysis.
*
* API Extractor message identifiers start with "ae-". For example: "ae-extra-release-tag"
*
* DEFAULT VALUE: See api-extractor-defaults.json for the complete table of extractorMessageReporting mappings
*/
"extractorMessageReporting": {
"default": {
"logLevel": "warning"
// "addToApiReportFile": false
}
// "ae-extra-release-tag": {
// "logLevel": "warning",
// "addToApiReportFile": true
// },
//
// . . .
},
/**
* Configures handling of messages reported by the TSDoc parser when analyzing code comments.
*
* TSDoc message identifiers start with "tsdoc-". For example: "tsdoc-link-tag-unescaped-text"
*
* DEFAULT VALUE: A single "default" entry with logLevel=warning.
*/
"tsdocMessageReporting": {
"default": {
"logLevel": "warning"
// "addToApiReportFile": false
}
// "tsdoc-link-tag-unescaped-text": {
// "logLevel": "warning",
// "addToApiReportFile": true
// },
//
// . . .
}
}
}

View File

@@ -0,0 +1,9 @@
import { createUnbuildConfig } from '../../build.config';
// Build configuration for @discordjs/ws
export default createUnbuildConfig({
	entries: [
		// Main library entry point
		{ builder: 'rollup', input: 'src/index' },
		// Standalone bundle executed inside worker_threads by the WorkerShardingStrategy
		{ builder: 'rollup', input: 'src/strategies/sharding/worker' },
	],
	// zlib-sync is an optional native addon; it must never be bundled
	externals: ['zlib-sync'],
});

63
packages/ws/cliff.toml Normal file
View File

@@ -0,0 +1,63 @@
[changelog]
header = """
# Changelog
All notable changes to this project will be documented in this file.\n
"""
body = """
{% if version %}\
# [{{ version | trim_start_matches(pat="v") }}]\
{% if previous %}\
{% if previous.version %}\
(https://github.com/discordjs/discord.js/compare/{{ previous.version }}...{{ version }})\
{% else %}\
(https://github.com/discordjs/discord.js/tree/{{ version }})\
{% endif %}\
{% endif %} \
- ({{ timestamp | date(format="%Y-%m-%d") }})
{% else %}\
# [unreleased]
{% endif %}\
{% for group, commits in commits | group_by(attribute="group") %}
## {{ group | upper_first }}
{% for commit in commits %}
- {% if commit.scope %}\
**{{commit.scope}}:** \
{% endif %}\
{{ commit.message | upper_first }} ([{{ commit.id | truncate(length=7, end="") }}](https://github.com/discordjs/discord.js/commit/{{ commit.id }}))\
{% if commit.breaking %}\
{% for breakingChange in commit.footers %}\
\n{% raw %} {% endraw %}- **{{ breakingChange.token }}{{ breakingChange.separator }}** {{ breakingChange.value }}\
{% endfor %}\
{% endif %}\
{% endfor %}
{% endfor %}\n
"""
trim = true
footer = ""
[git]
conventional_commits = true
filter_unconventional = true
commit_parsers = [
{ message = "^feat", group = "Features"},
{ message = "^fix", group = "Bug Fixes"},
{ message = "^docs", group = "Documentation"},
{ message = "^perf", group = "Performance"},
{ message = "^refactor", group = "Refactor"},
{ message = "^typings", group = "Typings"},
{ message = "^types", group = "Typings"},
{ message = ".*deprecated", body = ".*deprecated", group = "Deprecation"},
{ message = "^revert", skip = true},
{ message = "^style", group = "Styling"},
{ message = "^test", group = "Testing"},
{ message = "^chore", skip = true},
{ message = "^ci", skip = true},
{ message = "^build", skip = true},
{ body = ".*security", group = "Security"},
]
filter_commits = true
tag_pattern = "@discordjs/ws@[0-9]*"
ignore_tags = ""
topo_order = false
sort_commits = "newest"

View File

@@ -0,0 +1 @@
## [View the documentation here.](https://discord.js.org/#/docs/ws)

View File

@@ -0,0 +1 @@
[{ "name": "General", "files": [{ "name": "Welcome", "id": "welcome", "path": "../../README.md" }] }]

88
packages/ws/package.json Normal file
View File

@@ -0,0 +1,88 @@
{
"name": "@discordjs/ws",
"version": "0.1.0-dev",
"description": "Wrapper around Discord's gateway",
"scripts": {
"test": "vitest run",
"build": "unbuild",
"lint": "prettier --check . && eslint src __tests__ --ext mjs,js,ts",
"format": "prettier --write . && eslint src __tests__ --ext mjs,js,ts --fix",
"docs": "docgen -i src/index.ts -c docs/index.json -o docs/docs.json --typescript && api-extractor run --local",
"prepack": "yarn build && yarn lint",
"changelog": "git cliff --prepend ./CHANGELOG.md -u -c ./cliff.toml -r ../../ --include-path 'packages/ws/*'",
"release": "cliff-jumper"
},
"main": "./dist/index.cjs",
"module": "./dist/index.mjs",
"typings": "./dist/index.d.ts",
"exports": {
	"types": "./dist/index.d.ts",
	"import": "./dist/index.mjs",
	"require": "./dist/index.cjs"
},
"directories": {
"lib": "src",
"test": "__tests__"
},
"files": [
"dist"
],
"contributors": [
"Crawl <icrawltogo@gmail.com>",
"Amish Shah <amishshah.2k@gmail.com>",
"SpaceEEC <spaceeec@yahoo.com>",
"Vlad Frangu <kingdgrizzle@gmail.com>",
"Antonio Roman <kyradiscord@gmail.com>",
"DD <didinele.dev@gmail.com>"
],
"license": "Apache-2.0",
"keywords": [
"discord",
"api",
"gateway",
"discordapp",
"discordjs"
],
"repository": {
"type": "git",
"url": "git+https://github.com/discordjs/discord.js.git"
},
"bugs": {
"url": "https://github.com/discordjs/discord.js/issues"
},
"homepage": "https://discord.js.org",
"dependencies": {
"@discordjs/collection": "workspace:^",
"@discordjs/rest": "workspace:^",
"@sapphire/async-queue": "^1.3.1",
"@vladfrangu/async_event_emitter": "^2.0.1",
"discord-api-types": "^0.36.1",
"tslib": "^2.4.0",
"ws": "^8.8.0"
},
"devDependencies": {
"@discordjs/docgen": "workspace:^",
"@discordjs/scripts": "workspace:^",
"@favware/cliff-jumper": "^1.8.5",
"@microsoft/api-extractor": "^7.28.3",
"@types/node": "^18.0.3",
"@types/ws": "^8.5.3",
"c8": "^7.11.3",
"eslint": "^8.19.0",
"mock-socket": "^9.1.5",
"prettier": "^2.7.1",
"rollup-plugin-typescript2": "^0.32.1",
"tsup": "^6.1.3",
"typescript": "^4.7.4",
"unbuild": "^0.7.6",
"undici": "^5.8.0",
"vitest": "^0.17.0",
"zlib-sync": "^0.1.7"
},
"engines": {
"node": ">=16.9.0"
},
"publishConfig": {
"access": "public"
}
}

14
packages/ws/src/index.ts Normal file
View File

@@ -0,0 +1,14 @@
// Public API surface of @discordjs/ws
export * from './strategies/context/IContextFetchingStrategy';
export * from './strategies/context/SimpleContextFetchingStrategy';
export * from './strategies/context/WorkerContextFetchingStrategy';
export * from './strategies/sharding/IShardingStrategy';
export * from './strategies/sharding/SimpleShardingStrategy';
export * from './strategies/sharding/WorkerShardingStrategy';
export * from './utils/constants';
export * from './utils/IdentifyThrottler';
export * from './utils/utils';
export * from './ws/WebSocketManager';
export * from './ws/WebSocketShard';

View File

@@ -0,0 +1,31 @@
import type { Awaitable } from '@vladfrangu/async_event_emitter';
import type { APIGatewayBotInfo } from 'discord-api-types/v10';
import type { SessionInfo, WebSocketManager, WebSocketManagerOptions } from '../../ws/WebSocketManager';
/**
 * Options handed to a fetching strategy: the manager's options minus the
 * session callbacks, shard bookkeeping, and REST instance, plus pre-fetched
 * gateway information and a resolved shard count.
 */
export interface FetchingStrategyOptions
	extends Omit<
		WebSocketManagerOptions,
		'retrieveSessionInfo' | 'updateSessionInfo' | 'shardCount' | 'shardIds' | 'rest'
	> {
	// Gateway connection information, fetched via the manager's REST API
	readonly gatewayInformation: APIGatewayBotInfo;
	// Concrete shard count (the manager's own option may still be null)
	readonly shardCount: number;
}

/**
 * Strategies responsible solely for making manager information accessible
 */
export interface IContextFetchingStrategy {
	readonly options: FetchingStrategyOptions;
	// Fetches the stored session info for the given shard, if any
	retrieveSessionInfo: (shardId: number) => Awaitable<SessionInfo | null>;
	// Persists (or clears, when passed null) the session info for the given shard
	updateSessionInfo: (shardId: number, sessionInfo: SessionInfo | null) => Awaitable<void>;
}
/**
 * Derives the plain options object handed to fetching strategies from a manager.
 *
 * @param manager - The manager to derive options from
 */
export async function managerToFetchingStrategyOptions(manager: WebSocketManager): Promise<FetchingStrategyOptions> {
	// Split off the members that fetching strategies do not carry
	const {
		retrieveSessionInfo: _retrieveSessionInfo,
		updateSessionInfo: _updateSessionInfo,
		shardCount: _shardCount,
		shardIds: _shardIds,
		rest: _rest,
		...managerOptions
	} = manager.options;

	const gatewayInformation = await manager.fetchGatewayInformation();
	const shardCount = await manager.getShardCount();

	return {
		...managerOptions,
		gatewayInformation,
		shardCount,
	};
}

View File

@@ -0,0 +1,14 @@
import type { FetchingStrategyOptions, IContextFetchingStrategy } from './IContextFetchingStrategy';
import type { SessionInfo, WebSocketManager } from '../../ws/WebSocketManager';
/**
 * Fetching strategy that works in-process by delegating directly to the
 * owning manager's own session callbacks.
 */
export class SimpleContextFetchingStrategy implements IContextFetchingStrategy {
	private readonly manager: WebSocketManager;

	public readonly options: FetchingStrategyOptions;

	public constructor(manager: WebSocketManager, options: FetchingStrategyOptions) {
		this.manager = manager;
		this.options = options;
	}

	public async retrieveSessionInfo(shardId: number): Promise<SessionInfo | null> {
		return this.manager.options.retrieveSessionInfo(shardId);
	}

	public updateSessionInfo(shardId: number, sessionInfo: SessionInfo | null) {
		return this.manager.options.updateSessionInfo(shardId, sessionInfo);
	}
}

View File

@@ -0,0 +1,49 @@
import { isMainThread, parentPort } from 'node:worker_threads';
import { Collection } from '@discordjs/collection';
import type { FetchingStrategyOptions, IContextFetchingStrategy } from './IContextFetchingStrategy';
import type { SessionInfo } from '../../ws/WebSocketManager';
import {
WorkerRecievePayload,
WorkerRecievePayloadOp,
WorkerSendPayload,
WorkerSendPayloadOp,
} from '../sharding/WorkerShardingStrategy';
// Module-level counter so that nonces are unique across every strategy
// instance inside this worker thread. All instances listen on the same
// parentPort message stream, so a nonce collision would resolve the wrong
// pending session promise; the previous Math.random() nonce could (however
// unlikely) collide, whereas a monotonically increasing counter cannot.
let nextNonce = 0;

/**
 * Fetching strategy that runs inside a worker thread and proxies session info
 * requests to the main thread over `parentPort` messages.
 */
export class WorkerContextFetchingStrategy implements IContextFetchingStrategy {
	// Maps a request nonce to the resolver of the pending session promise
	private readonly sessionPromises = new Collection<number, (session: SessionInfo | null) => void>();

	public constructor(public readonly options: FetchingStrategyOptions) {
		if (isMainThread) {
			throw new Error('Cannot instantiate WorkerContextFetchingStrategy on the main thread');
		}

		parentPort!.on('message', (payload: WorkerSendPayload) => {
			if (payload.op === WorkerSendPayloadOp.SessionInfoResponse) {
				const resolve = this.sessionPromises.get(payload.nonce);
				resolve?.(payload.session);
				this.sessionPromises.delete(payload.nonce);
			}
		});
	}

	public async retrieveSessionInfo(shardId: number): Promise<SessionInfo | null> {
		const nonce = nextNonce++;
		const payload: WorkerRecievePayload = {
			op: WorkerRecievePayloadOp.RetrieveSessionInfo,
			shardId,
			nonce,
		};
		// Register the resolver before posting so the response can never race us
		const promise = new Promise<SessionInfo | null>((resolve) => this.sessionPromises.set(nonce, resolve));
		parentPort!.postMessage(payload);
		return promise;
	}

	public updateSessionInfo(shardId: number, sessionInfo: SessionInfo | null) {
		// Fire-and-forget: the main thread persists the session without acknowledging
		const payload: WorkerRecievePayload = {
			op: WorkerRecievePayloadOp.UpdateSessionInfo,
			shardId,
			session: sessionInfo,
		};
		parentPort!.postMessage(payload);
	}
}

View File

@@ -0,0 +1,25 @@
import type { GatewaySendPayload } from 'discord-api-types/v10';
import type { Awaitable } from '../../utils/utils';
import type { WebSocketShardDestroyOptions } from '../../ws/WebSocketShard';
/**
 * Strategies responsible for spawning, initializing connections, destroying shards, and relaying events
 */
export interface IShardingStrategy {
	/**
	 * Spawns all the shards
	 *
	 * @param shardIds - Ids of the shards this strategy should manage
	 */
	spawn: (shardIds: number[]) => Awaitable<void>;
	/**
	 * Initializes all the shards
	 */
	connect: () => Awaitable<void>;
	/**
	 * Destroys all the shards
	 *
	 * @param options - Destroy options; `recover` is excluded
	 */
	destroy: (options?: Omit<WebSocketShardDestroyOptions, 'recover'>) => Awaitable<void>;
	/**
	 * Sends a payload to a shard
	 *
	 * @param shardId - Id of the shard the payload should be sent to
	 * @param payload - The gateway payload to send
	 */
	send: (shardId: number, payload: GatewaySendPayload) => Awaitable<void>;
}

View File

@@ -0,0 +1,64 @@
import { Collection } from '@discordjs/collection';
import type { GatewaySendPayload } from 'discord-api-types/v10';
import type { IShardingStrategy } from './IShardingStrategy';
import { IdentifyThrottler } from '../../utils/IdentifyThrottler';
import type { WebSocketManager } from '../../ws/WebSocketManager';
import { WebSocketShard, WebSocketShardDestroyOptions, WebSocketShardEvents } from '../../ws/WebSocketShard';
import { managerToFetchingStrategyOptions } from '../context/IContextFetchingStrategy';
import { SimpleContextFetchingStrategy } from '../context/SimpleContextFetchingStrategy';
/**
 * Simple strategy that just spawns shards in the current process
 */
export class SimpleShardingStrategy implements IShardingStrategy {
	private readonly manager: WebSocketManager;

	// Shards owned by this strategy, keyed by shard id
	private readonly shards = new Collection<number, WebSocketShard>();

	private readonly throttler: IdentifyThrottler;

	public constructor(manager: WebSocketManager) {
		this.manager = manager;
		this.throttler = new IdentifyThrottler(manager);
	}

	/**
	 * Creates a shard instance for every given id and re-emits its events on the manager
	 */
	public async spawn(shardIds: number[]) {
		const strategyOptions = await managerToFetchingStrategyOptions(this.manager);

		for (const shardId of shardIds) {
			const strategy = new SimpleContextFetchingStrategy(this.manager, strategyOptions);
			const shard = new WebSocketShard(strategy, shardId);

			for (const event of Object.values(WebSocketShardEvents)) {
				// @ts-expect-error
				shard.on(event, (payload) => this.manager.emit(event, { ...payload, shardId }));
			}

			this.shards.set(shardId, shard);
		}
	}

	/**
	 * Connects every spawned shard, honoring the identify rate limit
	 */
	public async connect() {
		const connections: Promise<unknown>[] = [];

		for (const shard of this.shards.values()) {
			// Identifying is rate limited; wait for a free slot before each connect
			await this.throttler.waitForIdentify();
			connections.push(shard.connect());
		}

		await Promise.all(connections);
	}

	/**
	 * Destroys every shard in parallel, then forgets about them
	 */
	public async destroy(options?: Omit<WebSocketShardDestroyOptions, 'recover'>) {
		await Promise.all([...this.shards.values()].map(async (shard) => shard.destroy(options)));
		this.shards.clear();
	}

	/**
	 * Sends a payload through the given shard
	 */
	public send(shardId: number, payload: GatewaySendPayload) {
		const shard = this.shards.get(shardId);
		if (!shard) {
			throw new Error(`Shard ${shardId} not found`);
		}

		return shard.send(payload);
	}
}

View File

@@ -0,0 +1,203 @@
import { once } from 'node:events';
import { join } from 'node:path';
import { Worker } from 'node:worker_threads';
import { Collection } from '@discordjs/collection';
import type { GatewaySendPayload } from 'discord-api-types/v10';
import type { IShardingStrategy } from './IShardingStrategy';
import { IdentifyThrottler } from '../../utils/IdentifyThrottler';
import type { SessionInfo, WebSocketManager } from '../../ws/WebSocketManager';
import type { WebSocketShardDestroyOptions, WebSocketShardEvents } from '../../ws/WebSocketShard';
import { FetchingStrategyOptions, managerToFetchingStrategyOptions } from '../context/IContextFetchingStrategy';
/**
 * Data passed into each worker thread when it is spawned
 */
export interface WorkerData extends FetchingStrategyOptions {
	// Ids of the shards this particular worker is responsible for
	shardIds: number[];
}

/**
 * Ops for payloads the main thread sends to a worker
 */
export enum WorkerSendPayloadOp {
	Connect,
	Destroy,
	Send,
	SessionInfoResponse,
}

/**
 * Payloads the main thread sends to a worker
 */
export type WorkerSendPayload =
	| { op: WorkerSendPayloadOp.Connect; shardId: number }
	| { op: WorkerSendPayloadOp.Destroy; shardId: number; options?: WebSocketShardDestroyOptions }
	| { op: WorkerSendPayloadOp.Send; shardId: number; payload: GatewaySendPayload }
	| { op: WorkerSendPayloadOp.SessionInfoResponse; nonce: number; session: SessionInfo | null };

/**
 * Ops for payloads a worker sends back to the main thread
 *
 * Note: the "Recieve" spelling is part of the exported API and kept as-is
 */
export enum WorkerRecievePayloadOp {
	Connected,
	Destroyed,
	Event,
	RetrieveSessionInfo,
	UpdateSessionInfo,
}

/**
 * Payloads a worker sends back to the main thread
 */
export type WorkerRecievePayload =
	| { op: WorkerRecievePayloadOp.Connected; shardId: number }
	| { op: WorkerRecievePayloadOp.Destroyed; shardId: number }
	// Can't seem to get a type-safe union based off of the event, so I'm sadly leaving data as any for now
	| { op: WorkerRecievePayloadOp.Event; shardId: number; event: WebSocketShardEvents; data: any }
	| { op: WorkerRecievePayloadOp.RetrieveSessionInfo; shardId: number; nonce: number }
	| { op: WorkerRecievePayloadOp.UpdateSessionInfo; shardId: number; session: SessionInfo | null };

/**
 * Options for a {@link WorkerShardingStrategy}
 */
export interface WorkerShardingStrategyOptions {
	/**
	 * Dictates how many shards should be spawned per worker thread.
	 */
	shardsPerWorker: number | 'all';
}
/**
 * Strategy used to spawn threads in worker_threads
 */
export class WorkerShardingStrategy implements IShardingStrategy {
	private readonly manager: WebSocketManager;

	private readonly options: WorkerShardingStrategyOptions;

	// All spawned workers
	#workers: Worker[] = [];

	// Lookup of which worker owns a given shard id
	readonly #workerByShardId = new Collection<number, Worker>();

	// Resolvers for in-flight connect requests, keyed by shard id
	private readonly connectPromises = new Collection<number, () => void>();

	// Resolvers for in-flight destroy requests, keyed by shard id
	private readonly destroyPromises = new Collection<number, () => void>();

	private readonly throttler: IdentifyThrottler;

	public constructor(manager: WebSocketManager, options: WorkerShardingStrategyOptions) {
		this.manager = manager;
		this.throttler = new IdentifyThrottler(manager);
		this.options = options;
	}

	/**
	 * Spawns one worker per chunk of `shardsPerWorker` shard ids
	 */
	public async spawn(shardIds: number[]) {
		const shardsPerWorker = this.options.shardsPerWorker === 'all' ? shardIds.length : this.options.shardsPerWorker;
		const strategyOptions = await managerToFetchingStrategyOptions(this.manager);

		let shards = 0;
		while (shards !== shardIds.length) {
			// Each worker receives the next contiguous slice of shard ids
			const slice = shardIds.slice(shards, shardsPerWorker + shards);
			const workerData: WorkerData = {
				...strategyOptions,
				shardIds: slice,
			};

			const worker = new Worker(join(__dirname, 'worker.cjs'), { workerData });
			// Wait for the worker to actually boot before wiring up listeners
			await once(worker, 'online');
			worker
				.on('error', (err) => {
					throw err;
				})
				.on('messageerror', (err) => {
					throw err;
				})
				// eslint-disable-next-line @typescript-eslint/no-misused-promises
				.on('message', (payload: WorkerRecievePayload) => this.onMessage(worker, payload));

			this.#workers.push(worker);
			for (const shardId of slice) {
				this.#workerByShardId.set(shardId, worker);
			}

			shards += slice.length;
		}
	}

	/**
	 * Tells each worker to connect its shards, honoring the identify rate limit
	 */
	public async connect() {
		const promises = [];

		for (const [shardId, worker] of this.#workerByShardId.entries()) {
			// Identifying is rate limited; wait for a free slot before each connect
			await this.throttler.waitForIdentify();

			const payload: WorkerSendPayload = {
				op: WorkerSendPayloadOp.Connect,
				shardId,
			};

			// Resolved by onMessage when the worker reports Connected for this shard;
			// registered before posting so the acknowledgement can never race us
			const promise = new Promise<void>((resolve) => this.connectPromises.set(shardId, resolve));
			worker.postMessage(payload);
			promises.push(promise);
		}

		await Promise.all(promises);
	}

	/**
	 * Destroys every shard, terminating each worker once it acknowledges
	 */
	public async destroy(options: Omit<WebSocketShardDestroyOptions, 'recover'> = {}) {
		const promises = [];

		for (const [shardId, worker] of this.#workerByShardId.entries()) {
			const payload: WorkerSendPayload = {
				op: WorkerSendPayloadOp.Destroy,
				shardId,
				options,
			};

			promises.push(
				// Resolved by onMessage when the worker reports Destroyed; only then is the worker terminated
				new Promise<void>((resolve) => this.destroyPromises.set(shardId, resolve)).then(() => worker.terminate()),
			);
			worker.postMessage(payload);
		}

		this.#workers = [];
		this.#workerByShardId.clear();

		await Promise.all(promises);
	}

	/**
	 * Relays a gateway payload to the worker owning the given shard
	 */
	public send(shardId: number, data: GatewaySendPayload) {
		const worker = this.#workerByShardId.get(shardId);
		if (!worker) {
			throw new Error(`No worker found for shard ${shardId}`);
		}

		const payload: WorkerSendPayload = {
			op: WorkerSendPayloadOp.Send,
			shardId,
			payload: data,
		};
		worker.postMessage(payload);
	}

	// Handles payloads a worker sends back to the main thread
	private async onMessage(worker: Worker, payload: WorkerRecievePayload) {
		switch (payload.op) {
			case WorkerRecievePayloadOp.Connected: {
				const resolve = this.connectPromises.get(payload.shardId)!;
				resolve();
				this.connectPromises.delete(payload.shardId);
				break;
			}

			case WorkerRecievePayloadOp.Destroyed: {
				const resolve = this.destroyPromises.get(payload.shardId)!;
				resolve();
				this.destroyPromises.delete(payload.shardId);
				break;
			}

			case WorkerRecievePayloadOp.Event: {
				// Re-emit shard events on the manager, tagged with the originating shard id
				// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
				this.manager.emit(payload.event, { ...payload.data, shardId: payload.shardId });
				break;
			}

			case WorkerRecievePayloadOp.RetrieveSessionInfo: {
				// The worker asked for session info; look it up and echo the nonce back
				const session = await this.manager.options.retrieveSessionInfo(payload.shardId);
				const response: WorkerSendPayload = {
					op: WorkerSendPayloadOp.SessionInfoResponse,
					nonce: payload.nonce,
					session,
				};
				worker.postMessage(response);
				break;
			}

			case WorkerRecievePayloadOp.UpdateSessionInfo: {
				await this.manager.options.updateSessionInfo(payload.shardId, payload.session);
				break;
			}
		}
	}
}

View File

@@ -0,0 +1,93 @@
import { isMainThread, workerData, parentPort } from 'node:worker_threads';
import { Collection } from '@discordjs/collection';
import {
WorkerData,
WorkerRecievePayload,
WorkerRecievePayloadOp,
WorkerSendPayload,
WorkerSendPayloadOp,
} from './WorkerShardingStrategy';
import { WebSocketShard, WebSocketShardDestroyOptions, WebSocketShardEvents } from '../../ws/WebSocketShard';
import { WorkerContextFetchingStrategy } from '../context/WorkerContextFetchingStrategy';
// Safety net: this script is only ever meant to be executed as a worker thread
if (isMainThread) {
	throw new Error('Expected worker script to not be ran within the main thread');
}

const data = workerData as WorkerData;
// Shards owned by this worker, keyed by shard id
const shards = new Collection<number, WebSocketShard>();

// Connects the shard with the given id, throwing if it was never spawned here
async function connect(shardId: number) {
	const shard = shards.get(shardId);
	if (!shard) {
		throw new Error(`Shard ${shardId} does not exist`);
	}

	await shard.connect();
}

// Destroys the shard with the given id, throwing if it was never spawned here
async function destroy(shardId: number, options?: WebSocketShardDestroyOptions) {
	const shard = shards.get(shardId);
	if (!shard) {
		throw new Error(`Shard ${shardId} does not exist`);
	}

	await shard.destroy(options);
}

// Spawn each assigned shard and forward all of its events to the main thread
for (const shardId of data.shardIds) {
	const shard = new WebSocketShard(new WorkerContextFetchingStrategy(data), shardId);
	for (const event of Object.values(WebSocketShardEvents)) {
		// @ts-expect-error
		shard.on(event, (data) => {
			const payload: WorkerRecievePayload = {
				op: WorkerRecievePayloadOp.Event,
				event,
				data,
				shardId,
			};
			parentPort!.postMessage(payload);
		});
	}

	shards.set(shardId, shard);
}

// Dispatch commands from the main thread, acknowledging where the protocol requires it
parentPort!
	.on('messageerror', (err) => {
		throw err;
	})
	// eslint-disable-next-line @typescript-eslint/no-misused-promises
	.on('message', async (payload: WorkerSendPayload) => {
		switch (payload.op) {
			case WorkerSendPayloadOp.Connect: {
				await connect(payload.shardId);
				const response: WorkerRecievePayload = {
					op: WorkerRecievePayloadOp.Connected,
					shardId: payload.shardId,
				};
				parentPort!.postMessage(response);
				break;
			}

			case WorkerSendPayloadOp.Destroy: {
				await destroy(payload.shardId, payload.options);
				const response: WorkerRecievePayload = {
					op: WorkerRecievePayloadOp.Destroyed,
					shardId: payload.shardId,
				};
				parentPort!.postMessage(response);
				break;
			}

			case WorkerSendPayloadOp.Send: {
				const shard = shards.get(payload.shardId);
				if (!shard) {
					throw new Error(`Shard ${payload.shardId} does not exist`);
				}

				await shard.send(payload.payload);
				break;
			}

			case WorkerSendPayloadOp.SessionInfoResponse: {
				// Handled by WorkerContextFetchingStrategy's own parentPort listener
				break;
			}
		}
	});

View File

@@ -0,0 +1,29 @@
import { setTimeout as sleep } from 'node:timers/promises';
import type { WebSocketManager } from '../ws/WebSocketManager';
/**
 * Throttles identify payloads to respect the gateway's session start limit.
 */
export class IdentifyThrottler {
	// How many identifies remain in the current window, and when that window resets.
	// resetsAt starts at Infinity so the very first call fetches fresh limits
	// without sleeping.
	private identifyState = {
		remaining: 0,
		resetsAt: Infinity,
	};

	public constructor(private readonly manager: WebSocketManager) {}

	/**
	 * Resolves once it is safe to send an identify payload.
	 */
	public async waitForIdentify(): Promise<void> {
		if (this.identifyState.remaining <= 0) {
			await this.refreshWindow();
		}

		this.identifyState.remaining--;
	}

	// Waits out the current window (if one is active) and pulls fresh limits
	private async refreshWindow(): Promise<void> {
		const untilReset = this.identifyState.resetsAt - Date.now();
		if (untilReset <= 5_000) {
			// Add jitter so multiple throttlers don't all wake at the exact reset time
			await sleep(untilReset + Math.random() * 1_500);
		}

		const info = await this.manager.fetchGatewayInformation();
		this.identifyState = {
			remaining: info.session_start_limit.max_concurrency,
			resetsAt: Date.now() + 5_000,
		};
	}
}

View File

@@ -0,0 +1,68 @@
import { readFileSync } from 'node:fs';
import { join } from 'node:path';
import { Collection } from '@discordjs/collection';
import { APIVersion, GatewayOpcodes } from 'discord-api-types/v10';
import { lazy } from './utils';
import type { OptionalWebSocketManagerOptions, SessionInfo } from '../ws/WebSocketManager';
/**
 * Valid encoding types
 */
export enum Encoding {
	JSON = 'json',
}

/**
 * Valid compression methods
 */
export enum CompressionMethod {
	ZlibStream = 'zlib-stream',
}

// Read our own package.json at runtime so the version can be embedded below
const packageJson = readFileSync(join(__dirname, '..', '..', 'package.json'), 'utf8');
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
const Package = JSON.parse(packageJson);

/**
 * Default `browser`/`device` identify property, e.g. `@discordjs/ws 0.1.0`
 */
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions, @typescript-eslint/no-unsafe-member-access
export const DefaultDeviceProperty = `@discordjs/ws ${Package.version}`;
// Lazily created so the Collection is only allocated if the default session handlers are actually used
const getDefaultSessionStore = lazy(() => new Collection<number, SessionInfo | null>());
/**
 * Default options used by the manager
 */
export const DefaultWebSocketManagerOptions: OptionalWebSocketManagerOptions = {
  shardCount: null,
  shardIds: null,
  largeThreshold: null,
  initialPresence: null,
  identifyProperties: {
    browser: DefaultDeviceProperty,
    device: DefaultDeviceProperty,
    os: process.platform,
  },
  version: APIVersion,
  encoding: Encoding.JSON,
  compression: null,
  // Default session handling keeps state in an in-memory Collection; override
  // these two callbacks when session info must survive a process restart
  retrieveSessionInfo(shardId) {
    const store = getDefaultSessionStore();
    return store.get(shardId) ?? null;
  },
  updateSessionInfo(shardId: number, info: SessionInfo | null) {
    const store = getDefaultSessionStore();
    if (info) {
      store.set(shardId, info);
    } else {
      // A null update means the session is gone; drop the entry entirely
      store.delete(shardId);
    }
  },
  handshakeTimeout: 30_000,
  helloTimeout: 60_000,
  readyTimeout: 15_000,
};
/**
 * Opcodes a shard is allowed to send before it reports Ready (heartbeats and auth payloads)
 */
export const ImportantGatewayOpcodes = new Set([
  GatewayOpcodes.Heartbeat,
  GatewayOpcodes.Identify,
  GatewayOpcodes.Resume,
]);

View File

@@ -0,0 +1,20 @@
import type { ShardRange } from '../ws/WebSocketManager';
/**
 * A value that may or may not be wrapped in a Promise
 */
export type Awaitable<T> = T | Promise<T>;
/**
 * Builds an array of the numbers in the given inclusive range
 * @example
 * range({ start: 3, end: 5 }); // [3, 4, 5]
 */
export function range({ start, end }: ShardRange): number[] {
  const values: number[] = [];
  for (let value = start; value <= end; value++) {
    values.push(value);
  }
  return values;
}
/**
 * Wraps a callback so it runs at most once, caching and returning its result thereafter
 */
export function lazy<T>(cb: () => T): () => T {
  let cached: T;
  return () => {
    // Same semantics as `cached ??= cb()`: only compute while the cache is nullish
    if (cached === undefined || cached === null) {
      cached = cb();
    }
    return cached;
  };
}

View File

@@ -0,0 +1,274 @@
import type { REST } from '@discordjs/rest';
import { AsyncEventEmitter } from '@vladfrangu/async_event_emitter';
import {
APIGatewayBotInfo,
GatewayIdentifyProperties,
GatewayPresenceUpdateData,
RESTGetAPIGatewayBotResult,
GatewayIntentBits,
Routes,
GatewaySendPayload,
} from 'discord-api-types/v10';
import type { WebSocketShardDestroyOptions, WebSocketShardEventsMap } from './WebSocketShard';
import type { IShardingStrategy } from '../strategies/sharding/IShardingStrategy';
import { SimpleShardingStrategy } from '../strategies/sharding/SimpleShardingStrategy';
import { CompressionMethod, DefaultWebSocketManagerOptions, Encoding } from '../utils/constants';
import { Awaitable, range } from '../utils/utils';
/**
 * Represents a range of shard ids
 */
export interface ShardRange {
  /**
   * The first shard id in the range (inclusive)
   */
  start: number;
  /**
   * The last shard id in the range (inclusive)
   */
  end: number;
}
/**
 * Session information for a given shard, used to resume a session
 */
export interface SessionInfo {
  /**
   * Session id for this shard
   */
  sessionId: string;
  /**
   * The sequence number of the last message sent by the shard
   */
  sequence: number;
  /**
   * The id of the shard
   */
  shardId: number;
  /**
   * The total number of shards at the time of this shard identifying
   */
  shardCount: number;
}
/**
 * Required options for the WebSocketManager
 */
export interface RequiredWebSocketManagerOptions {
  /**
   * The token to use for identifying with the gateway
   */
  token: string;
  /**
   * The intents to request
   */
  intents: GatewayIntentBits;
  /**
   * The REST instance to use for fetching gateway information
   */
  rest: REST;
}
/**
 * Optional additional configuration for the WebSocketManager
 */
export interface OptionalWebSocketManagerOptions {
  /**
   * The total number of shards across all WebsocketManagers you intend to instantiate.
   * Use `null` to use Discord's recommended shard count
   */
  shardCount: number | null;
  /**
   * The ids of the shards this WebSocketManager should manage.
   * Use `null` to simply spawn 0 through `shardCount - 1`
   * @example
   * const manager = new WebSocketManager({
   *   shardIds: [1, 3, 7], // spawns shard 1, 3, and 7, nothing else
   * });
   * @example
   * const manager = new WebSocketManager({
   *   shardIds: {
   *     start: 3,
   *     end: 6,
   *   }, // spawns shards 3, 4, 5, and 6
   * });
   */
  shardIds: number[] | ShardRange | null;
  /**
   * Value between 50 and 250, total number of members where the gateway will stop sending offline members in the guild member list
   * @default null
   */
  largeThreshold: number | null;
  /**
   * Initial presence data to send to the gateway when identifying
   */
  initialPresence: GatewayPresenceUpdateData | null;
  /**
   * Properties to send to the gateway when identifying
   */
  identifyProperties: GatewayIdentifyProperties;
  /**
   * The gateway version to use
   * @default '10'
   */
  version: string;
  /**
   * The encoding to use
   * @default 'json'
   */
  encoding: Encoding;
  /**
   * The compression method to use
   * @default null (no compression)
   */
  compression: CompressionMethod | null;
  /**
   * Function used to retrieve session information (and attempt to resume) for a given shard
   * @example
   * const manager = new WebSocketManager({
   *   async retrieveSessionInfo(shardId): Awaitable<SessionInfo | null> {
   *     // Fetch this info from redis or similar
   *     return { sessionId: string, sequence: number };
   *     // Return null if no information is found
   *   },
   * });
   */
  retrieveSessionInfo: (shardId: number) => Awaitable<SessionInfo | null>;
  /**
   * Function used to store session information for a given shard
   */
  updateSessionInfo: (shardId: number, sessionInfo: SessionInfo | null) => Awaitable<void>;
  /**
   * How long to wait for a shard to connect before giving up, in milliseconds
   * @default 30_000
   */
  handshakeTimeout: number | null;
  /**
   * How long to wait for a shard's HELLO packet before giving up, in milliseconds
   * @default 60_000
   */
  helloTimeout: number | null;
  /**
   * How long to wait for a shard's READY packet before giving up, in milliseconds
   * @default 15_000
   */
  readyTimeout: number | null;
}
/**
 * All options used by the WebSocketManager - required and optional combined
 */
export type WebSocketManagerOptions = RequiredWebSocketManagerOptions & OptionalWebSocketManagerOptions;
/**
 * Shard events as re-emitted by the manager, with the originating shard id merged into each payload
 */
export type ManagerShardEventsMap = {
  [K in keyof WebSocketShardEventsMap]: [
    WebSocketShardEventsMap[K] extends [] ? { shardId: number } : WebSocketShardEventsMap[K][0] & { shardId: number },
  ];
};
/**
 * Entry point for gateway connections: resolves shard ids/counts and delegates
 * spawning, connecting, destroying and sending to the configured sharding strategy.
 */
export class WebSocketManager extends AsyncEventEmitter<ManagerShardEventsMap> {
  /**
   * The options being used by this manager
   */
  public readonly options: WebSocketManagerOptions;
  /**
   * Internal cache for a GET /gateway/bot result
   */
  private gatewayInformation: {
    data: APIGatewayBotInfo;
    expiresAt: number;
  } | null = null;
  /**
   * Internal cache for the shard ids
   */
  private shardIds: number[] | null = null;
  /**
   * Strategy used to manage shards
   * @default SimpleManagerToShardStrategy
   */
  private strategy: IShardingStrategy = new SimpleShardingStrategy(this);
  public constructor(options: RequiredWebSocketManagerOptions & Partial<OptionalWebSocketManagerOptions>) {
    super();
    // User-supplied options take precedence over the defaults
    this.options = { ...DefaultWebSocketManagerOptions, ...options };
  }
  /**
   * Sets the strategy used to spawn and communicate with shards
   * @returns this, for chaining
   */
  public setStrategy(strategy: IShardingStrategy) {
    this.strategy = strategy;
    return this;
  }
  /**
   * Fetches the gateway information from Discord - or returns it from cache if available
   * @param force Whether to ignore the cache and force a fresh fetch
   */
  public async fetchGatewayInformation(force = false) {
    if (this.gatewayInformation) {
      if (this.gatewayInformation.expiresAt <= Date.now()) {
        // Cache entry expired; drop it so we fall through to a fresh fetch
        this.gatewayInformation = null;
      } else if (!force) {
        return this.gatewayInformation.data;
      }
    }
    const data = (await this.options.rest.get(Routes.gatewayBot())) as RESTGetAPIGatewayBotResult;
    // Cache until the session start limit resets, as reported by Discord
    this.gatewayInformation = { data, expiresAt: Date.now() + data.session_start_limit.reset_after };
    return this.gatewayInformation.data;
  }
  /**
   * Updates your total shard count on-the-fly, spawning shards as needed
   * @param shardCount The new shard count to use
   */
  public async updateShardCount(shardCount: number | null) {
    // Tear down existing shards first; spawning below re-creates them with the new count
    await this.strategy.destroy({ reason: 'User is adjusting their shards' });
    this.options.shardCount = shardCount;
    const shardIds = await this.getShardIds(true);
    await this.strategy.spawn(shardIds);
    return this;
  }
  /**
   * Yields the total number of shards across for your bot, accounting for Discord recommendations
   */
  public async getShardCount(): Promise<number> {
    if (this.options.shardCount) {
      return this.options.shardCount;
    }
    // Highest managed shard id + 1 covers explicit shardIds lists with gaps
    const shardIds = await this.getShardIds();
    return Math.max(...shardIds) + 1;
  }
  /**
   * Yields the ids of the shards this manager should manage
   * @param force Whether to ignore the cached ids and recompute them
   */
  public async getShardIds(force = false): Promise<number[]> {
    if (this.shardIds && !force) {
      return this.shardIds;
    }
    let shardIds: number[];
    if (this.options.shardIds) {
      if (Array.isArray(this.options.shardIds)) {
        shardIds = this.options.shardIds;
      } else {
        shardIds = range(this.options.shardIds);
      }
    } else {
      // No explicit ids: spawn 0..shardCount-1, falling back to Discord's recommended count
      const data = await this.fetchGatewayInformation();
      shardIds = range({ start: 0, end: (this.options.shardCount ?? data.shards) - 1 });
    }
    this.shardIds = shardIds;
    return shardIds;
  }
  /**
   * Spawns any missing shards and connects them all to the gateway
   */
  public async connect() {
    const shardCount = await this.getShardCount();
    // First, make sure all our shards are spawned
    await this.updateShardCount(shardCount);
    await this.strategy.connect();
  }
  /**
   * Destroys all shards managed by the current strategy
   */
  public destroy(options?: Omit<WebSocketShardDestroyOptions, 'recover'>) {
    return this.strategy.destroy(options);
  }
  /**
   * Sends a payload to the given shard
   * @param shardId The shard to send the payload to
   * @param payload The payload to send
   */
  public send(shardId: number, payload: GatewaySendPayload) {
    return this.strategy.send(shardId, payload);
  }
}

View File

@@ -0,0 +1,549 @@
import { once } from 'node:events';
import { setTimeout } from 'node:timers';
import { setTimeout as sleep } from 'node:timers/promises';
import { TextDecoder } from 'node:util';
import { inflate } from 'node:zlib';
import { Collection } from '@discordjs/collection';
import { AsyncQueue } from '@sapphire/async-queue';
import { AsyncEventEmitter } from '@vladfrangu/async_event_emitter';
import {
GatewayCloseCodes,
GatewayDispatchEvents,
GatewayDispatchPayload,
GatewayIdentifyData,
GatewayOpcodes,
GatewayReceivePayload,
GatewaySendPayload,
} from 'discord-api-types/v10';
import { CONNECTING, OPEN, RawData, WebSocket } from 'ws';
import type { Inflate } from 'zlib-sync';
import type { SessionInfo } from './WebSocketManager';
import type { IContextFetchingStrategy } from '../strategies/context/IContextFetchingStrategy';
import { ImportantGatewayOpcodes } from '../utils/constants';
import { lazy } from '../utils/utils';
// zlib-sync is an optional dependency; resolve it once, caching null when it isn't installed
const getZlibSync = lazy(() => import('zlib-sync').then((mod) => mod.default).catch(() => null));
/**
 * Events emitted by a WebSocketShard
 */
export enum WebSocketShardEvents {
  Debug = 'debug',
  Hello = 'hello',
  Ready = 'ready',
  Resumed = 'resumed',
  Dispatch = 'dispatch',
}
/**
 * Lifecycle states a shard can be in
 */
export enum WebSocketShardStatus {
  Idle,
  Connecting,
  Resuming,
  Ready,
}
/**
 * How a shard should recover after being destroyed
 */
export enum WebSocketShardDestroyRecovery {
  /**
   * Reconnect with a fresh identify; stored session state is discarded
   */
  Reconnect,
  /**
   * Reconnect and attempt to resume the existing session
   */
  Resume,
}
// eslint-disable-next-line @typescript-eslint/consistent-type-definitions
export type WebSocketShardEventsMap = {
  [WebSocketShardEvents.Debug]: [payload: { message: string }];
  [WebSocketShardEvents.Hello]: [];
  [WebSocketShardEvents.Ready]: [];
  [WebSocketShardEvents.Resumed]: [];
  [WebSocketShardEvents.Dispatch]: [payload: { data: GatewayDispatchPayload }];
};
/**
 * Options accepted by WebSocketShard#destroy
 */
export interface WebSocketShardDestroyOptions {
  /**
   * Reason for the disconnect, forwarded as the WebSocket close reason
   */
  reason?: string;
  /**
   * Close code to use; when omitted, one is derived from `recover`
   */
  code?: number;
  /**
   * If set, the shard reconnects after closing, using this recovery mode
   */
  recover?: WebSocketShardDestroyRecovery;
}
/**
 * Close codes this package uses when it closes the connection itself
 */
export enum CloseCodes {
  Normal = 1000,
  /**
   * Non-standard code signalling the close was made with intent to resume
   */
  Resuming = 4200,
}
/**
 * A single shard connection to Discord's gateway, handling identify/resume,
 * heartbeating, optional compression and the outgoing payload rate limit.
 */
export class WebSocketShard extends AsyncEventEmitter<WebSocketShardEventsMap> {
  private connection: WebSocket | null = null;

  private readonly id: number;

  // Whether we fell back to payload-level (identify) compression because zlib-sync is unavailable
  private useIdentifyCompress = false;

  // zlib-sync inflate context when transport (zlib-stream) compression is active
  private inflate: Inflate | null = null;

  private readonly textDecoder = new TextDecoder();

  private status: WebSocketShardStatus = WebSocketShardStatus.Idle;

  // Number of dispatches Discord replayed during the current resume attempt
  private replayedEvents = 0;

  // Whether the last heartbeat we sent has been acknowledged
  private isAck = true;

  // Outgoing payload budget: 120 sends per 60 second window
  private sendRateLimitState = {
    remaining: 120,
    resetAt: Date.now(),
  };

  private heartbeatInterval: NodeJS.Timer | null = null;

  // Timestamp of the last heartbeat we sent; -1 when none has been sent yet
  private lastHeartbeatAt = -1;

  private session: SessionInfo | null = null;

  // Serializes sends so the rate limit bookkeeping isn't raced by concurrent send() calls
  private readonly sendQueue = new AsyncQueue();

  // Pending waitForEvent timeouts, keyed by the awaited event
  private readonly timeouts = new Collection<WebSocketShardEvents, NodeJS.Timeout>();

  public readonly strategy: IContextFetchingStrategy;

  public constructor(strategy: IContextFetchingStrategy, id: number) {
    super();
    this.strategy = strategy;
    this.id = id;
  }

  /**
   * Establishes the WebSocket connection, waits for HELLO, then identifies or resumes.
   * @throws If the shard isn't idle
   */
  public async connect() {
    if (this.status !== WebSocketShardStatus.Idle) {
      throw new Error("Tried to connect a shard that wasn't idle");
    }
    const data = this.strategy.options.gatewayInformation;
    const { version, encoding, compression } = this.strategy.options;
    const params = new URLSearchParams({ v: version, encoding });
    if (compression) {
      const zlib = await getZlibSync();
      if (zlib) {
        params.append('compress', compression);
        this.inflate = new zlib.Inflate({
          chunkSize: 65535,
          to: 'string',
        });
      } else if (!this.useIdentifyCompress) {
        this.useIdentifyCompress = true;
        console.warn(
          'WebSocketShard: Compression is enabled but zlib-sync is not installed, falling back to identify compress',
        );
      }
    }
    const url = `${data.url}?${params.toString()}`;
    this.debug([`Connecting to ${url}`]);
    const connection = new WebSocket(url, { handshakeTimeout: this.strategy.options.handshakeTimeout ?? undefined })
      /* eslint-disable @typescript-eslint/no-misused-promises */
      .on('message', this.onMessage.bind(this))
      .on('error', this.onError.bind(this))
      .on('close', this.onClose.bind(this));
    /* eslint-enable @typescript-eslint/no-misused-promises */
    connection.binaryType = 'arraybuffer';
    this.connection = connection;
    this.status = WebSocketShardStatus.Connecting;
    await this.waitForEvent(WebSocketShardEvents.Hello, this.strategy.options.helloTimeout);
    // Only resume when the stored session was created under the same shard count; otherwise identify fresh
    const session = this.session ?? (await this.strategy.retrieveSessionInfo(this.id));
    if (session?.shardCount === this.strategy.options.shardCount) {
      this.session = session;
      await this.resume(session);
    } else {
      await this.identify();
    }
  }

  /**
   * Closes the connection and resets shard state; reconnects afterwards when `options.recover` is set.
   */
  public async destroy(options: WebSocketShardDestroyOptions = {}) {
    if (this.status === WebSocketShardStatus.Idle) {
      this.debug(['Tried to destroy a shard that was idle']);
      return;
    }
    if (!options.code) {
      options.code = options.recover === WebSocketShardDestroyRecovery.Resume ? CloseCodes.Resuming : CloseCodes.Normal;
    }
    this.debug([
      'Destroying shard',
      `Reason: ${options.reason ?? 'none'}`,
      `Code: ${options.code}`,
      `Recover: ${options.recover === undefined ? 'none' : WebSocketShardDestroyRecovery[options.recover]!}`,
    ]);
    // Reset state
    this.isAck = true;
    if (this.heartbeatInterval) {
      clearInterval(this.heartbeatInterval);
    }
    this.lastHeartbeatAt = -1;
    // Clear session state if applicable
    if (options.recover !== WebSocketShardDestroyRecovery.Resume && this.session) {
      this.session = null;
      await this.strategy.updateSessionInfo(this.id, null);
    }
    if (this.connection && (this.connection.readyState === OPEN || this.connection.readyState === CONNECTING)) {
      this.connection.close(options.code, options.reason);
    }
    this.status = WebSocketShardStatus.Idle;
    if (options.recover !== undefined) {
      return this.connect();
    }
  }

  /**
   * Waits for the given shard event, aborting after `timeoutDuration` ms when provided.
   */
  private async waitForEvent(event: WebSocketShardEvents, timeoutDuration?: number | null) {
    this.debug([`Waiting for event ${event} for ${timeoutDuration ? `${timeoutDuration}ms` : 'indefinitely'}`]);
    const controller = new AbortController();
    const timeout = timeoutDuration ? setTimeout(() => controller.abort(), timeoutDuration).unref() : null;
    if (timeout) {
      // Stored so e.g. an InvalidSession can refresh the pending READY timeout
      this.timeouts.set(event, timeout);
    }
    await once(this, event, { signal: controller.signal });
    if (timeout) {
      clearTimeout(timeout);
      this.timeouts.delete(event);
    }
  }

  /**
   * Sends a payload to the gateway, respecting the 120 commands / 60 seconds send limit.
   * Non-essential payloads are held back until the shard is ready.
   * @throws If the shard has never connected
   */
  public async send(payload: GatewaySendPayload) {
    if (!this.connection) {
      throw new Error("WebSocketShard wasn't connected");
    }
    if (this.status !== WebSocketShardStatus.Ready && !ImportantGatewayOpcodes.has(payload.op)) {
      await once(this, WebSocketShardEvents.Ready);
    }
    await this.sendQueue.wait();
    if (--this.sendRateLimitState.remaining <= 0) {
      const now = Date.now();
      // FIX: the comparison/operands were inverted here, so the limiter slept when the
      // window had already elapsed and never throttled an active window. If the current
      // window hasn't elapsed yet, wait it out before sending more.
      if (this.sendRateLimitState.resetAt > now) {
        const sleepFor = this.sendRateLimitState.resetAt - now;
        this.debug([`Was about to hit the send rate limit, sleeping for ${sleepFor}ms`]);
        await sleep(sleepFor);
      }
      // Start a fresh window; this payload consumes one slot, leaving 119
      this.sendRateLimitState = {
        remaining: 119,
        resetAt: Date.now() + 60_000,
      };
    }
    this.sendQueue.shift();
    this.connection.send(JSON.stringify(payload));
  }

  /**
   * Sends the identify payload and waits for READY.
   */
  private async identify() {
    this.debug([
      'Identifying',
      `shard id: ${this.id.toString()}`,
      `shard count: ${this.strategy.options.shardCount}`,
      `intents: ${this.strategy.options.intents}`,
      `compression: ${this.inflate ? 'zlib-stream' : this.useIdentifyCompress ? 'identify' : 'none'}`,
    ]);
    const d: GatewayIdentifyData = {
      token: this.strategy.options.token,
      properties: this.strategy.options.identifyProperties,
      intents: this.strategy.options.intents,
      compress: this.useIdentifyCompress,
      shard: [this.id, this.strategy.options.shardCount],
    };
    if (this.strategy.options.largeThreshold) {
      d.large_threshold = this.strategy.options.largeThreshold;
    }
    if (this.strategy.options.initialPresence) {
      d.presence = this.strategy.options.initialPresence;
    }
    await this.send({
      op: GatewayOpcodes.Identify,
      d,
    });
    await this.waitForEvent(WebSocketShardEvents.Ready, this.strategy.options.readyTimeout);
    this.status = WebSocketShardStatus.Ready;
  }

  /**
   * Sends a resume payload for the given session.
   */
  private resume(session: SessionInfo) {
    this.debug(['Resuming session']);
    this.status = WebSocketShardStatus.Resuming;
    this.replayedEvents = 0;
    return this.send({
      op: GatewayOpcodes.Resume,
      d: {
        token: this.strategy.options.token,
        seq: session.sequence,
        session_id: session.sessionId,
      },
    });
  }

  /**
   * Sends a heartbeat; if the previous one was never acknowledged (and this one
   * wasn't requested by Discord), treats the connection as a zombie and resumes.
   */
  private async heartbeat(requested = false) {
    if (!this.isAck && !requested) {
      return this.destroy({ reason: 'Zombie connection', recover: WebSocketShardDestroyRecovery.Resume });
    }
    await this.send({
      op: GatewayOpcodes.Heartbeat,
      d: this.session?.sequence ?? null,
    });
    this.lastHeartbeatAt = Date.now();
    this.isAck = false;
  }

  /**
   * Decodes/decompresses a raw WebSocket message into a gateway payload.
   * Returns null for partial zlib-stream frames or undecodable messages.
   */
  private async unpackMessage(data: Buffer | ArrayBuffer, isBinary: boolean): Promise<GatewayReceivePayload | null> {
    const decompressable = new Uint8Array(data);
    // Deal with no compression
    if (!isBinary) {
      return JSON.parse(this.textDecoder.decode(decompressable)) as GatewayReceivePayload;
    }
    // Deal with identify compress
    if (this.useIdentifyCompress) {
      return new Promise((resolve, reject) => {
        inflate(decompressable, { chunkSize: 65535 }, (err, result) => {
          if (err) {
            return reject(err);
          }
          resolve(JSON.parse(this.textDecoder.decode(result)) as GatewayReceivePayload);
        });
      });
    }
    // Deal with gw wide zlib-stream compression
    if (this.inflate) {
      const l = decompressable.length;
      // A zlib-stream message is complete when it ends with the Z_SYNC_FLUSH suffix 00 00 ff ff
      const flush =
        l >= 4 &&
        decompressable[l - 4] === 0x00 &&
        decompressable[l - 3] === 0x00 &&
        decompressable[l - 2] === 0xff &&
        decompressable[l - 1] === 0xff;
      const zlib = (await getZlibSync())!;
      this.inflate.push(Buffer.from(decompressable), flush ? zlib.Z_SYNC_FLUSH : zlib.Z_NO_FLUSH);
      if (this.inflate.err) {
        this.emit('error', `${this.inflate.err}${this.inflate.msg ? `: ${this.inflate.msg}` : ''}`);
      }
      if (!flush) {
        return null;
      }
      const { result } = this.inflate;
      if (!result) {
        return null;
      }
      return JSON.parse(typeof result === 'string' ? result : this.textDecoder.decode(result)) as GatewayReceivePayload;
    }
    this.debug([
      'Received a message we were unable to decompress',
      `isBinary: ${isBinary.toString()}`,
      `useIdentifyCompress: ${this.useIdentifyCompress.toString()}`,
      `inflate: ${Boolean(this.inflate).toString()}`,
    ]);
    return null;
  }

  /**
   * Handles an incoming gateway payload: dispatches events, tracks session
   * state/sequence, and reacts to control opcodes.
   */
  private async onMessage(data: RawData, isBinary: boolean) {
    const payload = await this.unpackMessage(data as Buffer | ArrayBuffer, isBinary);
    if (!payload) {
      return;
    }
    switch (payload.op) {
      case GatewayOpcodes.Dispatch: {
        if (this.status === WebSocketShardStatus.Ready || this.status === WebSocketShardStatus.Resuming) {
          this.emit(WebSocketShardEvents.Dispatch, { data: payload });
        }
        if (this.status === WebSocketShardStatus.Resuming) {
          this.replayedEvents++;
        }
        switch (payload.t) {
          case GatewayDispatchEvents.Ready: {
            this.emit(WebSocketShardEvents.Ready);
            this.session ??= {
              sequence: payload.s,
              sessionId: payload.d.session_id,
              shardId: this.id,
              shardCount: this.strategy.options.shardCount,
            };
            await this.strategy.updateSessionInfo(this.id, this.session);
            break;
          }
          case GatewayDispatchEvents.Resumed: {
            this.status = WebSocketShardStatus.Ready;
            this.debug([`Resumed and replayed ${this.replayedEvents} events`]);
            this.emit(WebSocketShardEvents.Resumed);
            break;
          }
          default: {
            break;
          }
        }
        // Persist the newest sequence number so a future resume replays as little as possible
        if (this.session) {
          if (payload.s > this.session.sequence) {
            this.session.sequence = payload.s;
            await this.strategy.updateSessionInfo(this.id, this.session);
          }
        }
        break;
      }
      case GatewayOpcodes.Heartbeat: {
        await this.heartbeat(true);
        break;
      }
      case GatewayOpcodes.Reconnect: {
        await this.destroy({
          reason: 'Told to reconnect by Discord',
          recover: WebSocketShardDestroyRecovery.Resume,
        });
        break;
      }
      case GatewayOpcodes.InvalidSession: {
        // Give the pending READY wait more time while we retry auth
        const readyTimeout = this.timeouts.get(WebSocketShardEvents.Ready);
        readyTimeout?.refresh();
        this.debug([`Invalid session; will attempt to resume: ${payload.d.toString()}`]);
        const session = this.session ?? (await this.strategy.retrieveSessionInfo(this.id));
        if (payload.d && session) {
          await this.resume(session);
        } else {
          await this.destroy({
            reason: 'Invalid session',
            recover: WebSocketShardDestroyRecovery.Reconnect,
          });
        }
        break;
      }
      case GatewayOpcodes.Hello: {
        this.emit(WebSocketShardEvents.Hello);
        this.debug([`Starting to heartbeat every ${payload.d.heartbeat_interval}ms`]);
        this.heartbeatInterval = setInterval(() => void this.heartbeat(), payload.d.heartbeat_interval);
        break;
      }
      case GatewayOpcodes.HeartbeatAck: {
        this.isAck = true;
        this.debug([`Got heartbeat ack after ${Date.now() - this.lastHeartbeatAt}ms`]);
        break;
      }
    }
  }

  private onError(err: Error) {
    this.emit('error', { err });
  }

  /**
   * Maps gateway close codes to a recovery strategy; fatal codes (bad auth,
   * bad intents, bad sharding) throw instead of reconnecting.
   */
  private async onClose(code: number) {
    switch (code) {
      case 1000:
      case 4200: {
        this.debug([`Disconnected normally from code ${code}`]);
        break;
      }
      case GatewayCloseCodes.UnknownError: {
        // FIX: typo "occured" -> "occurred" in the debug message
        this.debug([`An unknown error occurred: ${code}`]);
        return this.destroy({ code, recover: WebSocketShardDestroyRecovery.Resume });
      }
      case GatewayCloseCodes.UnknownOpcode: {
        this.debug(['An invalid opcode was sent to Discord.']);
        return this.destroy({ code, recover: WebSocketShardDestroyRecovery.Resume });
      }
      case GatewayCloseCodes.DecodeError: {
        this.debug(['An invalid payload was sent to Discord.']);
        return this.destroy({ code, recover: WebSocketShardDestroyRecovery.Resume });
      }
      case GatewayCloseCodes.NotAuthenticated: {
        this.debug(['A request was somehow sent before the identify/resume payload.']);
        return this.destroy({ code, recover: WebSocketShardDestroyRecovery.Reconnect });
      }
      case GatewayCloseCodes.AuthenticationFailed: {
        throw new Error('Authentication failed');
      }
      case GatewayCloseCodes.AlreadyAuthenticated: {
        this.debug(['More than one auth payload was sent.']);
        return this.destroy({ code, recover: WebSocketShardDestroyRecovery.Reconnect });
      }
      case GatewayCloseCodes.InvalidSeq: {
        this.debug(['An invalid sequence was sent.']);
        return this.destroy({ code, recover: WebSocketShardDestroyRecovery.Reconnect });
      }
      case GatewayCloseCodes.RateLimited: {
        this.debug(['The WebSocket rate limit has been hit, this should never happen']);
        return this.destroy({ code, recover: WebSocketShardDestroyRecovery.Reconnect });
      }
      case GatewayCloseCodes.SessionTimedOut: {
        this.debug(['Session timed out.']);
        return this.destroy({ code, recover: WebSocketShardDestroyRecovery.Resume });
      }
      case GatewayCloseCodes.InvalidShard: {
        throw new Error('Invalid shard');
      }
      case GatewayCloseCodes.ShardingRequired: {
        throw new Error('Sharding is required');
      }
      case GatewayCloseCodes.InvalidAPIVersion: {
        throw new Error('Used an invalid API version');
      }
      case GatewayCloseCodes.InvalidIntents: {
        throw new Error('Used invalid intents');
      }
      case GatewayCloseCodes.DisallowedIntents: {
        throw new Error('Used disallowed intents');
      }
      default: {
        this.debug([`The gateway closed with an unexpected code ${code}, attempting to resume.`]);
        return this.destroy({ code, recover: WebSocketShardDestroyRecovery.Resume });
      }
    }
  }

  /**
   * Emits a Debug event; extra lines after the first are indented beneath it.
   */
  private debug(messages: [string, ...string[]]) {
    const message = `${messages[0]}${
      messages.length > 1
        ? `\n${messages
            .slice(1)
            .map((m) => `	${m}`)
            .join('\n')}`
        : ''
    }`;
    this.emit(WebSocketShardEvents.Debug, { message });
  }
}

View File

@@ -0,0 +1,20 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"allowJs": true
},
"include": [
"**/*.ts",
"**/*.tsx",
"**/*.js",
"**/*.mjs",
"**/*.jsx",
"**/*.test.ts",
"**/*.test.js",
"**/*.test.mjs",
"**/*.spec.ts",
"**/*.spec.js",
"**/*.spec.mjs"
],
"exclude": []
}

View File

@@ -0,0 +1,4 @@
{
"extends": "../../tsconfig.json",
"include": ["src/**/*.ts"]
}

View File

@@ -0,0 +1,3 @@
import { createTsupConfig } from '../../tsup.config';
// Transpile each source file individually (bundle: false) so the package keeps its module structure
export default createTsupConfig({ entry: ['./src/**/*.ts'], bundle: false });

View File

@@ -13,6 +13,7 @@ type ConfigOptions = Pick<
| 'noExternal'
| 'esbuildOptions'
| 'dts'
| 'bundle'
>;
export const createTsupConfig = ({
@@ -32,6 +33,7 @@ export const createTsupConfig = ({
};
}
},
bundle,
}: ConfigOptions = {}) =>
defineConfig({
clean: true,
@@ -47,4 +49,6 @@ export const createTsupConfig = ({
globalName,
noExternal,
esbuildOptions,
bundle,
shims: true,
});

3379
yarn.lock

File diff suppressed because it is too large Load Diff