feat(Voice): implement support for @discordjs/voice (#5402)

This commit is contained in:
Amish Shah
2021-06-09 14:21:19 +01:00
committed by GitHub
parent c4f1c75efa
commit 7b2e12b102
27 changed files with 99 additions and 2395 deletions

View File

@@ -7,6 +7,7 @@ labels: 's: unverified, type: bug'
assignees: ''
---
<!-- Use Discord for questions: https://discord.gg/bRCvFy9 -->
<!-- If you are reporting a voice issue, please post your issue at https://github.com/discordjs/voice/issues -->
**Please describe the problem you are having in as much detail as possible:**

View File

@@ -44,16 +44,6 @@ discord.js is a powerful [Node.js](https://nodejs.org) module that allows you to
**Node.js 14.0.0 or newer is required.**
Ignore any warnings about unmet peer dependencies, as they're all optional.
Without voice support: `npm install discord.js`
With voice support ([@discordjs/opus](https://www.npmjs.com/package/@discordjs/opus)): `npm install discord.js @discordjs/opus`
With voice support ([opusscript](https://www.npmjs.com/package/opusscript)): `npm install discord.js opusscript`
### Audio engines
The preferred audio engine is @discordjs/opus, as it performs significantly better than opusscript. When both are available, discord.js will automatically choose @discordjs/opus.
Using opusscript is only recommended for development environments where @discordjs/opus is tough to get working.
For production bots, using @discordjs/opus should be considered a necessity, especially if they're going to be running on multiple servers.
### Optional packages
- [zlib-sync](https://www.npmjs.com/package/zlib-sync) for WebSocket data compression and inflation (`npm install zlib-sync`)
@@ -63,6 +53,7 @@ For production bots, using @discordjs/opus should be considered a necessity, esp
- [libsodium.js](https://www.npmjs.com/package/libsodium-wrappers) (`npm install libsodium-wrappers`)
- [bufferutil](https://www.npmjs.com/package/bufferutil) for a much faster WebSocket connection (`npm install bufferutil`)
- [utf-8-validate](https://www.npmjs.com/package/utf-8-validate) in combination with `bufferutil` for much faster WebSocket processing (`npm install utf-8-validate`)
- [@discordjs/voice](https://github.com/discordjs/voice) for interacting with the Discord Voice API
## Example usage

55
package-lock.json generated
View File

@@ -15,14 +15,13 @@
"abort-controller": "^3.0.0",
"discord-api-types": "^0.18.1",
"node-fetch": "^2.6.1",
"prism-media": "^1.2.9",
"tweetnacl": "^1.0.3",
"ws": "^7.4.6"
},
"devDependencies": {
"@commitlint/cli": "^12.1.4",
"@commitlint/config-angular": "^12.1.4",
"@discordjs/docgen": "^0.10.0",
"@discordjs/voice": "^0.3.0",
"@types/node": "^12.12.6",
"conventional-changelog-cli": "^2.1.1",
"cross-env": "^7.0.3",
@@ -1522,6 +1521,19 @@
"node": ">= 6"
}
},
"node_modules/@discordjs/voice": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/@discordjs/voice/-/voice-0.3.0.tgz",
"integrity": "sha512-jPtzfjCmHe1JmWbwsQ7YYfSHBaglVy5ewDROL4BQpyA60Dpo54ksB0Hv1T2L/B7tRM5nCMo5PDuElaZOBqaTmA==",
"dev": true,
"dependencies": {
"@types/ws": "^7.4.4",
"discord-api-types": "^0.18.1",
"prism-media": "^1.2.9",
"tiny-typed-emitter": "^2.0.3",
"ws": "^7.4.4"
}
},
"node_modules/@eslint/eslintrc": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.1.tgz",
@@ -9181,6 +9193,7 @@
"version": "1.2.9",
"resolved": "https://registry.npmjs.org/prism-media/-/prism-media-1.2.9.tgz",
"integrity": "sha512-UHCYuqHipbTR1ZsXr5eg4JUmHER8Ss4YEb9Azn+9zzJ7/jlTtD1h0lc4g6tNx3eMlB8Mp6bfll0LPMAV4R6r3Q==",
"dev": true,
"peerDependencies": {
"@discordjs/opus": "^0.5.0",
"ffmpeg-static": "^4.2.7 || ^3.0.0 || ^2.4.0",
@@ -10552,6 +10565,12 @@
"node": ">= 6"
}
},
"node_modules/tiny-typed-emitter": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/tiny-typed-emitter/-/tiny-typed-emitter-2.0.3.tgz",
"integrity": "sha512-MaCqhHlp6EAWN25yqBlajgd4scxxI2eJr7+EgoUAOV9UkMU3us/yp2bEnc2yOvyeDF8TUWuaz3zZCPGTKFJIpA==",
"dev": true
},
"node_modules/tmp": {
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz",
@@ -10833,11 +10852,6 @@
"node": "*"
}
},
"node_modules/tweetnacl": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz",
"integrity": "sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw=="
},
"node_modules/type-check": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
@@ -12629,6 +12643,19 @@
"mime-types": "^2.1.12"
}
},
"@discordjs/voice": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/@discordjs/voice/-/voice-0.3.0.tgz",
"integrity": "sha512-jPtzfjCmHe1JmWbwsQ7YYfSHBaglVy5ewDROL4BQpyA60Dpo54ksB0Hv1T2L/B7tRM5nCMo5PDuElaZOBqaTmA==",
"dev": true,
"requires": {
"@types/ws": "^7.4.4",
"discord-api-types": "^0.18.1",
"prism-media": "^1.2.9",
"tiny-typed-emitter": "^2.0.3",
"ws": "^7.4.4"
}
},
"@eslint/eslintrc": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.1.tgz",
@@ -18552,7 +18579,8 @@
"prism-media": {
"version": "1.2.9",
"resolved": "https://registry.npmjs.org/prism-media/-/prism-media-1.2.9.tgz",
"integrity": "sha512-UHCYuqHipbTR1ZsXr5eg4JUmHER8Ss4YEb9Azn+9zzJ7/jlTtD1h0lc4g6tNx3eMlB8Mp6bfll0LPMAV4R6r3Q=="
"integrity": "sha512-UHCYuqHipbTR1ZsXr5eg4JUmHER8Ss4YEb9Azn+9zzJ7/jlTtD1h0lc4g6tNx3eMlB8Mp6bfll0LPMAV4R6r3Q==",
"dev": true
},
"process-nextick-args": {
"version": "2.0.1",
@@ -19637,6 +19665,12 @@
}
}
},
"tiny-typed-emitter": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/tiny-typed-emitter/-/tiny-typed-emitter-2.0.3.tgz",
"integrity": "sha512-MaCqhHlp6EAWN25yqBlajgd4scxxI2eJr7+EgoUAOV9UkMU3us/yp2bEnc2yOvyeDF8TUWuaz3zZCPGTKFJIpA==",
"dev": true
},
"tmp": {
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz",
@@ -19857,11 +19891,6 @@
"safe-buffer": "^5.0.1"
}
},
"tweetnacl": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz",
"integrity": "sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw=="
},
"type-check": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",

View File

@@ -49,14 +49,13 @@
"abort-controller": "^3.0.0",
"discord-api-types": "^0.18.1",
"node-fetch": "^2.6.1",
"prism-media": "^1.2.9",
"tweetnacl": "^1.0.3",
"ws": "^7.4.6"
},
"devDependencies": {
"@commitlint/cli": "^12.1.4",
"@commitlint/config-angular": "^12.1.4",
"@discordjs/docgen": "^0.10.0",
"@discordjs/voice": "^0.3.0",
"@types/node": "^12.12.6",
"conventional-changelog-cli": "^2.1.1",
"cross-env": "^7.0.3",

View File

@@ -37,7 +37,7 @@ class GuildDeleteAction extends Action {
}
for (const channel of guild.channels.cache.values()) this.client.channels.remove(channel.id);
guild.me?.voice.connection?.disconnect();
client.voice.adapters.get(data.id)?.destroy();
// Delete guild
client.guilds.cache.delete(guild.id);

View File

@@ -1,9 +1,6 @@
'use strict';
const VoiceBroadcast = require('./VoiceBroadcast');
const VoiceConnection = require('./VoiceConnection');
const { Error } = require('../../errors');
const Collection = require('../../util/Collection');
const { Events } = require('../../util/Constants');
/**
* Manages voice connections for the client
@@ -19,98 +16,29 @@ class ClientVoiceManager {
Object.defineProperty(this, 'client', { value: client });
/**
* A collection mapping connection IDs to the Connection objects
* @type {Collection<Snowflake, VoiceConnection>}
* Maps guild IDs to voice adapters created for use with @discordjs/voice.
* @type {Map<Snowflake, Object>}
*/
this.connections = new Collection();
this.adapters = new Map();
/**
* Active voice broadcasts that have been created
* @type {VoiceBroadcast[]}
*/
this.broadcasts = [];
}
/**
* Creates a voice broadcast.
* @returns {VoiceBroadcast}
*/
createBroadcast() {
const broadcast = new VoiceBroadcast(this.client);
this.broadcasts.push(broadcast);
return broadcast;
}
onVoiceServer({ guild_id, token, endpoint }) {
this.client.emit('debug', `[VOICE] voiceServer guild: ${guild_id} token: ${token} endpoint: ${endpoint}`);
const connection = this.connections.get(guild_id);
if (connection) connection.setTokenAndEndpoint(token, endpoint);
}
onVoiceStateUpdate({ guild_id, session_id, channel_id }) {
const connection = this.connections.get(guild_id);
this.client.emit('debug', `[VOICE] connection? ${!!connection}, ${guild_id} ${session_id} ${channel_id}`);
if (!connection) return;
if (!channel_id) {
connection._disconnect();
this.connections.delete(guild_id);
return;
}
const channel = this.client.channels.cache.get(channel_id);
if (channel) {
connection.channel = channel;
connection.setSessionID(session_id);
} else {
this.client.emit('debug', `[VOICE] disconnecting from guild ${guild_id} as channel ${channel_id} is uncached`);
connection.disconnect();
}
}
/**
* Sets up a request to join a voice or stage channel.
* @param {VoiceChannel|StageChannel} channel The channel to join
* @returns {Promise<VoiceConnection>}
* @private
*/
joinChannel(channel) {
return new Promise((resolve, reject) => {
if (!channel.joinable) {
throw new Error('VOICE_JOIN_CHANNEL', channel.full);
}
let connection = this.connections.get(channel.guild.id);
if (connection) {
if (connection.channel.id !== channel.id) {
this.connections.get(channel.guild.id).updateChannel(channel);
client.on(Events.SHARD_DISCONNECT, (_, shardID) => {
for (const [guildID, adapter] of this.adapters.entries()) {
if (client.guilds.cache.get(guildID)?.shardID === shardID) {
adapter.destroy();
}
resolve(connection);
return;
} else {
connection = new VoiceConnection(this, channel);
connection.on('debug', msg =>
this.client.emit('debug', `[VOICE (${channel.guild.id}:${connection.status})]: ${msg}`),
);
connection.authenticate();
this.connections.set(channel.guild.id, connection);
}
connection.once('failed', reason => {
this.connections.delete(channel.guild.id);
reject(reason);
});
connection.on('error', reject);
connection.once('authenticated', () => {
connection.once('ready', () => {
resolve(connection);
connection.removeListener('error', reject);
});
connection.once('disconnect', () => this.connections.delete(channel.guild.id));
});
});
}
onVoiceServer(payload) {
this.adapters.get(payload.guild_id)?.onVoiceServerUpdate(payload);
}
onVoiceStateUpdate(payload) {
if (payload.guild_id && payload.session_id && payload.user_id === this.client.user?.id) {
this.adapters.get(payload.guild_id)?.onVoiceStateUpdate(payload);
}
}
}
module.exports = ClientVoiceManager;

View File

@@ -1,111 +0,0 @@
'use strict';
const EventEmitter = require('events');
const BroadcastAudioPlayer = require('./player/BroadcastAudioPlayer');
const PlayInterface = require('./util/PlayInterface');
const { Events } = require('../../util/Constants');
/**
 * A voice broadcast can be played across multiple voice connections for improved shared-stream efficiency.
 *
 * Example usage:
 * ```js
 * const broadcast = client.voice.createBroadcast();
 * broadcast.play('./music.mp3');
 * // Play "music.mp3" in all voice connections that the client is in
 * for (const connection of client.voice.connections.values()) {
 *   connection.play(broadcast);
 * }
 * ```
 * @implements {PlayInterface}
 * @extends {EventEmitter}
 */
class VoiceBroadcast extends EventEmitter {
  constructor(client) {
    super();
    /**
     * The client that created the broadcast
     * @type {Client}
     */
    this.client = client;
    /**
     * The subscribed StreamDispatchers of this broadcast
     * @type {StreamDispatcher[]}
     */
    this.subscribers = [];
    // Dedicated player that feeds audio to every subscriber at once.
    this.player = new BroadcastAudioPlayer(this);
  }

  /**
   * The current master dispatcher, if any. This dispatcher controls all that is played by subscribed dispatchers.
   * @type {?BroadcastDispatcher}
   * @readonly
   */
  get dispatcher() {
    return this.player.dispatcher;
  }

  /**
   * Play an audio resource.
   * @param {ReadableStream|string} resource The resource to play.
   * @param {StreamOptions} [options] The options to play.
   * @example
   * // Play a local audio file
   * broadcast.play('/home/hydrabolt/audio.mp3', { volume: 0.5 });
   * @example
   * // Play a ReadableStream
   * broadcast.play(ytdl('https://www.youtube.com/watch?v=ZlAU_w7-Xp8', { filter: 'audioonly' }));
   * @example
   * // Using different protocols: https://ffmpeg.org/ffmpeg-protocols.html
   * broadcast.play('http://www.sample-videos.com/audio/mp3/wave.mp3');
   * @returns {BroadcastDispatcher}
   */
  play() {
    // Replaced by PlayInterface.applyToClass below; this stub is never the effective implementation.
    return null;
  }

  /**
   * Ends the broadcast, unsubscribing all subscribed channels and deleting the broadcast
   */
  end() {
    // delete() splices subscribers while we iterate; for...of over the live array handles that.
    for (const subscriber of this.subscribers) this.delete(subscriber);
    const position = this.client.voice.broadcasts.indexOf(this);
    if (position !== -1) this.client.voice.broadcasts.splice(position, 1);
  }

  // Subscribes a dispatcher to this broadcast. Returns false if it was already subscribed.
  add(dispatcher) {
    if (this.subscribers.includes(dispatcher)) return false;
    this.subscribers.push(dispatcher);
    /**
     * Emitted whenever a stream dispatcher subscribes to the broadcast.
     * @event VoiceBroadcast#subscribe
     * @param {StreamDispatcher} subscriber The subscribed dispatcher
     */
    this.emit(Events.VOICE_BROADCAST_SUBSCRIBE, dispatcher);
    return true;
  }

  // Unsubscribes a dispatcher and destroys it. Returns false if it was not subscribed.
  delete(dispatcher) {
    const position = this.subscribers.indexOf(dispatcher);
    if (position === -1) return false;
    this.subscribers.splice(position, 1);
    dispatcher.destroy();
    /**
     * Emitted whenever a stream dispatcher unsubscribes from the broadcast.
     * @event VoiceBroadcast#unsubscribe
     * @param {StreamDispatcher} dispatcher The unsubscribed dispatcher
     */
    this.emit(Events.VOICE_BROADCAST_UNSUBSCRIBE, dispatcher);
    return true;
  }
}

PlayInterface.applyToClass(VoiceBroadcast);

module.exports = VoiceBroadcast;

View File

@@ -1,526 +0,0 @@
'use strict';
const EventEmitter = require('events');
const VoiceUDP = require('./networking/VoiceUDPClient');
const VoiceWebSocket = require('./networking/VoiceWebSocket');
const AudioPlayer = require('./player/AudioPlayer');
const VoiceReceiver = require('./receiver/Receiver');
const PlayInterface = require('./util/PlayInterface');
const Silence = require('./util/Silence');
const { Error } = require('../../errors');
const { OPCodes, VoiceOPCodes, VoiceStatus, Events } = require('../../util/Constants');
const Speaking = require('../../util/Speaking');
const Util = require('../../util/Util');
// Workaround for Discord now requiring silence to be sent before being able to receive audio
class SingleSilence extends Silence {
_read() {
super._read();
this.push(null);
}
}
const SUPPORTED_MODES = ['xsalsa20_poly1305_lite', 'xsalsa20_poly1305_suffix', 'xsalsa20_poly1305'];
/**
 * Represents a connection to a guild's voice server.
 * ```js
 * // Obtained using:
 * voiceChannel.join()
 *   .then(connection => {
 *
 *   });
 * ```
 * @extends {EventEmitter}
 * @implements {PlayInterface}
 */
class VoiceConnection extends EventEmitter {
  constructor(voiceManager, channel) {
    super();
    /**
     * The voice manager that instantiated this connection
     * @type {ClientVoiceManager}
     */
    this.voiceManager = voiceManager;
    /**
     * The voice channel or stage channel this connection is currently serving
     * @type {VoiceChannel|StageChannel}
     */
    this.channel = channel;
    /**
     * The current status of the voice connection
     * @type {VoiceStatus}
     */
    this.status = VoiceStatus.AUTHENTICATING;
    /**
     * Our current speaking state
     * @type {Readonly<Speaking>}
     */
    this.speaking = new Speaking().freeze();
    /**
     * The authentication data needed to connect to the voice server
     * (accumulates token, endpoint, sessionID, ssrc, mode, secret_key as handshake steps complete)
     * @type {Object}
     * @private
     */
    this.authentication = {};
    /**
     * The audio player for this voice connection
     * @type {AudioPlayer}
     */
    this.player = new AudioPlayer(this);
    this.player.on('debug', m => {
      /**
       * Debug info from the connection.
       * @event VoiceConnection#debug
       * @param {string} message The debug message
       */
      this.emit('debug', `audio player - ${m}`);
    });
    this.player.on('error', e => {
      /**
       * Warning info from the connection.
       * @event VoiceConnection#warn
       * @param {string|Error} warning The warning
       */
      this.emit('warn', e);
    });
    // Tear down the player as soon as a disconnect begins.
    this.once('closing', () => this.player.destroy());
    /**
     * Map SSRC values to user IDs
     * @type {Map<number, Snowflake>}
     * @private
     */
    this.ssrcMap = new Map();
    /**
     * Tracks which users are talking
     * @type {Map<Snowflake, Readonly<Speaking>>}
     * @private
     */
    this._speaking = new Map();
    /**
     * Object that wraps contains the `ws` and `udp` sockets of this voice connection
     * @type {Object}
     * @private
     */
    this.sockets = {};
    /**
     * The voice receiver of this connection
     * @type {VoiceReceiver}
     */
    this.receiver = new VoiceReceiver(this);
  }

  /**
   * The client that instantiated this connection
   * @type {Client}
   * @readonly
   */
  get client() {
    return this.voiceManager.client;
  }

  /**
   * The current stream dispatcher (if any)
   * @type {?StreamDispatcher}
   * @readonly
   */
  get dispatcher() {
    return this.player.dispatcher;
  }

  /**
   * Sets whether the voice connection should display as "speaking", "soundshare" or "none".
   * No-op when the requested state is already set or while not connected.
   * @param {BitFieldResolvable} value The new speaking state
   */
  setSpeaking(value) {
    if (this.speaking.equals(value)) return;
    if (this.status !== VoiceStatus.CONNECTED) return;
    this.speaking = new Speaking(value).freeze();
    this.sockets.ws
      .sendPacket({
        op: VoiceOPCodes.SPEAKING,
        d: {
          speaking: this.speaking.bitfield,
          delay: 0,
          ssrc: this.authentication.ssrc,
        },
      })
      .catch(e => {
        // Send failures are surfaced as debug only; the speaking state was already updated locally.
        this.emit('debug', e);
      });
  }

  /**
   * The voice state of this connection
   * @type {?VoiceState}
   */
  get voice() {
    return this.channel.guild.me?.voice ?? null;
  }

  /**
   * Sends a request to the main gateway to join a voice channel.
   * @param {Object} [options] The options to provide
   * @returns {Promise<Shard>}
   * @private
   */
  sendVoiceStateUpdate(options = {}) {
    options = Util.mergeDefault(
      {
        guild_id: this.channel.guild.id,
        channel_id: this.channel.id,
        self_mute: this.voice?.selfMute ?? false,
        self_deaf: this.voice?.selfDeaf ?? false,
      },
      options,
    );
    this.emit('debug', `Sending voice state update: ${JSON.stringify(options)}`);
    return this.channel.guild.shard.send(
      {
        op: OPCodes.VOICE_STATE_UPDATE,
        d: options,
      },
      true,
    );
  }

  /**
   * Set the token and endpoint required to connect to the voice servers.
   * @param {string} token The voice token
   * @param {string} endpoint The voice endpoint
   * @returns {void}
   * @private
   */
  setTokenAndEndpoint(token, endpoint) {
    this.emit('debug', `Token "${token}" and endpoint "${endpoint}"`);
    if (!endpoint) {
      // Signifies awaiting endpoint stage
      return;
    }
    if (!token) {
      this.authenticateFailed('VOICE_TOKEN_ABSENT');
      return;
    }
    // Strip any port suffix: keep everything before the first ':'.
    endpoint = endpoint.match(/([^:]*)/)[0];
    this.emit('debug', `Endpoint resolved as ${endpoint}`);
    if (!endpoint) {
      this.authenticateFailed('VOICE_INVALID_ENDPOINT');
      return;
    }
    if (this.status === VoiceStatus.AUTHENTICATING) {
      this.authentication.token = token;
      this.authentication.endpoint = endpoint;
      this.checkAuthenticated();
    } else if (token !== this.authentication.token || endpoint !== this.authentication.endpoint) {
      // Credentials changed after the handshake — presumably a voice region change; reconnect.
      this.reconnect(token, endpoint);
    }
  }

  /**
   * Sets the Session ID for the connection.
   * @param {string} sessionID The voice session ID
   * @private
   */
  setSessionID(sessionID) {
    this.emit('debug', `Setting sessionID ${sessionID} (stored as "${this.authentication.sessionID}")`);
    if (!sessionID) {
      this.authenticateFailed('VOICE_SESSION_ABSENT');
      return;
    }
    if (this.status === VoiceStatus.AUTHENTICATING) {
      this.authentication.sessionID = sessionID;
      this.checkAuthenticated();
    } else if (sessionID !== this.authentication.sessionID) {
      this.authentication.sessionID = sessionID;
      /**
       * Emitted when a new session ID is received.
       * @event VoiceConnection#newSession
       * @private
       */
      this.emit('newSession', sessionID);
    }
  }

  /**
   * Checks whether the voice connection is authenticated.
   * Only proceeds to connect() once token, endpoint AND sessionID have all arrived.
   * @private
   */
  checkAuthenticated() {
    const { token, endpoint, sessionID } = this.authentication;
    // NOTE(review): this debug line fires on every partial check, before the completeness test below.
    this.emit('debug', `Authenticated with sessionID ${sessionID}`);
    if (token && endpoint && sessionID) {
      this.status = VoiceStatus.CONNECTING;
      /**
       * Emitted when we successfully initiate a voice connection.
       * @event VoiceConnection#authenticated
       */
      this.emit('authenticated');
      this.connect();
    }
  }

  /**
   * Invoked when we fail to initiate a voice connection.
   * Emits 'failed' during the initial handshake, 'error' afterwards; always ends DISCONNECTED.
   * @param {string} reason The reason for failure
   * @private
   */
  authenticateFailed(reason) {
    this.client.clearTimeout(this.connectTimeout);
    this.emit('debug', `Authenticate failed - ${reason}`);
    if (this.status === VoiceStatus.AUTHENTICATING) {
      /**
       * Emitted when we fail to initiate a voice connection.
       * @event VoiceConnection#failed
       * @param {Error} error The encountered error
       */
      this.emit('failed', new Error(reason));
    } else {
      /**
       * Emitted whenever the connection encounters an error.
       * @event VoiceConnection#error
       * @param {Error} error The encountered error
       */
      this.emit('error', new Error(reason));
    }
    this.status = VoiceStatus.DISCONNECTED;
  }

  /**
   * Move to a different voice channel or stage channel in the same guild.
   * @param {VoiceChannel|StageChannel} channel The channel to move to
   * @private
   */
  updateChannel(channel) {
    this.channel = channel;
    this.sendVoiceStateUpdate();
  }

  /**
   * Attempts to authenticate to the voice server.
   * Fails the handshake if the server data does not arrive within 15 seconds.
   * @private
   */
  authenticate() {
    this.sendVoiceStateUpdate();
    this.connectTimeout = this.client.setTimeout(() => this.authenticateFailed('VOICE_CONNECTION_TIMEOUT'), 15000);
  }

  /**
   * Attempts to reconnect to the voice server (typically after a region change).
   * @param {string} token The voice token
   * @param {string} endpoint The voice endpoint
   * @private
   */
  reconnect(token, endpoint) {
    this.authentication.token = token;
    this.authentication.endpoint = endpoint;
    this.speaking = new Speaking().freeze();
    this.status = VoiceStatus.RECONNECTING;
    this.emit('debug', `Reconnecting to ${endpoint}`);
    /**
     * Emitted when the voice connection is reconnecting (typically after a region change).
     * @event VoiceConnection#reconnecting
     */
    this.emit('reconnecting');
    this.connect();
  }

  /**
   * Disconnects the voice connection, causing a disconnect and closing event to be emitted.
   */
  disconnect() {
    this.emit('closing');
    this.emit('debug', 'disconnect() triggered');
    this.client.clearTimeout(this.connectTimeout);
    // Only deregister from the manager if this instance is still the registered connection.
    const conn = this.voiceManager.connections.get(this.channel.guild.id);
    if (conn === this) this.voiceManager.connections.delete(this.channel.guild.id);
    this.sendVoiceStateUpdate({
      channel_id: null,
    });
    this._disconnect();
  }

  /**
   * Internally disconnects (doesn't send disconnect packet).
   * @private
   */
  _disconnect() {
    this.cleanup();
    this.status = VoiceStatus.DISCONNECTED;
    /**
     * Emitted when the voice connection disconnects.
     * @event VoiceConnection#disconnect
     */
    this.emit('disconnect');
  }

  /**
   * Cleans up after disconnect: destroys the player, resets speaking state,
   * and detaches/shuts down the ws and udp sockets.
   * @private
   */
  cleanup() {
    this.player.destroy();
    this.speaking = new Speaking().freeze();
    const { ws, udp } = this.sockets;
    this.emit('debug', 'Connection clean up');
    if (ws) {
      ws.removeAllListeners('error');
      ws.removeAllListeners('ready');
      ws.removeAllListeners('sessionDescription');
      ws.removeAllListeners('speaking');
      ws.shutdown();
    }
    if (udp) udp.removeAllListeners('error');
    this.sockets.ws = null;
    this.sockets.udp = null;
  }

  /**
   * Connect the voice connection: creates fresh ws/udp sockets and wires their events.
   * Throws if sockets already exist outside of a reconnect.
   * @private
   */
  connect() {
    this.emit('debug', `Connect triggered`);
    if (this.status !== VoiceStatus.RECONNECTING) {
      if (this.sockets.ws) throw new Error('WS_CONNECTION_EXISTS');
      if (this.sockets.udp) throw new Error('UDP_CONNECTION_EXISTS');
    }
    // Shut down any leftover sockets (only possible on the RECONNECTING path).
    if (this.sockets.ws) this.sockets.ws.shutdown();
    if (this.sockets.udp) this.sockets.udp.shutdown();
    this.sockets.ws = new VoiceWebSocket(this);
    this.sockets.udp = new VoiceUDP(this);
    const { ws, udp } = this.sockets;
    ws.on('debug', msg => this.emit('debug', msg));
    udp.on('debug', msg => this.emit('debug', msg));
    ws.on('error', err => this.emit('error', err));
    udp.on('error', err => this.emit('error', err));
    ws.on('ready', this.onReady.bind(this));
    ws.on('sessionDescription', this.onSessionDescription.bind(this));
    ws.on('startSpeaking', this.onStartSpeaking.bind(this));
    this.sockets.ws.connect();
  }

  /**
   * Invoked when the voice websocket is ready.
   * @param {Object} data The received data
   * @private
   */
  onReady(data) {
    Object.assign(this.authentication, data);
    // Pick the first server-offered mode that we support (SUPPORTED_MODES is in preference order).
    for (let mode of data.modes) {
      if (SUPPORTED_MODES.includes(mode)) {
        this.authentication.mode = mode;
        this.emit('debug', `Selecting the ${mode} mode`);
        break;
      }
    }
    this.sockets.udp.createUDPSocket(data.ip);
  }

  /**
   * Invoked when a session description is received.
   * @param {Object} data The received data
   * @private
   */
  onSessionDescription(data) {
    Object.assign(this.authentication, data);
    this.status = VoiceStatus.CONNECTED;
    const ready = () => {
      this.client.clearTimeout(this.connectTimeout);
      this.emit('debug', `Ready with authentication details: ${JSON.stringify(this.authentication)}`);
      /**
       * Emitted once the connection is ready, when a promise to join a voice channel resolves,
       * the connection will already be ready.
       * @event VoiceConnection#ready
       */
      this.emit('ready');
    };
    if (this.dispatcher) {
      ready();
    } else {
      // This serves to provide support for voice receive, sending audio is required to receive it.
      const dispatcher = this.play(new SingleSilence(), { type: 'opus', volume: false });
      dispatcher.once('finish', ready);
    }
  }

  // Records/updates the SSRC -> { userID, speaking } mapping from a startSpeaking payload,
  // preserving any fields already stored for that SSRC.
  onStartSpeaking({ user_id, ssrc, speaking }) {
    this.ssrcMap.set(+ssrc, {
      ...(this.ssrcMap.get(+ssrc) || {}),
      userID: user_id,
      speaking: speaking,
    });
  }

  /**
   * Invoked when a speaking event is received.
   * @param {Object} data The received data
   * @private
   */
  onSpeaking({ user_id, speaking }) {
    speaking = new Speaking(speaking).freeze();
    const guild = this.channel.guild;
    const user = this.client.users.cache.get(user_id);
    const old = this._speaking.get(user_id);
    this._speaking.set(user_id, speaking);
    /**
     * Emitted whenever a user changes speaking state.
     * @event VoiceConnection#speaking
     * @param {User} user The user that has changed speaking state
     * @param {Readonly<Speaking>} speaking The speaking state of the user
     */
    if (this.status === VoiceStatus.CONNECTED) {
      this.emit('speaking', user, speaking);
      // Tell the receiver the user went silent so it can flush their packet state.
      if (!speaking.has(Speaking.FLAGS.SPEAKING)) {
        this.receiver.packets._stoppedSpeaking(user_id);
      }
    }
    if (guild && user && !speaking.equals(old)) {
      const member = guild.members.resolve(user);
      if (member) {
        /**
         * Emitted once a guild member changes speaking state.
         * @event Client#guildMemberSpeaking
         * @param {GuildMember} member The member that started/stopped speaking
         * @param {Readonly<Speaking>} speaking The speaking state of the member
         */
        this.client.emit(Events.GUILD_MEMBER_SPEAKING, member, speaking);
      }
    }
  }

  // Placeholder; the real implementation is mixed in by PlayInterface.applyToClass below.
  play() {} // eslint-disable-line no-empty-function
}

PlayInterface.applyToClass(VoiceConnection);

module.exports = VoiceConnection;

View File

@@ -1,46 +0,0 @@
'use strict';
const StreamDispatcher = require('./StreamDispatcher');
/**
 * The class that sends voice packet data to the voice connection.
 * Fans every written chunk out to all dispatchers subscribed to the owning broadcast.
 * @implements {VolumeInterface}
 * @extends {StreamDispatcher}
 */
class BroadcastDispatcher extends StreamDispatcher {
  constructor(player, options, streams) {
    super(player, options, streams);
    // The broadcast this dispatcher feeds; its subscribers receive every chunk.
    this.broadcast = player.broadcast;
  }

  _write(chunk, enc, done) {
    if (!this.startTime) this.startTime = Date.now();
    // Relay the chunk to every subscribed dispatcher without a per-subscriber callback.
    for (const subscriber of this.broadcast.subscribers) subscriber._write(chunk, enc);
    this._step(done);
  }

  _destroy(err, cb) {
    if (this.player.dispatcher === this) this.player.dispatcher = null;
    const { opus, ffmpeg } = this.streams;
    if (opus) opus.unpipe(this);
    if (ffmpeg) ffmpeg.destroy();
    super._destroy(err, cb);
  }

  /**
   * Set the bitrate of the current Opus encoder if using a compatible Opus stream.
   * @param {number} value New bitrate, in kbps
   * If set to 'auto', 48kbps will be used
   * @returns {boolean} true if the bitrate has been successfully changed.
   */
  setBitrate(value) {
    const encoder = this.streams.opus;
    if (!value || !encoder || !encoder.setBitrate) return false;
    // Encoder API expects bits per second; 'auto' maps to a fixed 48kbps for broadcasts.
    encoder.setBitrate((value === 'auto' ? 48 : value) * 1000);
    return true;
  }
}

module.exports = BroadcastDispatcher;

View File

@@ -1,354 +0,0 @@
'use strict';
const { Writable } = require('stream');
const secretbox = require('../util/Secretbox');
const Silence = require('../util/Silence');
const VolumeInterface = require('../util/VolumeInterface');
const FRAME_LENGTH = 20;
const CHANNELS = 2;
const TIMESTAMP_INC = (48000 / 100) * CHANNELS;
const MAX_NONCE_SIZE = 2 ** 32 - 1;
const nonce = Buffer.alloc(24);
/**
* @external WritableStream
* @see {@link https://nodejs.org/api/stream.html#stream_class_stream_writable}
*/
/**
* The class that sends voice packet data to the voice connection.
* ```js
* // Obtained using:
* voiceChannel.join().then(connection => {
* // You can play a file or a stream here:
* const dispatcher = connection.play('/home/hydrabolt/audio.mp3');
* });
* ```
* @implements {VolumeInterface}
* @extends {WritableStream}
*/
class StreamDispatcher extends Writable {
constructor(player, { seek = 0, volume = 1, fec, plp, bitrate = 96, highWaterMark = 12 } = {}, streams) {
const streamOptions = { seek, volume, fec, plp, bitrate, highWaterMark };
super(streamOptions);
/**
* The Audio Player that controls this dispatcher
* @type {AudioPlayer}
*/
this.player = player;
this.streamOptions = streamOptions;
this.streams = streams;
this.streams.silence = new Silence();
this._nonce = 0;
this._nonceBuffer = Buffer.alloc(24);
/**
* The time that the stream was paused at (null if not paused)
* @type {?number}
*/
this.pausedSince = null;
this._writeCallback = null;
/**
* The broadcast controlling this dispatcher, if any
* @type {?VoiceBroadcast}
*/
this.broadcast = this.streams.broadcast || null;
this._pausedTime = 0;
this._silentPausedTime = 0;
this.count = 0;
this.on('finish', () => {
this._cleanup();
this._setSpeaking(0);
});
this.setVolume(volume);
this.setBitrate(bitrate);
if (typeof fec !== 'undefined') this.setFEC(fec);
if (typeof plp !== 'undefined') this.setPLP(plp);
const streamError = (type, err) => {
/**
* Emitted when the dispatcher encounters an error.
* @event StreamDispatcher#error
*/
if (type && err) {
err.message = `${type} stream: ${err.message}`;
this.emit(this.player.dispatcher === this ? 'error' : 'debug', err);
}
this.destroy();
};
this.on('error', () => streamError());
if (this.streams.input) this.streams.input.on('error', err => streamError('input', err));
if (this.streams.ffmpeg) this.streams.ffmpeg.on('error', err => streamError('ffmpeg', err));
if (this.streams.opus) this.streams.opus.on('error', err => streamError('opus', err));
if (this.streams.volume) this.streams.volume.on('error', err => streamError('volume', err));
}
get _sdata() {
return this.player.streamingData;
}
_write(chunk, enc, done) {
if (!this.startTime) {
/**
* Emitted once the stream has started to play.
* @event StreamDispatcher#start
*/
this.emit('start');
this.startTime = Date.now();
}
this._playChunk(chunk);
this._step(done);
}
_destroy(err, cb) {
this._cleanup();
super._destroy(err, cb);
}
_cleanup() {
if (this.player.dispatcher === this) this.player.dispatcher = null;
const { streams } = this;
if (streams.broadcast) streams.broadcast.delete(this);
if (streams.opus) streams.opus.destroy();
if (streams.ffmpeg) streams.ffmpeg.destroy();
}
/**
* Pauses playback
* @param {boolean} [silence=false] Whether to play silence while paused to prevent audio glitches
*/
pause(silence = false) {
if (this.paused) return;
if (this.streams.opus) this.streams.opus.unpipe(this);
if (silence) {
this.streams.silence.pipe(this);
this._silence = true;
} else {
this._setSpeaking(0);
}
this.pausedSince = Date.now();
}
/**
* Whether or not playback is paused
* @type {boolean}
* @readonly
*/
get paused() {
return Boolean(this.pausedSince);
}
/**
* Total time that this dispatcher has been paused in milliseconds
* @type {number}
* @readonly
*/
get pausedTime() {
return this._silentPausedTime + this._pausedTime + (this.paused ? Date.now() - this.pausedSince : 0);
}
/**
* Resumes playback
*/
resume() {
if (!this.pausedSince) return;
this.streams.silence.unpipe(this);
if (this.streams.opus) this.streams.opus.pipe(this);
if (this._silence) {
this._silentPausedTime += Date.now() - this.pausedSince;
this._silence = false;
} else {
this._pausedTime += Date.now() - this.pausedSince;
}
this.pausedSince = null;
if (typeof this._writeCallback === 'function') this._writeCallback();
}
/**
* The time (in milliseconds) that the dispatcher has actually been playing audio for
* @type {number}
* @readonly
*/
get streamTime() {
return this.count * FRAME_LENGTH;
}
/**
* The time (in milliseconds) that the dispatcher has been playing audio for, taking into account skips and pauses
* @type {number}
* @readonly
*/
get totalStreamTime() {
return Date.now() - this.startTime;
}
/**
* Set the bitrate of the current Opus encoder if using a compatible Opus stream.
* @param {number} value New bitrate, in kbps
* If set to 'auto', the voice channel's bitrate will be used
* @returns {boolean} true if the bitrate has been successfully changed.
*/
setBitrate(value) {
if (!value || !this.bitrateEditable) return false;
const bitrate = value === 'auto' ? this.player.voiceConnection.channel.bitrate : value;
this.streams.opus.setBitrate(bitrate * 1000);
return true;
}
/**
* Sets the expected packet loss percentage if using a compatible Opus stream.
* @param {number} value between 0 and 1
* @returns {boolean} Returns true if it was successfully set.
*/
setPLP(value) {
if (!this.bitrateEditable) return false;
this.streams.opus.setPLP(value);
return true;
}
/**
* Enables or disables forward error correction if using a compatible Opus stream.
* @param {boolean} enabled true to enable
* @returns {boolean} Returns true if it was successfully set.
*/
setFEC(enabled) {
if (!this.bitrateEditable) return false;
this.streams.opus.setFEC(enabled);
return true;
}
// Prepares dispatch of the next frame: stores `done` as the pending write
// callback, schedules when it should fire (unless a broadcast drives timing
// externally), and advances the RTP sequence/timestamp bookkeeping.
_step(done) {
  this._writeCallback = () => {
    this._writeCallback = null;
    done();
  };
  if (!this.streams.broadcast) {
    // Milliseconds until the next frame is due, compensating for elapsed
    // playback time and accumulated (non-silent) pause time.
    const next = FRAME_LENGTH + this.count * FRAME_LENGTH - (Date.now() - this.startTime - this._pausedTime);
    setTimeout(() => {
      // Fire only if still playing (or playing silence) and not already consumed.
      if ((!this.pausedSince || this._silence) && this._writeCallback) this._writeCallback();
    }, next);
  }
  this._sdata.sequence++;
  this._sdata.timestamp += TIMESTAMP_INC;
  // Wrap at the RTP field widths (16-bit sequence, 32-bit timestamp).
  if (this._sdata.sequence >= 2 ** 16) this._sdata.sequence = 0;
  if (this._sdata.timestamp >= 2 ** 32) this._sdata.timestamp = 0;
  this.count++;
}
// Stream finalizer: drops any pending write callback so no further frames fire.
_final(callback) {
  this._writeCallback = null;
  callback();
}
// Encrypts and sends one opus chunk, unless this dispatcher has been replaced
// or the connection has not yet received a secret key.
_playChunk(chunk) {
  if (this.player.dispatcher !== this || !this.player.voiceConnection.authentication.secret_key) return;
  this._sendPacket(this._createPacket(this._sdata.sequence, this._sdata.timestamp, chunk));
}
/**
 * Encrypts an opus packet with the connection's secret key.
 * @param {Buffer} buffer The opus audio to encrypt
 * @returns {Buffer[]} The ciphertext, plus any nonce bytes that must be
 * appended to the outgoing packet
 */
_encrypt(buffer) {
  const { secret_key, mode } = this.player.voiceConnection.authentication;
  if (mode === 'xsalsa20_poly1305_lite') {
    // Incrementing 32-bit nonce, transmitted as a 4-byte suffix.
    this._nonce++;
    if (this._nonce > MAX_NONCE_SIZE) this._nonce = 0;
    this._nonceBuffer.writeUInt32BE(this._nonce, 0);
    return [secretbox.methods.close(buffer, this._nonceBuffer, secret_key), this._nonceBuffer.slice(0, 4)];
  } else if (mode === 'xsalsa20_poly1305_suffix') {
    // Random 24-byte nonce, transmitted in full as a suffix.
    const random = secretbox.methods.random(24);
    return [secretbox.methods.close(buffer, random, secret_key), random];
  } else {
    // Default mode: the RTP header itself is the nonce (copied into `nonce`
    // by _createPacket), so nothing extra is appended.
    return [secretbox.methods.close(buffer, nonce, secret_key)];
  }
}
/**
 * Builds a complete voice packet: a 12-byte RTP header followed by the
 * encrypted audio (and any trailing nonce bytes from _encrypt).
 * @param {number} sequence RTP sequence number
 * @param {number} timestamp RTP timestamp
 * @param {Buffer} buffer The opus audio to encrypt and append
 * @returns {Buffer} The assembled packet
 */
_createPacket(sequence, timestamp, buffer) {
  const packetBuffer = Buffer.alloc(12);
  // Fixed header bytes expected by the Discord voice gateway.
  packetBuffer[0] = 0x80;
  packetBuffer[1] = 0x78;
  packetBuffer.writeUIntBE(sequence, 2, 2);
  packetBuffer.writeUIntBE(timestamp, 4, 4);
  packetBuffer.writeUIntBE(this.player.voiceConnection.authentication.ssrc, 8, 4);
  // The header doubles as the nonce for the default encryption mode.
  packetBuffer.copy(nonce, 0, 0, 12);
  return Buffer.concat([packetBuffer, ...this._encrypt(buffer)]);
}
/**
 * Marks the dispatcher as speaking and sends a packet over the UDP socket.
 * @param {Buffer} packet The encrypted packet to send
 */
_sendPacket(packet) {
  /**
   * Emitted whenever the dispatcher has debug information.
   * @event StreamDispatcher#debug
   * @param {string} info The debug info
   */
  this._setSpeaking(1);
  if (!this.player.voiceConnection.sockets.udp) {
    this.emit('debug', 'Failed to send a packet - no UDP socket');
    return;
  }
  // Send failures clear the speaking state but are otherwise non-fatal.
  this.player.voiceConnection.sockets.udp.send(packet).catch(e => {
    this._setSpeaking(0);
    this.emit('debug', `Failed to send a packet - ${e}`);
  });
}
/**
 * Updates the speaking state on the voice connection (if it still exists)
 * and re-emits the value locally.
 * @param {number} value The speaking value to set
 */
_setSpeaking(value) {
  if (typeof this.player.voiceConnection !== 'undefined') {
    this.player.voiceConnection.setSpeaking(value);
  }
  /**
   * Emitted when the dispatcher starts/stops speaking.
   * @event StreamDispatcher#speaking
   * @param {boolean} value Whether or not the dispatcher is speaking
   */
  this.emit('speaking', value);
}
/**
 * Whether or not the volume of this dispatcher is editable
 * (i.e. a volume transform stream is attached)
 * @type {boolean}
 * @readonly
 */
get volumeEditable() {
  return Boolean(this.streams.volume);
}
/**
* Whether or not the Opus bitrate of this stream is editable
* @type {boolean}
* @readonly
*/
get bitrateEditable() {
return this.streams.opus && this.streams.opus.setBitrate;
}
// Volume
/**
 * The current volume of the dispatcher (1 when no volume transform is attached)
 * @type {number}
 * @readonly
 */
get volume() {
  return this.streams.volume ? this.streams.volume.volume : 1;
}
setVolume(value) {
if (!this.streams.volume) return false;
/**
* Emitted when the volume of this dispatcher changes.
* @event StreamDispatcher#volumeChange
* @param {number} oldVolume The old volume of this dispatcher
* @param {number} newVolume The new volume of this dispatcher
*/
this.emit('volumeChange', this.volume, value);
this.streams.volume.setVolume(value);
return true;
}
// Volume stubs for docs — the real implementations are mixed onto the
// prototype by VolumeInterface.applyToClass(StreamDispatcher) below.
/* eslint-disable no-empty-function*/
get volumeDecibels() {}
get volumeLogarithmic() {}
setVolumeDecibels() {}
setVolumeLogarithmic() {}
}
VolumeInterface.applyToClass(StreamDispatcher);
module.exports = StreamDispatcher;

View File

@@ -1,154 +0,0 @@
'use strict';
const udp = require('dgram');
const EventEmitter = require('events');
const { Error } = require('../../../errors');
const { VoiceOPCodes } = require('../../../util/Constants');
/**
 * Represents a UDP client for a Voice Connection.
 * @extends {EventEmitter}
 * @private
 */
class VoiceConnectionUDPClient extends EventEmitter {
  constructor(voiceConnection) {
    super();
    /**
     * The voice connection that this UDP client serves
     * @type {VoiceConnection}
     */
    this.voiceConnection = voiceConnection;
    /**
     * The UDP socket
     * @type {?Socket}
     */
    this.socket = null;
    /**
     * The address of the Discord voice server
     * @type {?string}
     */
    this.discordAddress = null;
    /**
     * The local IP address
     * @type {?string}
     */
    this.localAddress = null;
    /**
     * The local port
     * @type {?string}
     */
    this.localPort = null;
    // Tear the socket down when the owning voice connection begins closing.
    this.voiceConnection.on('closing', this.shutdown.bind(this));
  }
  /**
   * Closes and clears the UDP socket, if one exists.
   */
  shutdown() {
    this.emit('debug', `[UDP] shutdown requested`);
    if (this.socket) {
      this.socket.removeAllListeners('message');
      try {
        this.socket.close();
      } finally {
        // Always null out the socket, even if close() throws (e.g. already closed).
        this.socket = null;
      }
    }
  }
  /**
   * The port of the Discord voice server
   * @type {number}
   * @readonly
   */
  get discordPort() {
    return this.voiceConnection.authentication.port;
  }
  /**
   * Send a packet to the UDP client.
   * @param {Object} packet The packet to send
   * @returns {Promise<Object>}
   */
  send(packet) {
    return new Promise((resolve, reject) => {
      // Throwing inside the executor rejects the returned promise.
      if (!this.socket) throw new Error('UDP_SEND_FAIL');
      if (!this.discordAddress || !this.discordPort) throw new Error('UDP_ADDRESS_MALFORMED');
      this.socket.send(packet, 0, packet.length, this.discordPort, this.discordAddress, error => {
        if (error) {
          this.emit('debug', `[UDP] >> ERROR: ${error}`);
          reject(error);
        } else {
          resolve(packet);
        }
      });
    });
  }
  /**
   * Creates the UDP socket and starts IP discovery so the voice gateway can be
   * told our external address/port via SELECT_PROTOCOL.
   * @param {string} address The Discord voice server address
   * @returns {Promise<void>} Resolves once the discovery packet has been sent
   */
  async createUDPSocket(address) {
    this.discordAddress = address;
    const socket = (this.socket = udp.createSocket('udp4'));
    socket.on('error', e => {
      this.emit('debug', `[UDP] Error: ${e}`);
      this.emit('error', e);
    });
    socket.on('close', () => {
      this.emit('debug', '[UDP] socket closed');
    });
    this.emit('debug', `[UDP] created socket`);
    // The first message is the IP discovery response; later messages are audio.
    socket.once('message', message => {
      this.emit('debug', `[UDP] message: [${[...message]}] (${message})`);
      // Stop if the sockets have been deleted because the connection has been closed already
      if (!this.voiceConnection.sockets.ws) return;
      const packet = parseLocalPacket(message);
      if (packet.error) {
        this.emit('debug', `[UDP] ERROR: ${packet.error}`);
        this.emit('error', packet.error);
        return;
      }
      this.localAddress = packet.address;
      this.localPort = packet.port;
      // Tell the voice gateway which protocol/address/mode to use for audio.
      this.voiceConnection.sockets.ws.sendPacket({
        op: VoiceOPCodes.SELECT_PROTOCOL,
        d: {
          protocol: 'udp',
          data: {
            address: packet.address,
            port: packet.port,
            mode: this.voiceConnection.authentication.mode,
          },
        },
      });
      this.emit('debug', `[UDP] << ${JSON.stringify(packet)}`);
      // From here on, incoming datagrams are voice packets for the receiver.
      socket.on('message', buffer => this.voiceConnection.receiver.packets.push(buffer));
    });
    // IP discovery request: a 70-byte packet carrying our ssrc.
    const blankMessage = Buffer.alloc(70);
    blankMessage.writeUIntBE(this.voiceConnection.authentication.ssrc, 0, 4);
    this.emit('debug', `Sending IP discovery packet: [${[...blankMessage]}]`);
    await this.send(blankMessage);
    this.emit('debug', `Successfully sent IP discovery packet`);
  }
}
/**
 * Parses an IP discovery response into our external address and port.
 * @param {Buffer} message The raw discovery response
 * @returns {{address: string, port: number}|{error: Error}} The parsed result,
 * or an object holding the error if parsing failed
 */
function parseLocalPacket(message) {
  try {
    const packet = Buffer.from(message);
    // The address is a NUL-terminated string starting at byte 4.
    const nulIndex = packet.indexOf(0, 4);
    const end = nulIndex === -1 ? 4 : nulIndex;
    // latin1 matches the original byte-per-charCode decoding.
    const address = packet.toString('latin1', 4, end);
    // The port lives little-endian in the final two bytes.
    const port = packet.readUIntLE(packet.length - 2, 2);
    return { address, port };
  } catch (error) {
    return { error };
  }
}
module.exports = VoiceConnectionUDPClient;

View File

@@ -1,268 +0,0 @@
'use strict';
const EventEmitter = require('events');
const WebSocket = require('../../../WebSocket');
const { Error } = require('../../../errors');
const { OPCodes, VoiceOPCodes } = require('../../../util/Constants');
/**
 * Represents a Voice Connection's WebSocket.
 * @extends {EventEmitter}
 * @private
 */
class VoiceWebSocket extends EventEmitter {
  constructor(connection) {
    super();
    /**
     * The Voice Connection that this WebSocket serves
     * @type {VoiceConnection}
     */
    this.connection = connection;
    /**
     * How many connection attempts have been made
     * @type {number}
     */
    this.attempts = 0;
    // Set once shutdown() runs; prevents all further reconnect attempts.
    this.dead = false;
    this.connection.on('closing', this.shutdown.bind(this));
  }
  /**
   * The client of this voice WebSocket
   * @type {Client}
   * @readonly
   */
  get client() {
    return this.connection.client;
  }
  /**
   * Permanently shuts down this WebSocket; it will not reconnect afterwards.
   */
  shutdown() {
    this.emit('debug', `[WS] shutdown requested`);
    this.dead = true;
    this.reset();
  }
  /**
   * Resets the current WebSocket.
   */
  reset() {
    this.emit('debug', `[WS] reset requested`);
    if (this.ws) {
      if (this.ws.readyState !== WebSocket.CLOSED) this.ws.close();
      this.ws = null;
    }
    this.clearHeartbeat();
  }
  /**
   * Starts connecting to the Voice WebSocket Server.
   */
  connect() {
    this.emit('debug', `[WS] connect requested`);
    if (this.dead) return;
    if (this.ws) this.reset();
    // NOTE(review): `attempts` is never reset on a successful connection in
    // this class, so repeated drops eventually exhaust the 5-attempt budget.
    if (this.attempts >= 5) {
      this.emit('debug', new Error('VOICE_CONNECTION_ATTEMPTS_EXCEEDED', this.attempts));
      return;
    }
    this.attempts++;
    /**
     * The actual WebSocket used to connect to the Voice WebSocket Server.
     * @type {WebSocket}
     */
    this.ws = WebSocket.create(`wss://${this.connection.authentication.endpoint}/`, { v: 4 });
    this.emit('debug', `[WS] connecting, ${this.attempts} attempts, ${this.ws.url}`);
    this.ws.onopen = this.onOpen.bind(this);
    this.ws.onmessage = this.onMessage.bind(this);
    this.ws.onclose = this.onClose.bind(this);
    this.ws.onerror = this.onError.bind(this);
  }
  /**
   * Sends data to the WebSocket if it is open.
   * @param {string} data The data to send to the WebSocket
   * @returns {Promise<string>}
   */
  send(data) {
    this.emit('debug', `[WS] >> ${data}`);
    return new Promise((resolve, reject) => {
      // Throwing inside the executor rejects the returned promise.
      if (!this.ws || this.ws.readyState !== WebSocket.OPEN) throw new Error('WS_NOT_OPEN', data);
      this.ws.send(data, null, error => {
        if (error) reject(error);
        else resolve(data);
      });
    });
  }
  /**
   * JSON.stringify's a packet and then sends it to the WebSocket Server.
   * @param {Object} packet The packet to send
   * @returns {Promise<string>}
   */
  sendPacket(packet) {
    try {
      packet = JSON.stringify(packet);
    } catch (error) {
      return Promise.reject(error);
    }
    return this.send(packet);
  }
  /**
   * Called whenever the WebSocket opens; identifies with the voice gateway.
   */
  onOpen() {
    this.emit('debug', `[WS] opened at gateway ${this.connection.authentication.endpoint}`);
    this.sendPacket({
      op: OPCodes.DISPATCH,
      d: {
        server_id: this.connection.channel.guild.id,
        user_id: this.client.user.id,
        token: this.connection.authentication.token,
        session_id: this.connection.authentication.sessionID,
      },
    }).catch(() => {
      this.emit('error', new Error('VOICE_JOIN_SOCKET_CLOSED'));
    });
  }
  /**
   * Called whenever a message is received from the WebSocket.
   * @param {MessageEvent} event The message event that was received
   * @returns {void}
   */
  onMessage(event) {
    try {
      return this.onPacket(WebSocket.unpack(event.data, 'json'));
    } catch (error) {
      return this.onError(error);
    }
  }
  /**
   * Called whenever the connection to the WebSocket server is lost.
   */
  onClose() {
    this.emit('debug', `[WS] closed`);
    // Reconnect with a linearly increasing delay unless shut down for good.
    if (!this.dead) this.client.setTimeout(this.connect.bind(this), this.attempts * 1000);
  }
  /**
   * Called whenever an error occurs with the WebSocket.
   * @param {Error} error The error that occurred
   */
  onError(error) {
    this.emit('debug', `[WS] Error: ${error}`);
    this.emit('error', error);
  }
  /**
   * Called whenever a valid packet is received from the WebSocket.
   * @param {Object} packet The received packet
   */
  onPacket(packet) {
    this.emit('debug', `[WS] << ${JSON.stringify(packet)}`);
    switch (packet.op) {
      case VoiceOPCodes.HELLO:
        this.setHeartbeat(packet.d.heartbeat_interval);
        break;
      case VoiceOPCodes.READY:
        /**
         * Emitted once the voice WebSocket receives the ready packet.
         * @param {Object} packet The received packet
         * @event VoiceWebSocket#ready
         */
        this.emit('ready', packet.d);
        break;
      /* eslint-disable no-case-declarations */
      case VoiceOPCodes.SESSION_DESCRIPTION:
        // The secret key arrives as a plain array; convert for the crypto APIs.
        packet.d.secret_key = new Uint8Array(packet.d.secret_key);
        /**
         * Emitted once the Voice Websocket receives a description of this voice session.
         * @param {Object} packet The received packet
         * @event VoiceWebSocket#sessionDescription
         */
        this.emit('sessionDescription', packet.d);
        break;
      case VoiceOPCodes.CLIENT_CONNECT:
        // Track which user owns this ssrc so incoming audio can be attributed.
        this.connection.ssrcMap.set(+packet.d.audio_ssrc, {
          userID: packet.d.user_id,
          speaking: 0,
          hasVideo: Boolean(packet.d.video_ssrc),
        });
        break;
      case VoiceOPCodes.CLIENT_DISCONNECT:
        // End any active receiving stream for the departing user.
        const streamInfo = this.connection.receiver && this.connection.receiver.packets.streams.get(packet.d.user_id);
        if (streamInfo) {
          this.connection.receiver.packets.streams.delete(packet.d.user_id);
          streamInfo.stream.push(null);
        }
        break;
      case VoiceOPCodes.SPEAKING:
        /**
         * Emitted whenever a speaking packet is received.
         * @param {Object} data
         * @event VoiceWebSocket#startSpeaking
         */
        this.emit('startSpeaking', packet.d);
        break;
      default:
        /**
         * Emitted when an unhandled packet is received.
         * @param {Object} packet
         * @event VoiceWebSocket#unknownPacket
         */
        this.emit('unknownPacket', packet);
        break;
    }
  }
  /**
   * Sets an interval at which to send a heartbeat packet to the WebSocket.
   * @param {number} interval The interval at which to send a heartbeat packet
   */
  setHeartbeat(interval) {
    if (!interval || isNaN(interval)) {
      this.onError(new Error('VOICE_INVALID_HEARTBEAT'));
      return;
    }
    if (this.heartbeatInterval) {
      /**
       * Emitted whenever the voice WebSocket encounters a non-fatal error.
       * @param {string} warn The warning
       * @event VoiceWebSocket#warn
       */
      this.emit('warn', 'A voice heartbeat interval is being overwritten');
      this.client.clearInterval(this.heartbeatInterval);
    }
    this.heartbeatInterval = this.client.setInterval(this.sendHeartbeat.bind(this), interval);
  }
  /**
   * Clears a heartbeat interval, if one exists.
   */
  clearHeartbeat() {
    if (!this.heartbeatInterval) {
      this.emit('warn', 'Tried to clear a heartbeat interval that does not exist');
      return;
    }
    this.client.clearInterval(this.heartbeatInterval);
    this.heartbeatInterval = null;
  }
  /**
   * Sends a heartbeat packet with a random nonce.
   */
  sendHeartbeat() {
    this.sendPacket({ op: VoiceOPCodes.HEARTBEAT, d: Math.floor(Math.random() * 10e10) }).catch(() => {
      this.emit('warn', 'Tried to send heartbeat, but connection is not open');
      this.clearHeartbeat();
    });
  }
}
module.exports = VoiceWebSocket;

View File

@@ -1,27 +0,0 @@
'use strict';
const BasePlayer = require('./BasePlayer');
/**
 * An Audio Player for a Voice Connection.
 * @private
 * @extends {BasePlayer}
 */
class AudioPlayer extends BasePlayer {
  constructor(voiceConnection) {
    super();
    /**
     * The voice connection that the player serves
     * @type {VoiceConnection}
     */
    this.voiceConnection = voiceConnection;
  }
  /**
   * Plays a voice broadcast through this player and registers the resulting
   * dispatcher with the broadcast.
   * @param {VoiceBroadcast} broadcast The broadcast to play
   * @param {Object} options Options passed through to the dispatcher
   * @returns {StreamDispatcher} The dispatcher handling playback
   */
  playBroadcast(broadcast, options) {
    const dispatcher = this.createDispatcher(options, { broadcast });
    broadcast.add(dispatcher);
    return dispatcher;
  }
}
module.exports = AudioPlayer;

View File

@@ -1,92 +0,0 @@
'use strict';
const EventEmitter = require('events');
const { Readable: ReadableStream } = require('stream');
const prism = require('prism-media');
const StreamDispatcher = require('../dispatcher/StreamDispatcher');
// ffmpeg arguments producing 48kHz stereo signed 16-bit little-endian PCM.
const FFMPEG_ARGUMENTS = ['-analyzeduration', '0', '-loglevel', '0', '-f', 's16le', '-ar', '48000', '-ac', '2'];
/**
 * An Audio Player for a Voice Connection.
 * @private
 * @extends {EventEmitter}
 */
class BasePlayer extends EventEmitter {
  constructor() {
    super();
    // The currently active StreamDispatcher, if any.
    this.dispatcher = null;
    this.streamingData = {
      channels: 2,
      sequence: 0,
      timestamp: 0,
    };
  }
  destroy() {
    this.destroyDispatcher();
  }
  // Destroys and forgets the active dispatcher, if one exists.
  destroyDispatcher() {
    if (this.dispatcher) {
      this.dispatcher.destroy();
      this.dispatcher = null;
    }
  }
  // Plays an arbitrary input (file path/URL/stream) by piping it through
  // ffmpeg to obtain PCM, then hands off to playPCMStream.
  playUnknown(input, options) {
    this.destroyDispatcher();
    const isStream = input instanceof ReadableStream;
    // Non-stream inputs are passed to ffmpeg as its -i argument instead.
    const args = isStream ? FFMPEG_ARGUMENTS.slice() : ['-i', input, ...FFMPEG_ARGUMENTS];
    if (options.seek) args.unshift('-ss', String(options.seek));
    const ffmpeg = new prism.FFmpeg({ args });
    const streams = { ffmpeg };
    if (isStream) {
      streams.input = input;
      input.pipe(ffmpeg);
    }
    return this.playPCMStream(ffmpeg, options, streams);
  }
  // Plays 16-bit signed LE stereo PCM, inserting a volume transform before
  // opus encoding unless options.volume === false.
  playPCMStream(stream, options, streams = {}) {
    this.destroyDispatcher();
    const opus = (streams.opus = new prism.opus.Encoder({ channels: 2, rate: 48000, frameSize: 960 }));
    if (options && options.volume === false) {
      stream.pipe(opus);
      return this.playOpusStream(opus, options, streams);
    }
    streams.volume = new prism.VolumeTransformer({ type: 's16le', volume: options ? options.volume : 1 });
    stream.pipe(streams.volume).pipe(opus);
    return this.playOpusStream(opus, options, streams);
  }
  // Plays a stream of opus packets. If volume control is requested and no raw
  // input exists yet, the stream is re-encoded (decode -> volume -> encode).
  playOpusStream(stream, options, streams = {}) {
    this.destroyDispatcher();
    streams.opus = stream;
    if (options.volume !== false && !streams.input) {
      streams.input = stream;
      const decoder = new prism.opus.Decoder({ channels: 2, rate: 48000, frameSize: 960 });
      streams.volume = new prism.VolumeTransformer({ type: 's16le', volume: options ? options.volume : 1 });
      streams.opus = stream
        .pipe(decoder)
        .pipe(streams.volume)
        .pipe(new prism.opus.Encoder({ channels: 2, rate: 48000, frameSize: 960 }));
    }
    const dispatcher = this.createDispatcher(options, streams);
    streams.opus.pipe(dispatcher);
    return dispatcher;
  }
  // Replaces the current dispatcher with a fresh StreamDispatcher.
  createDispatcher(options, streams, broadcast) {
    this.destroyDispatcher();
    const dispatcher = (this.dispatcher = new StreamDispatcher(this, options, streams, broadcast));
    return dispatcher;
  }
}
module.exports = BasePlayer;

View File

@@ -1,28 +0,0 @@
'use strict';
const BasePlayer = require('./BasePlayer');
const BroadcastDispatcher = require('../dispatcher/BroadcastDispatcher');
/**
 * An Audio Player for a Voice Broadcast.
 * @private
 * @extends {BasePlayer}
 */
class AudioPlayer extends BasePlayer {
  constructor(broadcast) {
    super();
    /**
     * The broadcast that the player serves
     * @type {VoiceBroadcast}
     */
    this.broadcast = broadcast;
  }
  /**
   * Creates a BroadcastDispatcher (rather than a plain StreamDispatcher)
   * so playback can be fanned out to subscribed dispatchers.
   * @param {Object} options Options for the dispatcher
   * @param {Object} streams The streams the dispatcher operates on
   * @returns {BroadcastDispatcher} The created dispatcher
   */
  createDispatcher(options, streams) {
    this.destroyDispatcher();
    const dispatcher = (this.dispatcher = new BroadcastDispatcher(this, options, streams));
    return dispatcher;
  }
}
module.exports = AudioPlayer;

View File

@@ -1,144 +0,0 @@
'use strict';
const EventEmitter = require('events');
const Speaking = require('../../../util/Speaking');
const secretbox = require('../util/Secretbox');
const { SILENCE_FRAME } = require('../util/Silence');
// The delay between packets when a user is considered to have stopped speaking
// https://github.com/discordjs/discord.js/issues/3524#issuecomment-540373200
const DISCORD_SPEAKING_DELAY = 250;
// Push-driven readable: _read is a no-op because PacketHandler pushes decoded
// packets into the stream as they arrive rather than having them pulled.
class Readable extends require('stream').Readable {
  _read() {} // eslint-disable-line no-empty-function
}
/**
 * Decrypts incoming voice packets, tracks speaking state per ssrc, and routes
 * opus audio into per-user receiving streams.
 * @extends {EventEmitter}
 * @private
 */
class PacketHandler extends EventEmitter {
  constructor(receiver) {
    super();
    // Reusable 24-byte nonce buffer for decryption.
    this.nonce = Buffer.alloc(24);
    this.receiver = receiver;
    // userID -> { stream, end } for active receiving streams.
    this.streams = new Map();
    // ssrc -> timeout used to infer when a user stops speaking.
    this.speakingTimeouts = new Map();
  }
  get connection() {
    return this.receiver.connection;
  }
  // Ends a user's stream when their audio goes silent, for 'silence'-ended streams.
  _stoppedSpeaking(userID) {
    const streamInfo = this.streams.get(userID);
    if (streamInfo && streamInfo.end === 'silence') {
      this.streams.delete(userID);
      streamInfo.stream.push(null);
    }
  }
  /**
   * Creates (or returns the existing) receiving stream for a user.
   * @param {string} user The user ID to receive for
   * @param {string} end When the stream should end ('silence' or 'manual')
   * @returns {Readable} The receiving stream
   */
  makeStream(user, end) {
    if (this.streams.has(user)) return this.streams.get(user).stream;
    const stream = new Readable();
    stream.on('end', () => this.streams.delete(user));
    this.streams.set(user, { stream, end });
    return stream;
  }
  /**
   * Decrypts a raw voice packet and strips RTP header extensions.
   * @param {Buffer} buffer The raw packet from UDP
   * @returns {Buffer|Error} The opus payload, or an Error on decryption failure
   */
  parseBuffer(buffer) {
    const { secret_key, mode } = this.receiver.connection.authentication;
    // Choose correct nonce depending on encryption
    let end;
    if (mode === 'xsalsa20_poly1305_lite') {
      buffer.copy(this.nonce, 0, buffer.length - 4);
      end = buffer.length - 4;
    } else if (mode === 'xsalsa20_poly1305_suffix') {
      buffer.copy(this.nonce, 0, buffer.length - 24);
      end = buffer.length - 24;
    } else {
      buffer.copy(this.nonce, 0, 0, 12);
    }
    // Open packet
    let packet = secretbox.methods.open(buffer.slice(12, end), this.nonce, secret_key);
    if (!packet) return new Error('Failed to decrypt voice packet');
    packet = Buffer.from(packet);
    // Strip RTP Header Extensions (one-byte only)
    if (packet[0] === 0xbe && packet[1] === 0xde && packet.length > 4) {
      const headerExtensionLength = packet.readUInt16BE(2);
      let offset = 4;
      for (let i = 0; i < headerExtensionLength; i++) {
        const byte = packet[offset];
        offset++;
        if (byte === 0) continue;
        offset += 1 + (0b1111 & (byte >> 4));
      }
      // Skip over undocumented Discord byte (if present)
      const byte = packet.readUInt8(offset);
      if (byte === 0x00 || byte === 0x02) offset++;
      packet = packet.slice(offset);
    }
    return packet;
  }
  /**
   * Handles one raw incoming voice packet: attributes it to a user, updates
   * speaking timeouts, and pushes the opus payload into their stream.
   * @param {Buffer} buffer The raw packet from UDP
   */
  push(buffer) {
    const ssrc = buffer.readUInt32BE(8);
    const userStat = this.connection.ssrcMap.get(ssrc);
    if (!userStat) return;
    let opusPacket;
    const streamInfo = this.streams.get(userStat.userID);
    // If the user is in video, we need to check if the packet is just silence
    if (userStat.hasVideo) {
      opusPacket = this.parseBuffer(buffer);
      if (opusPacket instanceof Error) {
        // Only emit an error if we were actively receiving packets from this user
        if (streamInfo) this.emit('error', opusPacket);
        // Fix: always bail out on a decryption failure. Previously, when no
        // stream was active, execution fell through to
        // SILENCE_FRAME.equals(opusPacket) with an Error instance, which threw.
        return;
      }
      if (SILENCE_FRAME.equals(opusPacket)) {
        // If this is a silence frame, pretend we never received it
        return;
      }
    }
    let speakingTimeout = this.speakingTimeouts.get(ssrc);
    if (typeof speakingTimeout === 'undefined') {
      // Ensure at least the speaking bit is set.
      // As the object is by reference, it's only needed once per client re-connect.
      if (userStat.speaking === 0) {
        userStat.speaking = Speaking.FLAGS.SPEAKING;
      }
      this.connection.onSpeaking({ user_id: userStat.userID, ssrc: ssrc, speaking: userStat.speaking });
      // Schedule a "stopped speaking" notification for when packets stop arriving.
      speakingTimeout = this.receiver.connection.client.setTimeout(() => {
        try {
          this.connection.onSpeaking({ user_id: userStat.userID, ssrc: ssrc, speaking: 0 });
          this.receiver.connection.client.clearTimeout(speakingTimeout);
          this.speakingTimeouts.delete(ssrc);
        } catch {
          // Connection already closed, ignore
        }
      }, DISCORD_SPEAKING_DELAY);
      this.speakingTimeouts.set(ssrc, speakingTimeout);
    } else {
      // Still speaking: push the stop-speaking deadline back.
      speakingTimeout.refresh();
    }
    if (streamInfo) {
      const { stream } = streamInfo;
      if (!opusPacket) {
        opusPacket = this.parseBuffer(buffer);
        if (opusPacket instanceof Error) {
          this.emit('error', opusPacket);
          return;
        }
      }
      stream.push(opusPacket);
    }
  }
}
module.exports = PacketHandler;

View File

@@ -1,59 +0,0 @@
'use strict';
const EventEmitter = require('events');
const prism = require('prism-media');
const PacketHandler = require('./PacketHandler');
const { Error } = require('../../../errors');
/**
 * Receives audio packets from a voice connection.
 * @example
 * const receiver = connection.createReceiver();
 * // opusStream is a ReadableStream - that means you could play it back to a voice channel if you wanted to!
 * const opusStream = receiver.createStream(user);
 */
class VoiceReceiver extends EventEmitter {
  constructor(connection) {
    super();
    // The voice connection this receiver is bound to.
    this.connection = connection;
    // Handles decryption and per-user routing of incoming packets.
    this.packets = new PacketHandler(this);
    /**
     * Emitted whenever there is a warning
     * @event VoiceReceiver#debug
     * @param {Error|string} error The error or message to debug
     */
    this.packets.on('error', err => this.emit('debug', err));
  }
  /**
   * Options passed to `VoiceReceiver#createStream`.
   * @typedef {Object} ReceiveStreamOptions
   * @property {string} [mode='opus'] The mode for audio output. This defaults to opus, meaning discord.js won't decode
   * the packets for you. You can set this to 'pcm' so that the stream's output will be 16-bit little-endian stereo
   * audio
   * @property {string} [end='silence'] When the stream should be destroyed. If `silence`, this will be when the user
   * stops talking. Otherwise, if `manual`, this should be handled by you.
   */
  /**
   * Creates a new audio receiving stream. If a stream already exists for a user, then that stream will be returned
   * rather than generating a new one.
   * @param {UserResolvable} user The user to start listening to.
   * @param {ReceiveStreamOptions} options Options.
   * @returns {ReadableStream}
   */
  createStream(user, { mode = 'opus', end = 'silence' } = {}) {
    user = this.connection.client.users.resolve(user);
    if (!user) throw new Error('VOICE_USER_MISSING');
    const stream = this.packets.makeStream(user.id, end);
    if (mode === 'pcm') {
      // Decode opus to PCM for the caller; tear down the source when the
      // decoder closes.
      const decoder = new prism.opus.Decoder({ channels: 2, rate: 48000, frameSize: 960 });
      decoder.once('close', () => stream.destroy());
      stream.pipe(decoder);
      return decoder;
    }
    return stream;
  }
}
module.exports = VoiceReceiver;

View File

@@ -1,94 +0,0 @@
'use strict';
const { Readable } = require('stream');
const prism = require('prism-media');
const { Error } = require('../../../errors');
/**
* Options that can be passed to stream-playing methods:
* @typedef {Object} StreamOptions
* @property {StreamType} [type='unknown'] The type of stream.
* @property {number} [seek=0] The time to seek to, will be ignored when playing `ogg/opus` or `webm/opus` streams
* @property {number|boolean} [volume=1] The volume to play at. Set this to false to disable volume transforms for
* this stream to improve performance.
* @property {number} [plp] Expected packet loss percentage
* @property {boolean} [fec] Enabled forward error correction
* @property {number|string} [bitrate=96] The bitrate (quality) of the audio in kbps.
* If set to 'auto', the voice channel's bitrate will be used
* @property {number} [highWaterMark=12] The maximum number of opus packets to make and store before they are
* actually needed. See https://nodejs.org/en/docs/guides/backpressuring-in-streams/. Setting this value to
* 1 means that changes in volume will be more instant.
*/
/**
* An option passed as part of `StreamOptions` specifying the type of the stream.
* * `unknown`: The default type, streams/input will be passed through to ffmpeg before encoding.
* Will play most streams.
* * `converted`: Play a stream of 16bit signed stereo PCM data, skipping ffmpeg.
* * `opus`: Play a stream of opus packets, skipping ffmpeg. You lose the ability to alter volume.
* * `ogg/opus`: Play an ogg file with the opus encoding, skipping ffmpeg. You lose the ability to alter volume.
* * `webm/opus`: Play a webm file with opus audio, skipping ffmpeg. You lose the ability to alter volume.
* @typedef {string} StreamType
*/
/**
 * An interface class to allow you to play audio over VoiceConnections and VoiceBroadcasts.
 */
class PlayInterface {
  constructor(player) {
    // The underlying audio player that actually performs playback.
    this.player = player;
  }
  /**
   * Play an audio resource.
   * @param {VoiceBroadcast|ReadableStream|string} resource The resource to play.
   * @param {StreamOptions} [options] The options to play.
   * @example
   * // Play a local audio file
   * connection.play('/home/hydrabolt/audio.mp3', { volume: 0.5 });
   * @example
   * // Play a ReadableStream
   * connection.play(ytdl('https://www.youtube.com/watch?v=ZlAU_w7-Xp8', { quality: 'highestaudio' }));
   * @example
   * // Play a voice broadcast
   * const broadcast = client.voice.createBroadcast();
   * broadcast.play('/home/hydrabolt/audio.mp3');
   * connection.play(broadcast);
   * @example
   * // Using different protocols: https://ffmpeg.org/ffmpeg-protocols.html
   * connection.play('http://www.sample-videos.com/audio/mp3/wave.mp3');
   * @returns {StreamDispatcher}
   */
  play(resource, options = {}) {
    // Required here (not top-level) to avoid a circular dependency.
    const VoiceBroadcast = require('../VoiceBroadcast');
    if (resource instanceof VoiceBroadcast) {
      if (!this.player.playBroadcast) throw new Error('VOICE_PLAY_INTERFACE_NO_BROADCAST');
      return this.player.playBroadcast(resource, options);
    }
    if (resource instanceof Readable || typeof resource === 'string') {
      // Dispatch on the declared stream type; see the StreamType typedef above.
      const type = options.type || 'unknown';
      if (type === 'unknown') {
        return this.player.playUnknown(resource, options);
      } else if (type === 'converted') {
        return this.player.playPCMStream(resource, options);
      } else if (type === 'opus') {
        return this.player.playOpusStream(resource, options);
      } else if (type === 'ogg/opus') {
        if (!(resource instanceof Readable)) throw new Error('VOICE_PRISM_DEMUXERS_NEED_STREAM');
        return this.player.playOpusStream(resource.pipe(new prism.opus.OggDemuxer()), options);
      } else if (type === 'webm/opus') {
        if (!(resource instanceof Readable)) throw new Error('VOICE_PRISM_DEMUXERS_NEED_STREAM');
        return this.player.playOpusStream(resource.pipe(new prism.opus.WebmDemuxer()), options);
      }
    }
    throw new Error('VOICE_PLAY_INTERFACE_BAD_TYPE');
  }
  // Copies play() onto another class's prototype (mixin-style reuse).
  static applyToClass(structure) {
    for (const prop of ['play']) {
      Object.defineProperty(structure.prototype, prop, Object.getOwnPropertyDescriptor(PlayInterface.prototype, prop));
    }
  }
}
module.exports = PlayInterface;

View File

@@ -1,32 +0,0 @@
'use strict';
// Supported encryption libraries, in order of preference, each adapted to a
// common { open, close, random } interface.
const libs = {
  sodium: sodium => ({
    open: sodium.api.crypto_secretbox_open_easy,
    close: sodium.api.crypto_secretbox_easy,
    random: n => sodium.randombytes_buf(n),
  }),
  'libsodium-wrappers': sodium => ({
    open: sodium.crypto_secretbox_open_easy,
    close: sodium.crypto_secretbox_easy,
    random: n => sodium.randombytes_buf(n),
  }),
  tweetnacl: tweetnacl => ({
    open: tweetnacl.secretbox.open,
    close: tweetnacl.secretbox,
    random: n => tweetnacl.randomBytes(n),
  }),
};
// Populated asynchronously below with the first installed library's methods;
// stays empty if none of the supported libraries can be required.
exports.methods = {};
(async () => {
  for (const libName of Object.keys(libs)) {
    try {
      const lib = require(libName);
      if (libName === 'libsodium-wrappers' && lib.ready) await lib.ready; // eslint-disable-line no-await-in-loop
      exports.methods = libs[libName](lib);
      break;
    } catch {} // eslint-disable-line no-empty
  }
})();

View File

@@ -1,15 +0,0 @@
'use strict';
const { Readable } = require('stream');
// The bytes of a single opus silence frame.
const SILENCE_FRAME = Buffer.from([0xf8, 0xff, 0xfe]);
/**
 * A readable stream that endlessly produces opus silence frames.
 */
class Silence extends Readable {
  _read() {
    this.push(SILENCE_FRAME);
  }
}
// Exposed so consumers can compare incoming packets against the silence frame.
Silence.SILENCE_FRAME = SILENCE_FRAME;
module.exports = Silence;

View File

@@ -1,103 +0,0 @@
'use strict';
const EventEmitter = require('events');
/**
 * An interface class for volume transformation.
 * @extends {EventEmitter}
 */
class VolumeInterface extends EventEmitter {
  constructor({ volume = 1 } = {}) {
    super();
    this.setVolume(volume);
  }

  /**
   * Whether or not the volume of this stream is editable
   * @type {boolean}
   * @readonly
   */
  get volumeEditable() {
    return true;
  }

  /**
   * The current volume of the stream
   * @type {number}
   * @readonly
   */
  get volume() {
    return this._volume;
  }

  /**
   * The current volume of the stream in decibels
   * @type {number}
   * @readonly
   */
  get volumeDecibels() {
    return 20 * Math.log10(this.volume);
  }

  /**
   * The current volume of the stream from a logarithmic scale
   * @type {number}
   * @readonly
   */
  get volumeLogarithmic() {
    return this.volume ** (1 / 1.660964);
  }

  /**
   * Scales 16-bit little-endian PCM samples in a buffer by a volume multiplier.
   * @param {Buffer} buffer The PCM buffer to transform
   * @param {number} [volume] The multiplier to apply; falsy values (including 0)
   * fall back to the current stream volume, matching historical behavior
   * @returns {Buffer} A new scaled buffer, or the input itself when the gain is 1
   */
  applyVolume(buffer, volume) {
    const gain = volume || this._volume;
    if (gain === 1) return buffer;
    const transformed = Buffer.alloc(buffer.length);
    for (let offset = 0; offset + 1 < buffer.length; offset += 2) {
      const sample = Math.floor(gain * buffer.readInt16LE(offset));
      // Clamp to the signed 16-bit range to avoid wraparound distortion.
      transformed.writeInt16LE(Math.min(32767, Math.max(-32767, sample)), offset);
    }
    return transformed;
  }

  /**
   * Sets the volume relative to the input stream - i.e. 1 is normal, 0.5 is half, 2 is double.
   * @param {number} volume The volume that you want to set
   */
  setVolume(volume) {
    /**
     * Emitted when the volume of this interface changes.
     * @event VolumeInterface#volumeChange
     * @param {number} oldVolume The old volume of this interface
     * @param {number} newVolume The new volume of this interface
     */
    this.emit('volumeChange', this._volume, volume);
    this._volume = volume;
  }

  /**
   * Sets the volume in decibels.
   * @param {number} db The decibels
   */
  setVolumeDecibels(db) {
    this.setVolume(10 ** (db / 20));
  }

  /**
   * Sets the volume so that a perceived value of 0.5 is half the perceived volume etc.
   * @param {number} value The value for the volume
   */
  setVolumeLogarithmic(value) {
    this.setVolume(value ** 1.660964);
  }
}
// Accessors/methods mixed into other classes (e.g. StreamDispatcher) below.
const props = ['volumeDecibels', 'volumeLogarithmic', 'setVolumeDecibels', 'setVolumeLogarithmic'];
/**
 * Copies the volume helper accessors/methods onto another class's prototype.
 * @param {Function} structure The class to receive the volume helpers
 */
exports.applyToClass = function applyToClass(structure) {
  for (const prop of props) {
    Object.defineProperty(structure.prototype, prop, Object.getOwnPropertyDescriptor(VolumeInterface.prototype, prop));
  }
};

View File

@@ -55,19 +55,6 @@ const Messages = {
USER_NO_DMCHANNEL: 'No DM Channel exists!',
VOICE_INVALID_HEARTBEAT: 'Tried to set voice heartbeat but no valid interval was specified.',
VOICE_USER_MISSING: "Couldn't resolve the user to create stream.",
VOICE_JOIN_CHANNEL: (full = false) =>
`You do not have permission to join this voice channel${full ? '; it is full.' : '.'}`,
VOICE_CONNECTION_TIMEOUT: 'Connection not established within 15 seconds.',
VOICE_TOKEN_ABSENT: 'Token not provided from voice server packet.',
VOICE_SESSION_ABSENT: 'Session ID not supplied.',
VOICE_INVALID_ENDPOINT: 'Invalid endpoint received.',
VOICE_CONNECTION_ATTEMPTS_EXCEEDED: attempts => `Too many connection attempts (${attempts}).`,
VOICE_JOIN_SOCKET_CLOSED: 'Tried to send join packet, but the WebSocket is not open.',
VOICE_PLAY_INTERFACE_NO_BROADCAST: 'A broadcast cannot be played in this context.',
VOICE_PLAY_INTERFACE_BAD_TYPE: 'Unknown stream type',
VOICE_PRISM_DEMUXERS_NEED_STREAM: 'To play a webm/ogg stream, you need to pass a ReadableStream.',
VOICE_NOT_STAGE_CHANNEL: 'You are only allowed to do this in stage channels.',
VOICE_STATE_UNCACHED_MEMBER: 'The member of this voice state is uncached.',
@@ -75,10 +62,6 @@ const Messages = {
'You cannot self-deafen/mute/request to speak on VoiceStates that do not belong to the ClientUser.',
VOICE_STATE_INVALID_TYPE: name => `${name} must be a boolean.`,
UDP_SEND_FAIL: 'Tried to send a UDP packet, but there is no socket available.',
UDP_ADDRESS_MALFORMED: 'Malformed UDP address or port.',
UDP_CONNECTION_EXISTS: 'There is already an existing UDP connection.',
REQ_RESOURCE_TYPE: 'The resource must be a string, Buffer or a valid file stream.',
IMAGE_FORMAT: format => `Invalid image format: ${format}`,

View File

@@ -66,30 +66,6 @@ class BaseGuildVoiceChannel extends GuildChannel {
return true;
}
/**
* Attempts to join this voice-based channel.
* @returns {Promise<VoiceConnection>}
* @example
* // Join a voice-based channel
* channel.join()
* .then(connection => console.log('Connected!'))
* .catch(console.error);
*/
join() {
return this.client.voice.joinChannel(this);
}
/**
* Leaves this voice-based channel.
* @example
* // Leave a voice-based channel
* channel.leave();
*/
leave() {
const connection = this.client.voice.connections.get(this.guild.id);
if (connection?.channel.id === this.id) connection.disconnect();
}
/**
* Sets the RTC region of the channel.
* @param {?string} region The new region of the channel. Set to `null` to remove a specific region for the channel

View File

@@ -25,6 +25,7 @@ const {
VerificationLevels,
ExplicitContentFilterLevels,
NSFWLevels,
Status,
} = require('../util/Constants');
const DataResolver = require('../util/DataResolver');
const SystemChannelFlags = require('../util/SystemChannelFlags');
@@ -1328,6 +1329,35 @@ class Guild extends BaseGuild {
return json;
}
/**
* The voice state adapter for this guild that can be used with @discordjs/voice to play audio in voice
* and stage channels.
* @type {Function}
* @readonly
* @example
* const { joinVoiceChannel } = require('@discordjs/voice');
* const voiceConnection = joinVoiceChannel({
* channelId: channel.id,
* guildId: channel.guild.id,
* adapterCreator: channel.guild.voiceAdapterCreator,
* });
*/
get voiceAdapterCreator() {
return methods => {
this.client.voice.adapters.set(this.id, methods);
return {
sendPayload: data => {
if (this.shard.status !== Status.READY) return false;
this.shard.send(data);
return true;
},
destroy: () => {
this.client.voice.adapters.delete(this.id);
},
};
};
}
/**
* Creates a collection of this guild's roles, sorted by their position and IDs.
* @returns {Collection<Snowflake, Role>}

View File

@@ -100,16 +100,6 @@ class VoiceState extends Base {
return this.guild.channels.cache.get(this.channelID) || null;
}
/**
* If this is a voice state of the client user, then this will refer to the active VoiceConnection for this guild
* @type {?VoiceConnection}
* @readonly
*/
get connection() {
if (this.id !== this.client.user.id) return null;
return this.client.voice.connections.get(this.guild.id) || null;
}
/**
* Whether this member is either self-deafened or server-deafened
* @type {?boolean}
@@ -128,16 +118,6 @@ class VoiceState extends Base {
return this.serverMute || this.selfMute;
}
/**
* Whether this member is currently speaking. A boolean if the information is available (aka
* the bot is connected to any voice channel or stage channel in the guild), otherwise this is `null`
* @type {?boolean}
* @readonly
*/
get speaking() {
return this.channel && this.channel.connection ? Boolean(this.channel.connection._speaking.get(this.id)) : null;
}
/**
* Mutes/unmutes the member of this voice state.
* @param {boolean} mute Whether or not the member should be muted
@@ -180,34 +160,6 @@ class VoiceState extends Base {
: Promise.reject(new Error('VOICE_STATE_UNCACHED_MEMBER'));
}
/**
* Self-mutes/unmutes the bot for this voice state.
* @param {boolean} mute Whether or not the bot should be self-muted
* @returns {Promise<boolean>} true if the voice state was successfully updated, otherwise false
*/
async setSelfMute(mute) {
if (this.id !== this.client.user.id) throw new Error('VOICE_STATE_NOT_OWN');
if (typeof mute !== 'boolean') throw new TypeError('VOICE_STATE_INVALID_TYPE', 'mute');
if (!this.connection) return false;
this.selfMute = mute;
await this.connection.sendVoiceStateUpdate();
return true;
}
/**
* Self-deafens/undeafens the bot for this voice state.
* @param {boolean} deaf Whether or not the bot should be self-deafened
* @returns {Promise<boolean>} true if the voice state was successfully updated, otherwise false
*/
async setSelfDeaf(deaf) {
if (this.id !== this.client.user.id) return new Error('VOICE_STATE_NOT_OWN');
if (typeof deaf !== 'boolean') return new TypeError('VOICE_STATE_INVALID_TYPE', 'deaf');
if (!this.connection) return false;
this.selfDeaf = deaf;
await this.connection.sendVoiceStateUpdate();
return true;
}
/**
* Toggles the request to speak in the channel.
* Only applicable for stage channels and for the client's own voice state.

View File

@@ -16,6 +16,7 @@
"esnext.symbol"
],
"sourceMap": false,
"skipDefaultLibCheck": true
"skipDefaultLibCheck": true,
"skipLibCheck": true
}
}

139
typings/index.d.ts vendored
View File

@@ -91,6 +91,7 @@ declare module 'discord.js' {
import { EventEmitter } from 'events';
import { PathLike } from 'fs';
import { Readable, Stream, Writable } from 'stream';
import { DiscordGatewayAdapterCreator, DiscordGatewayAdapterLibraryMethods } from '@discordjs/voice';
import * as WebSocket from 'ws';
export const version: string;
@@ -259,8 +260,6 @@ declare module 'discord.js' {
public rtcRegion: string | null;
public bitrate: number;
public userLimit: number;
public join(): Promise<VoiceConnection>;
public leave(): void;
public setRTCRegion(region: string | null): Promise<this>;
}
@@ -271,10 +270,6 @@ declare module 'discord.js' {
private static resolveType(type: MessageComponentTypeResolvable): MessageComponentType;
}
class BroadcastDispatcher extends VolumeMixin(StreamDispatcher) {
public broadcast: VoiceBroadcast;
}
export class BitField<S extends string, N extends number | bigint = number> {
constructor(bits?: BitFieldResolvable<S, N>);
public bitfield: N;
@@ -402,12 +397,7 @@ declare module 'discord.js' {
export class ClientVoiceManager {
constructor(client: Client);
public readonly client: Client;
public connections: Collection<Snowflake, VoiceConnection>;
public broadcasts: VoiceBroadcast[];
private joinChannel(channel: VoiceChannel | StageChannel): Promise<VoiceConnection>;
public createBroadcast(): VoiceBroadcast;
public adapters: Map<Snowflake, DiscordGatewayAdapterLibraryMethods>;
}
export abstract class Collector<K, V> extends EventEmitter {
@@ -791,6 +781,7 @@ declare module 'discord.js' {
public vanityURLCode: string | null;
public vanityURLUses: number | null;
public verificationLevel: VerificationLevel;
public readonly voiceAdapterCreator: DiscordGatewayAdapterCreator;
public readonly voiceStates: VoiceStateManager;
public readonly widgetChannel: TextChannel | null;
public widgetChannelID: Snowflake | null;
@@ -1733,40 +1724,6 @@ declare module 'discord.js' {
public type: 'store';
}
class StreamDispatcher extends VolumeMixin(Writable) {
constructor(player: unknown, options?: StreamOptions, streams?: unknown);
public readonly bitrateEditable: boolean;
public broadcast: VoiceBroadcast | null;
public readonly paused: boolean;
public pausedSince: number | null;
public readonly pausedTime: number;
public player: unknown;
public readonly streamTime: number;
public readonly totalStreamTime: number;
public pause(silence?: boolean): void;
public resume(): void;
public setBitrate(value: number | 'auto'): boolean;
public setFEC(enabled: boolean): boolean;
public setPLP(value: number): boolean;
public on(event: 'close' | 'drain' | 'finish' | 'start', listener: () => Awaited<void>): this;
public on(event: 'debug', listener: (info: string) => Awaited<void>): this;
public on(event: 'error', listener: (err: Error) => Awaited<void>): this;
public on(event: 'pipe' | 'unpipe', listener: (src: Readable) => Awaited<void>): this;
public on(event: 'speaking', listener: (speaking: boolean) => Awaited<void>): this;
public on(event: 'volumeChange', listener: (oldVolume: number, newVolume: number) => Awaited<void>): this;
public on(event: string, listener: (...args: any[]) => Awaited<void>): this;
public once(event: 'close' | 'drain' | 'finish' | 'start', listener: () => Awaited<void>): this;
public once(event: 'debug', listener: (info: string) => Awaited<void>): this;
public once(event: 'error', listener: (err: Error) => Awaited<void>): this;
public once(event: 'pipe' | 'unpipe', listener: (src: Readable) => Awaited<void>): this;
public once(event: 'speaking', listener: (speaking: boolean) => Awaited<void>): this;
public once(event: 'volumeChange', listener: (oldVolume: number, newVolume: number) => Awaited<void>): this;
public once(event: string, listener: (...args: any[]) => Awaited<void>): this;
}
export class Structures {
public static get<K extends keyof Extendable>(structure: K): Extendable[K];
public static get(structure: string): (...args: any[]) => void;
@@ -1906,23 +1863,6 @@ declare module 'discord.js' {
public static splitMessage(text: string, options?: SplitOptions): string[];
}
class VoiceBroadcast extends EventEmitter {
constructor(client: Client);
public client: Client;
public subscribers: StreamDispatcher[];
public readonly dispatcher: BroadcastDispatcher | null;
public play(input: string | Readable, options?: StreamOptions): BroadcastDispatcher;
public end(): void;
public on(event: 'end', listener: () => Awaited<void>): this;
public on(event: 'subscribe' | 'unsubscribe', listener: (dispatcher: StreamDispatcher) => Awaited<void>): this;
public on(event: string, listener: (...args: any[]) => Awaited<void>): this;
public once(event: 'end', listener: () => Awaited<void>): this;
public once(event: 'subscribe' | 'unsubscribe', listener: (dispatcher: StreamDispatcher) => Awaited<void>): this;
public once(event: string, listener: (...args: any[]) => Awaited<void>): this;
}
export class VoiceChannel extends BaseGuildVoiceChannel {
public readonly editable: boolean;
public readonly speakable: boolean;
@@ -1931,75 +1871,6 @@ declare module 'discord.js' {
public setUserLimit(userLimit: number, reason?: string): Promise<VoiceChannel>;
}
class VoiceConnection extends EventEmitter {
constructor(voiceManager: ClientVoiceManager, channel: VoiceChannel);
private authentication: unknown;
private sockets: unknown;
private ssrcMap: Map<number, boolean>;
private _speaking: Map<Snowflake, Readonly<Speaking>>;
private _disconnect(): void;
private authenticate(): void;
private authenticateFailed(reason: string): void;
private checkAuthenticated(): void;
private cleanup(): void;
private connect(): void;
private onReady(data: unknown): void;
private onSessionDescription(mode: string, secret: string): void;
private onSpeaking(data: unknown): void;
private reconnect(token: string, endpoint: string): void;
private sendVoiceStateUpdate(options: unknown): Promise<Shard>;
private setSessionID(sessionID: string): void;
private setTokenAndEndpoint(token: string, endpoint: string): void;
private updateChannel(channel: VoiceChannel | StageChannel): void;
public channel: VoiceChannel | StageChannel;
public readonly client: Client;
public readonly dispatcher: StreamDispatcher | null;
public player: unknown;
public receiver: VoiceReceiver;
public speaking: Readonly<Speaking>;
public status: VoiceStatus;
public readonly voice: VoiceState | null;
public voiceManager: ClientVoiceManager;
public disconnect(): void;
public play(input: VoiceBroadcast | Readable | string, options?: StreamOptions): StreamDispatcher;
public setSpeaking(value: BitFieldResolvable<SpeakingString, number>): void;
public on(
event: 'authenticated' | 'closing' | 'newSession' | 'ready' | 'reconnecting',
listener: () => Awaited<void>,
): this;
public on(event: 'debug', listener: (message: string) => Awaited<void>): this;
public on(event: 'error' | 'failed' | 'disconnect', listener: (error: Error) => Awaited<void>): this;
public on(event: 'speaking', listener: (user: User, speaking: Readonly<Speaking>) => Awaited<void>): this;
public on(event: 'warn', listener: (warning: string | Error) => Awaited<void>): this;
public on(event: string, listener: (...args: any[]) => Awaited<void>): this;
public once(
event: 'authenticated' | 'closing' | 'newSession' | 'ready' | 'reconnecting',
listener: () => Awaited<void>,
): this;
public once(event: 'debug', listener: (message: string) => Awaited<void>): this;
public once(event: 'error' | 'failed' | 'disconnect', listener: (error: Error) => Awaited<void>): this;
public once(event: 'speaking', listener: (user: User, speaking: Readonly<Speaking>) => Awaited<void>): this;
public once(event: 'warn', listener: (warning: string | Error) => Awaited<void>): this;
public once(event: string, listener: (...args: any[]) => Awaited<void>): this;
}
class VoiceReceiver extends EventEmitter {
constructor(connection: VoiceConnection);
public createStream(
user: UserResolvable,
options?: { mode?: 'opus' | 'pcm'; end?: 'silence' | 'manual' },
): Readable;
public on(event: 'debug', listener: (error: Error | string) => Awaited<void>): this;
public on(event: string, listener: (...args: any[]) => Awaited<void>): this;
public once(event: 'debug', listener: (error: Error | string) => Awaited<void>): this;
public once(event: string, listener: (...args: any[]) => Awaited<void>): this;
}
export class VoiceRegion {
constructor(data: unknown);
public custom: boolean;
@@ -2015,7 +1886,6 @@ declare module 'discord.js' {
constructor(guild: Guild, data: unknown);
public readonly channel: VoiceChannel | StageChannel | null;
public channelID: Snowflake | null;
public readonly connection: VoiceConnection | null;
public readonly deaf: boolean | null;
public guild: Guild;
public id: Snowflake;
@@ -2030,14 +1900,11 @@ declare module 'discord.js' {
public selfVideo: boolean;
public suppress: boolean;
public requestToSpeakTimestamp: number | null;
public readonly speaking: boolean | null;
public setDeaf(deaf: boolean, reason?: string): Promise<GuildMember>;
public setMute(mute: boolean, reason?: string): Promise<GuildMember>;
public kick(reason?: string): Promise<GuildMember>;
public setChannel(channel: ChannelResolvable | null, reason?: string): Promise<GuildMember>;
public setSelfDeaf(deaf: boolean): Promise<boolean>;
public setSelfMute(mute: boolean): Promise<boolean>;
public setRequestToSpeak(request: boolean): Promise<void>;
public setSuppressed(suppressed: boolean): Promise<void>;
}