From ec1ed15c885c2c8adf99e00cc8b09a4d2e11c174 Mon Sep 17 00:00:00 2001 From: Gus Caplan Date: Sat, 11 Feb 2017 06:04:24 -0600 Subject: [PATCH] Fix request handling (#1180) * clean up ratelimiters, and disable burst until some big questions are handled * burst mode is a work * fix burst again --- src/client/rest/RequestHandlers/Burst.js | 56 +++++++++---------- src/client/rest/RequestHandlers/Sequential.js | 22 ++------ src/util/Constants.js | 2 + 3 files changed, 31 insertions(+), 49 deletions(-) diff --git a/src/client/rest/RequestHandlers/Burst.js b/src/client/rest/RequestHandlers/Burst.js index 2cc1a590c..dd33a4b52 100644 --- a/src/client/rest/RequestHandlers/Burst.js +++ b/src/client/rest/RequestHandlers/Burst.js @@ -3,8 +3,15 @@ const RequestHandler = require('./RequestHandler'); class BurstRequestHandler extends RequestHandler { constructor(restManager, endpoint) { super(restManager, endpoint); - this.requestRemaining = 1; - this.first = true; + + this.client = restManager.client; + + this.limit = Infinity; + this.resetTime = null; + this.remaining = 1; + this.timeDifference = 0; + + this.resetTimeout = null; } push(request) { @@ -12,58 +19,45 @@ class BurstRequestHandler extends RequestHandler { this.handle(); } - handleNext(time) { - if (this.waiting) return; - this.waiting = true; - this.restManager.client.setTimeout(() => { - this.requestRemaining = this.requestLimit; - this.waiting = false; - this.handle(); - }, time); - } - execute(item) { + if (!item) return; item.request.gen().end((err, res) => { if (res && res.headers) { - this.requestLimit = res.headers['x-ratelimit-limit']; - this.requestResetTime = Number(res.headers['x-ratelimit-reset']) * 1000; - this.requestRemaining = Number(res.headers['x-ratelimit-remaining']); + this.limit = Number(res.headers['x-ratelimit-limit']); + this.resetTime = Number(res.headers['x-ratelimit-reset']) * 1000; + this.remaining = Number(res.headers['x-ratelimit-remaining']); this.timeDifference = Date.now() - new 
Date(res.headers.date).getTime(); - this.handleNext( - this.requestResetTime - Date.now() + this.timeDifference + this.restManager.client.options.restTimeOffset - ); } if (err) { if (err.status === 429) { - this.requestRemaining = 0; this.queue.unshift(item); - this.restManager.client.setTimeout(() => { + if (res.headers['x-ratelimit-global']) this.globalLimit = true; + if (this.resetTimeout) return; + this.resetTimeout = this.client.setTimeout(() => { + this.remaining = this.limit; this.globalLimit = false; this.handle(); - }, Number(res.headers['retry-after']) + this.restManager.client.options.restTimeOffset); - if (res.headers['x-ratelimit-global']) this.globalLimit = true; + this.resetTimeout = null; + }, Number(res.headers['retry-after']) + this.client.options.restTimeOffset); } else { item.reject(err); + this.handle(); } } else { this.globalLimit = false; const data = res && res.body ? res.body : {}; item.resolve(data); - if (this.first) { - this.first = false; - this.handle(); - } + this.handle(); } }); } handle() { super.handle(); - if (this.requestRemaining < 1 || this.queue.length === 0 || this.globalLimit) return; - while (this.queue.length > 0 && this.requestRemaining > 0) { - this.execute(this.queue.shift()); - this.requestRemaining--; - } + if (this.remaining <= 0 || this.queue.length === 0 || this.globalLimit) return; + this.execute(this.queue.shift()); + this.remaining--; + this.handle(); } } diff --git a/src/client/rest/RequestHandlers/Sequential.js b/src/client/rest/RequestHandlers/Sequential.js index 0abf36db2..261240df1 100644 --- a/src/client/rest/RequestHandlers/Sequential.js +++ b/src/client/rest/RequestHandlers/Sequential.js @@ -15,12 +15,6 @@ class SequentialRequestHandler extends RequestHandler { constructor(restManager, endpoint) { super(restManager, endpoint); - /** - * Whether this rate limiter is waiting for a response from a request - * @type {boolean} - */ - this.waiting = false; - /** * The endpoint that this handler is handling * 
@type {string} @@ -49,27 +43,24 @@ class SequentialRequestHandler extends RequestHandler { return new Promise(resolve => { item.request.gen().end((err, res) => { if (res && res.headers) { - this.requestLimit = res.headers['x-ratelimit-limit']; + this.requestLimit = Number(res.headers['x-ratelimit-limit']); this.requestResetTime = Number(res.headers['x-ratelimit-reset']) * 1000; this.requestRemaining = Number(res.headers['x-ratelimit-remaining']); this.timeDifference = Date.now() - new Date(res.headers.date).getTime(); } if (err) { if (err.status === 429) { + this.queue.unshift(item); this.restManager.client.setTimeout(() => { - this.waiting = false; this.globalLimit = false; resolve(); }, Number(res.headers['retry-after']) + this.restManager.client.options.restTimeOffset); if (res.headers['x-ratelimit-global']) this.globalLimit = true; } else { - this.queue.shift(); - this.waiting = false; item.reject(err); resolve(err); } } else { - this.queue.shift(); this.globalLimit = false; const data = res && res.body ? 
res.body : {}; item.resolve(data); @@ -82,7 +73,6 @@ class SequentialRequestHandler extends RequestHandler { this.requestResetTime - Date.now() + this.timeDifference + this.restManager.client.options.restTimeOffset ); } else { - this.waiting = false; resolve(data); } } @@ -92,12 +82,8 @@ class SequentialRequestHandler extends RequestHandler { handle() { super.handle(); - - if (this.waiting || this.queue.length === 0 || this.globalLimit) return; - this.waiting = true; - - const item = this.queue[0]; - this.execute(item).then(() => this.handle()); + if (this.remaining === 0 || this.queue.length === 0 || this.globalLimit) return; + this.execute(this.queue.shift()).then(() => this.handle()); } } diff --git a/src/util/Constants.js b/src/util/Constants.js index df1c7b227..cc7569697 100644 --- a/src/util/Constants.js +++ b/src/util/Constants.js @@ -6,6 +6,8 @@ exports.Package = require('../../package.json'); * @property {string} [apiRequestMethod='sequential'] One of `sequential` or `burst`. The sequential handler executes * all requests in the order they are triggered, whereas the burst handler runs multiple in parallel, and doesn't * provide the guarantee of any particular order. + * Burst mode is more likely to hit a 429 ratelimit by its nature; + * be advised that if you are very unlucky, you could be IP banned * @property {number} [shardId=0] ID of the shard to run * @property {number} [shardCount=0] Total number of shards * @property {number} [messageCacheMaxSize=200] Maximum number of messages to cache per channel