Fix request handling (#1180)

* clean up ratelimiters, and disable burst until some big questions are handled

* burst mode is a work in progress

* fix burst again
This commit is contained in:
Gus Caplan
2017-02-11 06:04:24 -06:00
committed by Amish Shah
parent dd6dd6fb59
commit ec1ed15c88
3 changed files with 31 additions and 49 deletions

View File

@@ -3,8 +3,15 @@ const RequestHandler = require('./RequestHandler');
class BurstRequestHandler extends RequestHandler {
constructor(restManager, endpoint) {
super(restManager, endpoint);
this.requestRemaining = 1;
this.first = true;
this.client = restManager.client;
this.limit = Infinity;
this.resetTime = null;
this.remaining = 1;
this.timeDifference = 0;
this.resetTimeout = null;
}
push(request) {
@@ -12,58 +19,45 @@ class BurstRequestHandler extends RequestHandler {
this.handle();
}
handleNext(time) {
if (this.waiting) return;
this.waiting = true;
this.restManager.client.setTimeout(() => {
this.requestRemaining = this.requestLimit;
this.waiting = false;
this.handle();
}, time);
}
execute(item) {
if (!item) return;
item.request.gen().end((err, res) => {
if (res && res.headers) {
this.requestLimit = res.headers['x-ratelimit-limit'];
this.requestResetTime = Number(res.headers['x-ratelimit-reset']) * 1000;
this.requestRemaining = Number(res.headers['x-ratelimit-remaining']);
this.limit = Number(res.headers['x-ratelimit-limit']);
this.resetTime = Number(res.headers['x-ratelimit-reset']) * 1000;
this.remaining = Number(res.headers['x-ratelimit-remaining']);
this.timeDifference = Date.now() - new Date(res.headers.date).getTime();
this.handleNext(
this.requestResetTime - Date.now() + this.timeDifference + this.restManager.client.options.restTimeOffset
);
}
if (err) {
if (err.status === 429) {
this.requestRemaining = 0;
this.queue.unshift(item);
this.restManager.client.setTimeout(() => {
if (res.headers['x-ratelimit-global']) this.globalLimit = true;
if (this.resetTimeout) return;
this.resetTimeout = this.client.setTimeout(() => {
this.remaining = this.limit;
this.globalLimit = false;
this.handle();
}, Number(res.headers['retry-after']) + this.restManager.client.options.restTimeOffset);
if (res.headers['x-ratelimit-global']) this.globalLimit = true;
this.resetTimeout = null;
}, Number(res.headers['retry-after']) + this.client.options.restTimeOffset);
} else {
item.reject(err);
this.handle();
}
} else {
this.globalLimit = false;
const data = res && res.body ? res.body : {};
item.resolve(data);
if (this.first) {
this.first = false;
this.handle();
}
this.handle();
}
});
}
handle() {
super.handle();
if (this.requestRemaining < 1 || this.queue.length === 0 || this.globalLimit) return;
while (this.queue.length > 0 && this.requestRemaining > 0) {
this.execute(this.queue.shift());
this.requestRemaining--;
}
if (this.remaining <= 0 || this.queue.length === 0 || this.globalLimit) return;
this.execute(this.queue.shift());
this.remaining--;
this.handle();
}
}

View File

@@ -15,12 +15,6 @@ class SequentialRequestHandler extends RequestHandler {
constructor(restManager, endpoint) {
super(restManager, endpoint);
/**
* Whether this rate limiter is waiting for a response from a request
* @type {boolean}
*/
this.waiting = false;
/**
* The endpoint that this handler is handling
* @type {string}
@@ -49,27 +43,24 @@ class SequentialRequestHandler extends RequestHandler {
return new Promise(resolve => {
item.request.gen().end((err, res) => {
if (res && res.headers) {
this.requestLimit = res.headers['x-ratelimit-limit'];
this.requestLimit = Number(res.headers['x-ratelimit-limit']);
this.requestResetTime = Number(res.headers['x-ratelimit-reset']) * 1000;
this.requestRemaining = Number(res.headers['x-ratelimit-remaining']);
this.timeDifference = Date.now() - new Date(res.headers.date).getTime();
}
if (err) {
if (err.status === 429) {
this.queue.unshift(item);
this.restManager.client.setTimeout(() => {
this.waiting = false;
this.globalLimit = false;
resolve();
}, Number(res.headers['retry-after']) + this.restManager.client.options.restTimeOffset);
if (res.headers['x-ratelimit-global']) this.globalLimit = true;
} else {
this.queue.shift();
this.waiting = false;
item.reject(err);
resolve(err);
}
} else {
this.queue.shift();
this.globalLimit = false;
const data = res && res.body ? res.body : {};
item.resolve(data);
@@ -82,7 +73,6 @@ class SequentialRequestHandler extends RequestHandler {
this.requestResetTime - Date.now() + this.timeDifference + this.restManager.client.options.restTimeOffset
);
} else {
this.waiting = false;
resolve(data);
}
}
@@ -92,12 +82,8 @@ class SequentialRequestHandler extends RequestHandler {
handle() {
super.handle();
if (this.waiting || this.queue.length === 0 || this.globalLimit) return;
this.waiting = true;
const item = this.queue[0];
this.execute(item).then(() => this.handle());
if (this.remaining === 0 || this.queue.length === 0 || this.globalLimit) return;
this.execute(this.queue.shift()).then(() => this.handle());
}
}

View File

@@ -6,6 +6,8 @@ exports.Package = require('../../package.json');
* @property {string} [apiRequestMethod='sequential'] One of `sequential` or `burst`. The sequential handler executes
* all requests in the order they are triggered, whereas the burst handler runs multiple in parallel, and doesn't
* provide the guarantee of any particular order.
* <warn>Burst mode is, by its nature, more likely to hit a 429 rate limit; be advised that,
* if you are very unlucky, you could be IP banned.</warn>
* @property {number} [shardId=0] ID of the shard to run
* @property {number} [shardCount=0] Total number of shards
* @property {number} [messageCacheMaxSize=200] Maximum number of messages to cache per channel