Skip to content

Commit 877d975

Browse files
committed
throttling comments
1 parent 237f6da commit 877d975

File tree

1 file changed

+16
-40
lines changed

1 file changed

+16
-40
lines changed

src/middleware/index.ts

Lines changed: 16 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -42,33 +42,42 @@ export function expressRateLimiter(
4242
*/
4343
// TODO: Throw ValidationError if schema is invalid
4444
const typeWeightObject = buildTypeWeightsFromSchema(schema, typeWeightConfig);
45+
4546
// TODO: Throw error if connection is unsuccessful
46-
// Default connection timeout is 10000 ms of inactivity
47-
// FIXME: Do we need to re-establish connection?
4847
const redisClient = connect(redisClientOptions); // Default port is 6379 automatically
4948
const rateLimiter = setupRateLimiter(rateLimiterAlgo, rateLimiterOptions, redisClient);
5049

5150
// stores request IDs to be processed
5251
const requestQueue: { [index: string]: string[] } = {};
52+
53+
// Manages processing of event queue
5354
const requestEvents = new EventEmitter();
5455

55-
// Throttle rateLimiter.processRequest based on user IP to prevent inaccurate redis reads
56+
/**
57+
* Throttle rateLimiter.processRequest based on user IP to prevent inaccurate redis reads
58+
 * Throttling is based on an event-driven promise fulfillment approach.
59+
* Each time a request is received a promise is added to the user's request queue. The promise "subscribes"
60+
* to the previous request in the user's queue then calls processRequest and resolves once the previous request
61+
* is complete.
62+
* @param userId
63+
* @param timestamp
64+
* @param tokens
65+
* @returns
66+
*/
5667
async function throttledProcess(
5768
userId: string,
5869
timestamp: number,
5970
tokens: number
6071
): Promise<RateLimiterResponse> {
61-
// Generate a random uuid for this request and add it to the queue
62-
// Alternatively use crypto.randomUUID() to generate a uuid
72+
// Alternatively use crypto.randomUUID() to generate a random uuid
6373
const requestId = `${timestamp}${tokens}`;
6474

6575
if (!requestQueue[userId]) {
6676
requestQueue[userId] = [];
6777
}
6878
requestQueue[userId].push(requestId);
6979

70-
// Start a loop to check when this request should be processed
71-
return new Promise((resolve, reject) => {
80+
return new Promise((resolve) => {
7281
requestEvents.once(requestId, async () => {
7382
// process the request
7483
const response = await rateLimiter.processRequest(userId, timestamp, tokens);
@@ -82,39 +91,6 @@ export function expressRateLimiter(
8291
});
8392
}
8493

85-
// Sort the requests by timestamps to make sure we process in the correct order
86-
// We need to store the request, response and next object so that the correct one is used
87-
// the function we return accepts the unique request, response, next objects
88-
// it will store these and then process them in the order in which they were received.
89-
// We can do an event listener that waits for the previous request in the queue to be finished
90-
// store the middleware in closure
91-
92-
// Catch the request
93-
// add this to the queue
94-
// process the oldest request in the queue
95-
// check if the queue is empty => if not process the next request
96-
// otherwise return
97-
// process restarts when the next request comes in
98-
99-
// so r1, r2 come in
100-
// r1 and r2 get processed within the same frame on the call stack
101-
// r2 call is done once r2 is added to the queue
102-
103-
// return a throttled middleware. Check every 100ms? make this a setting?
104-
// how do we make sure these get queued properly?
105-
// store the requests in an array when available grab the next request for a user
106-
/**
107-
* Request 1 comes in
108-
* Start handling request 1
109-
 * In the meantime request 2 comes in for the same user
110-
* Finish handling request 1
111-
* check the queue for this user
112-
* if it's empty we're done
113-
 * if it has a request, handle the next one
114-
*
115-
* Not throttling on time just queueing requests.
116-
*/
117-
11894
return async (
11995
req: Request,
12096
res: Response,

0 commit comments

Comments
 (0)