Skip to content

Commit 5f34e61

Browse files
committed
throttled call to processRequest during rate limiter setup rather than express middleware
1 parent 149a719 commit 5f34e61

File tree

3 files changed

+99
-87
lines changed

3 files changed

+99
-87
lines changed

src/middleware/index.ts

Lines changed: 1 addition & 68 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,9 @@
1-
import EventEmitter from 'events';
21
import { parse, validate } from 'graphql';
32
import { GraphQLSchema } from 'graphql/type/schema';
43
import { Request, Response, NextFunction, RequestHandler } from 'express';
54
import buildTypeWeightsFromSchema, { defaultTypeWeightsConfig } from '../analysis/buildTypeWeights';
65
import setupRateLimiter from './rateLimiterSetup';
76
import { ExpressMiddlewareConfig, ExpressMiddlewareSet } from '../@types/expressMiddleware';
8-
import { RateLimiterResponse } from '../@types/rateLimit';
97
import { connect } from '../utils/redis';
108
import ASTParser from '../analysis/ASTParser';
119

@@ -66,71 +64,6 @@ export default function expressGraphQLRateLimiter(
6664
middlewareSetup.redis.keyExpiry
6765
);
6866

69-
/**
70-
* We are using a queue and event emitter to handle situations where a user has two concurrent requests being processed.
71-
 * The trailing request will be added to the queue and will await the processing of the prior request by the rate-limiter
72-
* This will maintain the consistency and accuracy of the cache when under load from one user
73-
*/
74-
// stores request IDs for each user in an array to be processed
75-
const requestQueues: { [index: string]: string[] } = {};
76-
// Manages processing of requests queue
77-
const requestEvents = new EventEmitter();
78-
79-
// processes requests (by resolving promises) that have been throttled by throttledProcess
80-
async function processRequestResolver(
81-
userId: string,
82-
timestamp: number,
83-
tokens: number,
84-
resolve: (value: RateLimiterResponse | PromiseLike<RateLimiterResponse>) => void,
85-
reject: (reason: any) => void
86-
) {
87-
try {
88-
const response = await rateLimiter.processRequest(userId, timestamp, tokens);
89-
requestQueues[userId] = requestQueues[userId].slice(1);
90-
resolve(response);
91-
// trigger the next event and delete the request queue for this user if there are no more requests to process
92-
requestEvents.emit(requestQueues[userId][0]);
93-
if (requestQueues[userId].length === 0) delete requestQueues[userId];
94-
} catch (err) {
95-
reject(err);
96-
}
97-
}
98-
99-
/**
100-
* Throttle rateLimiter.processRequest based on user IP to prevent inaccurate redis reads
101-
 * Throttling is based on an event-driven promise fulfillment approach.
102-
* Each time a request is received a promise is added to the user's request queue. The promise "subscribes"
103-
* to the previous request in the user's queue then calls processRequest and resolves once the previous request
104-
* is complete.
105-
* @param userId
106-
* @param timestamp
107-
* @param tokens
108-
* @returns
109-
*/
110-
async function throttledProcess(
111-
userId: string,
112-
timestamp: number,
113-
tokens: number
114-
): Promise<RateLimiterResponse> {
115-
// Alternatively use crypto.randomUUID() to generate a random uuid
116-
const requestId = `${timestamp}${tokens}`;
117-
118-
if (!requestQueues[userId]) {
119-
requestQueues[userId] = [];
120-
}
121-
requestQueues[userId].push(requestId);
122-
123-
return new Promise((resolve, reject) => {
124-
if (requestQueues[userId].length > 1) {
125-
requestEvents.once(requestId, async () => {
126-
await processRequestResolver(userId, timestamp, tokens, resolve, reject);
127-
});
128-
} else {
129-
processRequestResolver(userId, timestamp, tokens, resolve, reject);
130-
}
131-
});
132-
}
133-
13467
/** Rate-limiting middleware */
13568
return async (
13669
req: Request,
@@ -169,7 +102,7 @@ export default function expressGraphQLRateLimiter(
169102
const queryComplexity = queryParser.processQuery(queryAST);
170103

171104
try {
172-
const rateLimiterResponse = await throttledProcess(
105+
const rateLimiterResponse = await rateLimiter.processRequest(
173106
ip,
174107
requestTimestamp,
175108
queryComplexity

src/middleware/rateLimiterSetup.ts

Lines changed: 97 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
1+
import EventEmitter from 'events';
2+
13
import Redis from 'ioredis';
2-
import { RateLimiterConfig } from '../@types/rateLimit';
4+
5+
import { RateLimiter, RateLimiterConfig, RateLimiterResponse } from '../@types/rateLimit';
36
import TokenBucket from '../rateLimiters/tokenBucket';
47
import SlidingWindowCounter from '../rateLimiters/slidingWindowCounter';
58
import SlidingWindowLog from '../rateLimiters/slidingWindowLog';
@@ -9,46 +12,50 @@ import FixedWindow from '../rateLimiters/fixedWindow';
912
 * Instantiate the rate-limiting algorithm class based on the developer's selection and options
1013
*
1114
* @export
12-
* @param {RateLimiterConfig} rateLimiter limiter selection and option
15+
* @param {RateLimiterConfig} rateLimiterConfig limiter selection and option
1316
* @param {Redis} client
1417
* @param {number} keyExpiry
15-
* @return {*}
18+
* @return {RateLimiter}
1619
*/
1720
export default function setupRateLimiter(
18-
rateLimiter: RateLimiterConfig,
21+
rateLimiterConfig: RateLimiterConfig,
1922
client: Redis,
2023
keyExpiry: number
21-
) {
24+
): RateLimiter {
25+
let rateLimiter: RateLimiter;
26+
2227
try {
23-
switch (rateLimiter.type) {
28+
switch (rateLimiterConfig.type) {
2429
case 'TOKEN_BUCKET':
25-
return new TokenBucket(
26-
rateLimiter.capacity,
27-
rateLimiter.refillRate,
30+
rateLimiter = new TokenBucket(
31+
rateLimiterConfig.capacity,
32+
rateLimiterConfig.refillRate,
2833
client,
2934
keyExpiry
3035
);
3136
break;
3237
case 'LEAKY_BUCKET':
3338
throw new Error('Leaky Bucket algonithm has not be implemented.');
3439
case 'FIXED_WINDOW':
35-
return new FixedWindow(
36-
rateLimiter.capacity,
37-
rateLimiter.windowSize,
40+
rateLimiter = new FixedWindow(
41+
rateLimiterConfig.capacity,
42+
rateLimiterConfig.windowSize,
3843
client,
3944
keyExpiry
4045
);
46+
break;
4147
case 'SLIDING_WINDOW_LOG':
42-
return new SlidingWindowLog(
43-
rateLimiter.windowSize,
44-
rateLimiter.capacity,
48+
rateLimiter = new SlidingWindowLog(
49+
rateLimiterConfig.windowSize,
50+
rateLimiterConfig.capacity,
4551
client,
4652
keyExpiry
4753
);
54+
break;
4855
case 'SLIDING_WINDOW_COUNTER':
49-
return new SlidingWindowCounter(
50-
rateLimiter.windowSize,
51-
rateLimiter.capacity,
56+
rateLimiter = new SlidingWindowCounter(
57+
rateLimiterConfig.windowSize,
58+
rateLimiterConfig.capacity,
5259
client,
5360
keyExpiry
5461
);
@@ -57,6 +64,78 @@ export default function setupRateLimiter(
5764
// typescript should never let us invoke this function with anything other than the options above
5865
throw new Error('Selected rate limiting algorithm is not suppported');
5966
}
67+
68+
const processRequest = rateLimiter.processRequest.bind(rateLimiter);
69+
70+
/**
71+
* We are using a queue and event emitter to handle situations where a user has two concurrent requests being processed.
72+
 * The trailing request will be added to the queue and will await the processing of the prior request by the rate-limiter
73+
* This will maintain the consistency and accuracy of the cache when under load from one user
74+
*/
75+
// stores request IDs for each user in an array to be processed
76+
const requestQueues: { [index: string]: string[] } = {};
77+
// Manages processing of requests queue
78+
const requestEvents = new EventEmitter();
79+
80+
// processes requests (by resolving promises) that have been throttled by throttledProcess
81+
// eslint-disable-next-line no-inner-declarations
82+
async function processRequestResolver(
83+
userId: string,
84+
timestamp: number,
85+
tokens: number,
86+
resolve: (value: RateLimiterResponse | PromiseLike<RateLimiterResponse>) => void,
87+
reject: (reason: unknown) => void
88+
) {
89+
try {
90+
const response = await processRequest(userId, timestamp, tokens);
91+
requestQueues[userId] = requestQueues[userId].slice(1);
92+
resolve(response);
93+
// trigger the next event and delete the request queue for this user if there are no more requests to process
94+
requestEvents.emit(requestQueues[userId][0]);
95+
if (requestQueues[userId].length === 0) delete requestQueues[userId];
96+
} catch (err) {
97+
reject(err);
98+
}
99+
}
100+
101+
/**
102+
* Throttle rateLimiter.processRequest based on user IP to prevent inaccurate redis reads
103+
 * Throttling is based on an event-driven promise fulfillment approach.
104+
* Each time a request is received a promise is added to the user's request queue. The promise "subscribes"
105+
* to the previous request in the user's queue then calls processRequest and resolves once the previous request
106+
* is complete.
107+
* @param userId
108+
* @param timestamp
109+
* @param tokens
110+
* @returns
111+
*/
112+
// eslint-disable-next-line no-inner-declarations
113+
async function throttledProcess(
114+
userId: string,
115+
timestamp: number,
116+
tokens = 1
117+
): Promise<RateLimiterResponse> {
118+
// Alternatively use crypto.randomUUID() to generate a random uuid
119+
const requestId = `${timestamp}${tokens}`;
120+
121+
if (!requestQueues[userId]) {
122+
requestQueues[userId] = [];
123+
}
124+
requestQueues[userId].push(requestId);
125+
126+
return new Promise((resolve, reject) => {
127+
if (requestQueues[userId].length > 1) {
128+
requestEvents.once(requestId, async () => {
129+
await processRequestResolver(userId, timestamp, tokens, resolve, reject);
130+
});
131+
} else {
132+
processRequestResolver(userId, timestamp, tokens, resolve, reject);
133+
}
134+
});
135+
}
136+
137+
rateLimiter.processRequest = throttledProcess;
138+
return rateLimiter;
60139
} catch (err) {
61140
throw new Error(`Error in expressGraphQLRateLimiter setting up rate-limiter: ${err}`);
62141
}

test/middleware/express.test.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -375,7 +375,7 @@ describe('Express Middleware tests', () => {
375375

376376
describe('Adds expected properties to res.locals', () => {
377377
test('Adds UNIX timestamp', async () => {
378-
jest.useRealTimers();
378+
// jest.useRealTimers();
379379
await middleware(mockRequest as Request, mockResponse as Response, nextFunction);
380380
jest.useFakeTimers();
381381

0 commit comments

Comments
 (0)