Merge rate limiter setup into the middleware

This commit is contained in:
rijkvanzanten
2020-09-08 15:52:09 -04:00
parent 457a9859aa
commit 8d13f311b0
2 changed files with 59 additions and 115 deletions

View File

@@ -1,29 +1,67 @@
/**
 * Rate limiter middleware built on rate-limiter-flexible.
 * Supports in-memory, Redis, and memcache stores; can be
 * extended with further options in the future.
 */
import { RequestHandler } from 'express';
import asyncHandler from 'express-async-handler';
import { RateLimiterMemory, RateLimiterRedis, RateLimiterMemcache, IRateLimiterOptions, IRateLimiterStoreOptions, RateLimiterStoreAbstract } from 'rate-limiter-flexible';
import env from '../env';
import { getConfigFromEnv } from '../utils/get-config-from-env';
import { HitRateLimitException } from '../exceptions';
import { RedisNotFoundException } from '../exceptions';
import rateLimiterConfig from '../rate-limiter';
import ms from 'ms';
import { validateEnv } from '../utils/validate-env';
const rateLimiter: RequestHandler = asyncHandler(async (req, res, next) => {
try {
await rateLimiterConfig.consume(req.ip);
} catch (rejRes) {
if (rejRes instanceof Error) {
throw new RedisNotFoundException('Redis is having some trouble connecting');
let checkRateLimit: RequestHandler = (req, res, next) => next();
if (env.RATE_LIMITER_ENABLED === true) {
validateEnv(['RATE_LIMITER_STORE', 'RATE_LIMITER_DURATION', 'RATE_LIMITER_POINTS']);
const rateLimiter = getRateLimiter();
checkRateLimit = asyncHandler(async (req, res, next) => {
try {
await rateLimiter.consume(req.ip, 1);
} catch (rateLimiterRes) {
if (rateLimiterRes instanceof Error) throw rateLimiterRes;
res.set('Retry-After', String(rateLimiterRes.msBeforeNext / 1000));
throw new HitRateLimitException(`Too many requests, retry after ${ms(rateLimiterRes.msBeforeNext)}.`, {
limit: +env.RATE_LIMITER_POINTS,
reset: new Date(Date.now() + rateLimiterRes.msBeforeNext)
});
}
// If there is no error, rateLimiterRedis promise rejected with number of ms before next request allowed
const secs = Math.round(rejRes.msBeforeNext / 1000) || 1;
res.set('Retry-After', String(secs));
throw new HitRateLimitException(`Too many requests, retry after ${secs}.`);
next();
});
}
export default checkRateLimit;
/**
 * Instantiate the rate-limiter implementation that matches the configured
 * RATE_LIMITER_STORE. Falls back to the in-memory store when the value is
 * unset or unrecognized.
 */
function getRateLimiter() {
	const store = env.RATE_LIMITER_STORE;

	if (store === 'redis') {
		return new RateLimiterRedis(getConfig('redis'));
	}

	if (store === 'memcache') {
		return new RateLimiterMemcache(getConfig('memcache'));
	}

	// 'memory' and any unknown value both use the in-process store.
	return new RateLimiterMemory(getConfig());
}
function getConfig(store?: 'memory'): IRateLimiterOptions;
function getConfig(store: 'redis' | 'memcache'): IRateLimiterStoreOptions;
function getConfig(store: 'memory' | 'redis' | 'memcache' = 'memory'): IRateLimiterOptions | IRateLimiterStoreOptions {
const config: any = getConfigFromEnv('RATE_LIMITER_', `RATE_LIMITER_${store}_`);
if (store === 'redis') {
const Redis = require('ioredis');
config.storeClient = new Redis(env.RATE_LIMITER_REDIS || getConfigFromEnv('RATE_LIMITER_REDIS_'));
}
return next();
});
if (store === 'memcache') {
const Memcached = require('memcached');
config.storeClient = new Memcached(env.RATE_LIMITER_MEMCACHE, getConfigFromEnv('RATE_LIMITER_MEMCACHE_'));
}
export default rateLimiter;
delete config.enabled;
delete config.store;
return config;
}