Global rate limiter (#17296)

Co-authored-by: Azri Kahar <42867097+azrikahar@users.noreply.github.com>
Authored by Brainslug on 2023-03-02 13:04:22 +01:00; committed by GitHub
parent 512ca48680
commit ee7fca3e31
7 changed files with 151 additions and 17 deletions

View File: api/src/app.ts

@@ -47,7 +47,8 @@ import { checkIP } from './middleware/check-ip';
import cors from './middleware/cors';
import errorHandler from './middleware/error-handler';
import extractToken from './middleware/extract-token';
-import rateLimiter from './middleware/rate-limiter';
+import rateLimiter from './middleware/rate-limiter-ip';
+import rateLimiterGlobal from './middleware/rate-limiter-global';
import sanitizeQuery from './middleware/sanitize-query';
import schema from './middleware/schema';
@@ -209,6 +210,9 @@ export default async function createApp(): Promise<express.Application> {
}
// use the rate limiter - all routes for now
+if (env.RATE_LIMITER_GLOBAL_ENABLED === true) {
+app.use(rateLimiterGlobal);
+}
if (env.RATE_LIMITER_ENABLED === true) {
app.use(rateLimiter);
}

View File: api/src/env.ts

@@ -58,6 +58,7 @@ const allowedEnvironmentVars = [
'CORS_CREDENTIALS',
'CORS_MAX_AGE',
// rate limiting
+'RATE_LIMITER_GLOBAL_.+',
'RATE_LIMITER_.+',
// cache
'CACHE_ENABLED',
@@ -205,10 +206,15 @@ const defaults: Record<string, any> = {
STORAGE_LOCAL_ROOT: './uploads',
RATE_LIMITER_ENABLED: false,
-RATE_LIMITER_POINTS: 25,
+RATE_LIMITER_POINTS: 50,
RATE_LIMITER_DURATION: 1,
RATE_LIMITER_STORE: 'memory',
+RATE_LIMITER_GLOBAL_ENABLED: false,
+RATE_LIMITER_GLOBAL_POINTS: 1000,
+RATE_LIMITER_GLOBAL_DURATION: 1,
+RATE_LIMITER_GLOBAL_STORE: 'memory',
ACCESS_TOKEN_TTL: '15m',
REFRESH_TOKEN_TTL: '7d',
REFRESH_TOKEN_COOKIE_SECURE: false,

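For reference, a minimal .env sketch that enables both limiters with the defaults introduced above (values are illustrative; as the validateConfiguration() check in the new middleware below enforces, the global budget in points per second has to stay above the per-IP one):

RATE_LIMITER_ENABLED=true
RATE_LIMITER_POINTS=50
RATE_LIMITER_DURATION=1
RATE_LIMITER_STORE=memory
RATE_LIMITER_GLOBAL_ENABLED=true
RATE_LIMITER_GLOBAL_POINTS=1000
RATE_LIMITER_GLOBAL_DURATION=1
RATE_LIMITER_GLOBAL_STORE=memory
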
View File: api/src/middleware/rate-limiter-global.ts

@@ -0,0 +1,53 @@
import { RequestHandler } from 'express';
import ms from 'ms';
import { RateLimiterMemcache, RateLimiterMemory, RateLimiterRedis } from 'rate-limiter-flexible';
import env from '../env';
import { HitRateLimitException } from '../exceptions/index';
import logger from '../logger';
import { createRateLimiter } from '../rate-limiter';
import asyncHandler from '../utils/async-handler';
import { validateEnv } from '../utils/validate-env';
const RATE_LIMITER_GLOBAL_KEY = 'global-rate-limit';
let checkRateLimit: RequestHandler = (_req, _res, next) => next();
export let rateLimiterGlobal: RateLimiterRedis | RateLimiterMemcache | RateLimiterMemory;
if (env.RATE_LIMITER_GLOBAL_ENABLED === true) {
validateEnv(['RATE_LIMITER_GLOBAL_STORE', 'RATE_LIMITER_GLOBAL_DURATION', 'RATE_LIMITER_GLOBAL_POINTS']);
validateConfiguration();
rateLimiterGlobal = createRateLimiter('RATE_LIMITER_GLOBAL');
checkRateLimit = asyncHandler(async (_req, res, next) => {
try {
await rateLimiterGlobal.consume(RATE_LIMITER_GLOBAL_KEY, 1);
} catch (rateLimiterRes: any) {
if (rateLimiterRes instanceof Error) throw rateLimiterRes;
res.set('Retry-After', String(Math.round(rateLimiterRes.msBeforeNext / 1000)));
throw new HitRateLimitException(`Too many requests, retry after ${ms(rateLimiterRes.msBeforeNext)}.`, {
limit: +env.RATE_LIMITER_GLOBAL_POINTS,
reset: new Date(Date.now() + rateLimiterRes.msBeforeNext),
});
}
next();
});
}
export default checkRateLimit;
function validateConfiguration() {
if (env.RATE_LIMITER_ENABLED !== true) {
logger.error(`The IP based rate limiter needs to be enabled when using the global rate limiter.`);
process.exit(1);
}
const globalPointsPerSec =
Number(env.RATE_LIMITER_GLOBAL_POINTS) / Math.max(Number(env.RATE_LIMITER_GLOBAL_DURATION), 1);
const regularPointsPerSec = Number(env.RATE_LIMITER_POINTS) / Math.max(Number(env.RATE_LIMITER_DURATION), 1);
if (globalPointsPerSec <= regularPointsPerSec) {
logger.error(`The global rate limiter needs to allow more requests per second than the IP based rate limiter.`);
process.exit(1);
}
}

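A quick worked example of the validateConfiguration() guard above, using the shipped defaults:

globalPointsPerSec  = RATE_LIMITER_GLOBAL_POINTS / RATE_LIMITER_GLOBAL_DURATION = 1000 / 1 = 1000
regularPointsPerSec = RATE_LIMITER_POINTS / RATE_LIMITER_DURATION = 50 / 1 = 50

1000 > 50, so startup proceeds; a global budget of, say, 40 points per second would be stricter than the per-IP limiter and the process would exit instead. Note that, unlike the IP limiter, every request consumes from the single shared key 'global-rate-limit', so the global points are a budget for the whole instance rather than per client.
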
View File: api/src/middleware/rate-limiter-ip.ts

@@ -8,13 +8,13 @@ import asyncHandler from '../utils/async-handler';
import { getIPFromReq } from '../utils/get-ip-from-req';
import { validateEnv } from '../utils/validate-env';
-let checkRateLimit: RequestHandler = (req, res, next) => next();
+let checkRateLimit: RequestHandler = (_req, _res, next) => next();
export let rateLimiter: RateLimiterRedis | RateLimiterMemcache | RateLimiterMemory;
if (env.RATE_LIMITER_ENABLED === true) {
validateEnv(['RATE_LIMITER_STORE', 'RATE_LIMITER_DURATION', 'RATE_LIMITER_POINTS']);
-rateLimiter = createRateLimiter();
+rateLimiter = createRateLimiter('RATE_LIMITER');
checkRateLimit = asyncHandler(async (req, res, next) => {
try {
@@ -22,7 +22,7 @@ if (env.RATE_LIMITER_ENABLED === true) {
} catch (rateLimiterRes: any) {
if (rateLimiterRes instanceof Error) throw rateLimiterRes;
-res.set('Retry-After', String(rateLimiterRes.msBeforeNext / 1000));
+res.set('Retry-After', String(Math.round(rateLimiterRes.msBeforeNext / 1000)));
throw new HitRateLimitException(`Too many requests, retry after ${ms(rateLimiterRes.msBeforeNext)}.`, {
limit: +env.RATE_LIMITER_POINTS,
reset: new Date(Date.now() + rateLimiterRes.msBeforeNext),

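The Retry-After tweak in this file is a small correctness fix rather than a behaviour change: msBeforeNext is in milliseconds and the header expects a whole number of seconds (or an HTTP date), so the value is now rounded. A sketch with an illustrative value (750 ms is hypothetical):

const msBeforeNext = 750;                // hypothetical value returned by rate-limiter-flexible
String(msBeforeNext / 1000);             // before: '0.75' (not a valid delay-seconds value)
String(Math.round(msBeforeNext / 1000)); // after: '1'
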
View File: api/src/rate-limiter.ts

@@ -12,35 +12,47 @@ import { getConfigFromEnv } from './utils/get-config-from-env';
type IRateLimiterOptionsOverrides = Partial<IRateLimiterOptions> | Partial<IRateLimiterStoreOptions>;
-export function createRateLimiter(configOverrides?: IRateLimiterOptionsOverrides): RateLimiterAbstract {
+export function createRateLimiter(
+configPrefix = 'RATE_LIMITER',
+configOverrides?: IRateLimiterOptionsOverrides
+): RateLimiterAbstract {
switch (env.RATE_LIMITER_STORE) {
case 'redis':
-return new RateLimiterRedis(getConfig('redis', configOverrides));
+return new RateLimiterRedis(getConfig('redis', configPrefix, configOverrides));
case 'memcache':
-return new RateLimiterMemcache(getConfig('memcache', configOverrides));
+return new RateLimiterMemcache(getConfig('memcache', configPrefix, configOverrides));
case 'memory':
default:
-return new RateLimiterMemory(getConfig('memory', configOverrides));
+return new RateLimiterMemory(getConfig('memory', configPrefix, configOverrides));
}
}
-function getConfig(store: 'memory', overrides?: IRateLimiterOptionsOverrides): IRateLimiterOptions;
-function getConfig(store: 'redis' | 'memcache', overrides?: IRateLimiterOptionsOverrides): IRateLimiterStoreOptions;
+function getConfig(
+store: 'memory',
+configPrefix: string,
+overrides?: IRateLimiterOptionsOverrides
+): IRateLimiterOptions;
+function getConfig(
+store: 'redis' | 'memcache',
+configPrefix: string,
+overrides?: IRateLimiterOptionsOverrides
+): IRateLimiterStoreOptions;
function getConfig(
store: 'memory' | 'redis' | 'memcache' = 'memory',
+configPrefix = 'RATE_LIMITER',
overrides?: IRateLimiterOptionsOverrides
): IRateLimiterOptions | IRateLimiterStoreOptions {
-const config: any = getConfigFromEnv('RATE_LIMITER_', `RATE_LIMITER_${store}_`);
+const config: any = getConfigFromEnv(`${configPrefix}_`, `${configPrefix}_${store}_`);
if (store === 'redis') {
const Redis = require('ioredis');
delete config.redis;
-config.storeClient = new Redis(env.RATE_LIMITER_REDIS || getConfigFromEnv('RATE_LIMITER_REDIS_'));
+config.storeClient = new Redis(env[`${configPrefix}_REDIS`] || getConfigFromEnv(`${configPrefix}_REDIS_`));
}
if (store === 'memcache') {
const Memcached = require('memcached');
-config.storeClient = new Memcached(env.RATE_LIMITER_MEMCACHE, getConfigFromEnv('RATE_LIMITER_MEMCACHE_'));
+config.storeClient = new Memcached(env[`${configPrefix}_MEMCACHE`], getConfigFromEnv(`${configPrefix}_MEMCACHE_`));
}
delete config.enabled;

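The new configPrefix parameter is what lets the global middleware share this factory: getConfig() now reads points, duration, and the Redis/Memcache connection settings from `${configPrefix}_*` variables, so a call with 'RATE_LIMITER_GLOBAL' is driven by RATE_LIMITER_GLOBAL_POINTS, RATE_LIMITER_GLOBAL_REDIS, and so on, while existing callers keep the old RATE_LIMITER_* behaviour through the default. A minimal usage sketch (the env values are hypothetical):

// e.g. RATE_LIMITER_GLOBAL_POINTS=1000, RATE_LIMITER_GLOBAL_DURATION=1
const globalLimiter = createRateLimiter('RATE_LIMITER_GLOBAL');
await globalLimiter.consume('global-rate-limit', 1); // same shared key the new middleware uses
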
View File: api/src/services/authentication.ts

@@ -22,7 +22,7 @@ import { ActivityService } from './activity';
import { SettingsService } from './settings';
import { TFAService } from './tfa';
-const loginAttemptsLimiter = createRateLimiter({ duration: 0 });
+const loginAttemptsLimiter = createRateLimiter('RATE_LIMITER', { duration: 0 });
export class AuthenticationService {
knex: Knex;

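The only change for the login-attempts limiter is the explicit 'RATE_LIMITER' prefix, so it keeps reading the same RATE_LIMITER_* configuration as before; the { duration: 0 } override still means consumed points never expire on their own (rate-limiter-flexible treats a duration of 0 as 'never reset'), which suits counting failed login attempts. A hedged sketch of how such a limiter behaves (the key format is hypothetical, not the service's actual one):

const loginAttemptsLimiter = createRateLimiter('RATE_LIMITER', { duration: 0 });
await loginAttemptsLimiter.consume(`login-${userId}`, 1); // one point per failed attempt, never auto-reset
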
View File: api/src/services/server.ts

@@ -8,7 +8,7 @@ import { getCache } from '../cache';
import getDatabase, { hasDatabaseConnection } from '../database';
import env from '../env';
import logger from '../logger';
-import { rateLimiter } from '../middleware/rate-limiter';
+import { rateLimiter } from '../middleware/rate-limiter-ip';
import { getStorage } from '../storage';
import { AbstractServiceOptions } from '../types';
import { Accountability, SchemaOverview } from '@directus/shared/types';
@@ -17,6 +17,7 @@ import getMailer from '../mailer';
import { SettingsService } from './settings';
import { getOSInfo } from '../utils/get-os-info';
import { Readable } from 'node:stream';
+import { rateLimiterGlobal } from '../middleware/rate-limiter-global';
export class ServerService {
knex: Knex;
@@ -59,6 +60,14 @@ export class ServerService {
} else {
info.rateLimit = false;
}
+if (env.RATE_LIMITER_GLOBAL_ENABLED) {
+info.rateLimitGlobal = {
+points: env.RATE_LIMITER_GLOBAL_POINTS,
+duration: env.RATE_LIMITER_GLOBAL_DURATION,
+};
+} else {
+info.rateLimitGlobal = false;
+}
info.flows = {
execAllowedModules: env.FLOWS_EXEC_ALLOWED_MODULES ? toArray(env.FLOWS_EXEC_ALLOWED_MODULES) : [],
@@ -117,7 +126,14 @@ export class ServerService {
releaseId: version,
serviceId: env.KEY,
checks: merge(
-...(await Promise.all([testDatabase(), testCache(), testRateLimiter(), testStorage(), testEmail()]))
+...(await Promise.all([
+testDatabase(),
+testCache(),
+testRateLimiter(),
+testRateLimiterGlobal(),
+testStorage(),
+testEmail(),
+]))
),
};
@@ -286,6 +302,49 @@ export class ServerService {
return checks;
}
+async function testRateLimiterGlobal(): Promise<Record<string, HealthCheck[]>> {
+if (env.RATE_LIMITER_GLOBAL_ENABLED !== true) {
+return {};
+}
+const checks: Record<string, HealthCheck[]> = {
+'rateLimiterGlobal:responseTime': [
+{
+status: 'ok',
+componentType: 'ratelimiter',
+observedValue: 0,
+observedUnit: 'ms',
+threshold: env.RATE_LIMITER_GLOBAL_HEALTHCHECK_THRESHOLD
+? +env.RATE_LIMITER_GLOBAL_HEALTHCHECK_THRESHOLD
+: 150,
+},
+],
+};
+const startTime = performance.now();
+try {
+await rateLimiterGlobal.consume(`health-${checkID}`, 1);
+await rateLimiterGlobal.delete(`health-${checkID}`);
+} catch (err: any) {
+checks['rateLimiterGlobal:responseTime'][0].status = 'error';
+checks['rateLimiterGlobal:responseTime'][0].output = err;
+} finally {
+const endTime = performance.now();
+checks['rateLimiterGlobal:responseTime'][0].observedValue = +(endTime - startTime).toFixed(3);
+if (
+checks['rateLimiterGlobal:responseTime'][0].observedValue >
+checks['rateLimiterGlobal:responseTime'][0].threshold! &&
+checks['rateLimiterGlobal:responseTime'][0].status !== 'error'
+) {
+checks['rateLimiterGlobal:responseTime'][0].status = 'warn';
+}
+}
+return checks;
+}
async function testStorage(): Promise<Record<string, HealthCheck[]>> {
const storage = await getStorage();
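
For context, the new testRateLimiterGlobal() check above reports into the same health payload as the existing limiter checks; a hedged sketch of a passing entry (the observed value is illustrative):

// 'rateLimiterGlobal:responseTime': [
//   { status: 'ok', componentType: 'ratelimiter', observedValue: 0.52, observedUnit: 'ms', threshold: 150 },
// ]

The status is downgraded to 'warn' once observedValue exceeds the threshold (150 ms unless RATE_LIMITER_GLOBAL_HEALTHCHECK_THRESHOLD overrides it), and set to 'error' if the consume/delete round trip throws.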