Build a Rate Limiting Middleware
Introduction
Rate limiting protects your API from abuse, prevents DoS attacks, and ensures fair resource allocation. In this tutorial, we'll build a flexible rate limiting middleware with multiple algorithms and storage backends.
What You'll Build
- Token bucket algorithm
- Fixed window counter
- Sliding window log
- In-memory and Redis storage
Core Concepts
Algorithms
- Fixed Window - Simple counter per time window
- Sliding Window - More accurate rolling window
- Token Bucket - Smooth rate limiting with burst support
Prerequisites
- Node.js 14+
- Redis (optional)
Rate Limiter
Create ratelimit/limiter.js:
class RateLimiter {
  /**
   * Configurable rate limiter that produces an Express-style middleware.
   *
   * @param {Object} [options]
   * @param {number} [options.windowMs=60000] - Window length in milliseconds.
   * @param {number} [options.maxRequests=100] - Max requests allowed per window.
   * @param {Function} [options.keyGenerator] - Maps a request to a bucket key (default: req.ip).
   * @param {Function} [options.handler] - Invoked when the limit is exceeded (default: 429 JSON).
   * @param {boolean} [options.skipSuccessfulRequests=false] - Refund requests ending with status < 400.
   * @param {boolean} [options.skipFailedRequests=false] - Refund requests ending with status >= 400.
   * @param {Object} [options.store] - Backend implementing increment/decrement/reset (default: MemoryStore).
   */
  constructor(options = {}) {
    this.windowMs = options.windowMs || 60000;
    this.maxRequests = options.maxRequests || 100;
    this.keyGenerator = options.keyGenerator || ((req) => req.ip);
    this.handler = options.handler || ((req, res) => {
      res.status(429).json({ error: 'Too many requests' });
    });
    this.skipSuccessfulRequests = options.skipSuccessfulRequests || false;
    this.skipFailedRequests = options.skipFailedRequests || false;
    this.store = options.store || new MemoryStore();
  }

  /**
   * Build the middleware function.
   *
   * Fix: this method is now synchronous. It was declared `async`, so
   * `app.use(limiter.middleware())` registered a Promise instead of a
   * function, which Express cannot call. Returning the handler directly
   * is backward-compatible: `await limiter.middleware()` still yields the
   * same function (awaiting a non-Promise returns the value unchanged).
   *
   * @returns {Function} `async (req, res, next)` Express middleware.
   */
  middleware() {
    return async (req, res, next) => {
      try {
        const key = this.keyGenerator(req);
        const result = await this.store.increment(key, this.windowMs, this.maxRequests);

        if (!result.success) {
          // Tell well-behaved clients when to retry (seconds, rounded up).
          res.set('Retry-After', Math.ceil(result.resetIn / 1000));
          return this.handler(req, res);
        }

        res.set('X-RateLimit-Limit', this.maxRequests);
        res.set('X-RateLimit-Remaining', result.remaining);
        res.set('X-RateLimit-Reset', result.resetAt);

        // Wrap res.send so the slot can be refunded once the final
        // status code is known (skipSuccessful/skipFailed options).
        const originalSend = res.send;
        res.send = (body) => {
          if (this.skipSuccessfulRequests && res.statusCode < 400) {
            this.store.decrement(key);
          }
          if (this.skipFailedRequests && res.statusCode >= 400) {
            this.store.decrement(key);
          }
          return originalSend.call(res, body);
        };

        next();
      } catch (err) {
        // Fix: surface keyGenerator/store failures to Express instead of
        // leaving the request hanging on an unhandled rejection.
        next(err);
      }
    };
  }
}
class MemoryStore {
  /**
   * In-memory sliding-window-log store: one array of request timestamps
   * per key. Single-process only; all state is lost on restart.
   */
  constructor() {
    // key -> array of request timestamps (ms since epoch), oldest first
    this.windowRequests = new Map();
  }

  /**
   * Record a request for `key` and report whether it fits in the window.
   *
   * @param {string} key
   * @param {number} windowMs - Window length in milliseconds.
   * @param {number} maxRequests - Max requests allowed per window.
   * @returns {Promise<{success: boolean, remaining: number, resetAt: number, resetIn: number}>}
   *   `resetAt` is a unix timestamp in seconds; `resetIn` is milliseconds.
   */
  async increment(key, windowMs, maxRequests) {
    const now = Date.now();
    const windowStart = now - windowMs;

    const requests = this.windowRequests.get(key) || [];
    // Drop timestamps that have fallen out of the sliding window.
    const validRequests = requests.filter((t) => t > windowStart);

    if (validRequests.length >= maxRequests) {
      // Fix: base the reset time on the oldest *valid* timestamp. The
      // original used requests[0], which could be a stale entry outside
      // the window, producing a resetAt in the past and a negative resetIn.
      const resetAt = validRequests[0] + windowMs;
      // Fix: persist the pruned list so stale entries don't pile up while
      // the key stays over its limit.
      this.windowRequests.set(key, validRequests);
      return {
        success: false,
        remaining: 0,
        resetAt: Math.ceil(resetAt / 1000),
        resetIn: resetAt - now,
      };
    }

    validRequests.push(now);
    this.windowRequests.set(key, validRequests);
    return {
      success: true,
      remaining: maxRequests - validRequests.length,
      resetAt: Math.ceil((now + windowMs) / 1000),
      resetIn: windowMs,
    };
  }

  /** Refund one slot by removing the oldest recorded timestamp. */
  async decrement(key) {
    const requests = this.windowRequests.get(key);
    if (requests && requests.length > 0) {
      requests.shift();
    }
  }

  /** Forget all recorded requests for `key`. */
  async reset(key) {
    this.windowRequests.delete(key);
  }
}
// Consumed by the Express examples below via require('./limiter').
module.exports = { RateLimiter, MemoryStore };
Storage Backends
Create ratelimit/stores.js with the additional storage backends:
class RedisStore {
  /**
   * Redis-backed sliding-window-log store using a sorted set per key
   * (member: unique request id, score: request timestamp in ms).
   * Suitable for sharing limits across multiple processes.
   *
   * @param {Object} redisClient - ioredis-style client; assumes
   *   `multi().exec()` resolves to an array of [err, result] pairs —
   *   TODO confirm against the client actually used.
   * @param {Object} [options]
   * @param {string} [options.prefix='ratelimit:'] - Key namespace prefix.
   */
  constructor(redisClient, options = {}) {
    this.redis = redisClient;
    this.prefix = options.prefix || 'ratelimit:';
  }

  /**
   * Record a request and report whether it fits in the window.
   * @returns {Promise<{success: boolean, remaining: number, resetAt: number, resetIn: number}>}
   */
  async increment(key, windowMs, maxRequests) {
    const now = Date.now();
    const windowKey = this.prefix + key;
    const resetTime = Math.ceil((now + windowMs) / 1000);
    // Unique member so concurrent requests in the same millisecond
    // don't overwrite each other; kept so it can be removed on rejection.
    const member = `${now}-${Math.random()}`;

    const multi = this.redis.multi();
    multi.zadd(windowKey, now, member);
    multi.zremrangebyscore(windowKey, 0, now - windowMs); // prune expired entries
    multi.zcard(windowKey);
    multi.expire(windowKey, Math.ceil(windowMs / 1000)); // let idle keys expire
    const results = await multi.exec();
    const currentCount = results[2][1]; // zcard result, including this request

    if (currentCount > maxRequests) {
      // Fix: remove the member we just added so rejected requests do not
      // consume window capacity. The original left it in the set, so a
      // continuously-retrying client could stay blocked indefinitely —
      // inconsistent with MemoryStore, which records nothing when over limit.
      await this.redis.zrem(windowKey, member);
      return {
        success: false,
        remaining: 0,
        resetAt: resetTime,
        resetIn: windowMs
      };
    }

    return {
      success: true,
      remaining: maxRequests - currentCount,
      resetAt: resetTime,
      resetIn: windowMs
    };
  }

  /** Refund one slot by removing the oldest entry for `key`. */
  async decrement(key) {
    const windowKey = this.prefix + key;
    const oldest = await this.redis.zrange(windowKey, 0, 0);
    if (oldest.length > 0) {
      // Fix: the original fired zrem inside .then() without awaiting it,
      // leaving a floating promise whose failure would go unobserved.
      await this.redis.zrem(windowKey, oldest[0]);
    }
  }

  /** Forget all recorded requests for `key`. */
  async reset(key) {
    await this.redis.del(this.prefix + key);
  }
}
class TokenBucketStore {
  /**
   * Token-bucket store: each key holds up to `maxRequests` tokens that
   * refill continuously at a rate of maxRequests per windowMs. Allows
   * short bursts while enforcing the average rate. Single-process only.
   */
  constructor() {
    // key -> { tokens, lastRefill, capacity }
    this.buckets = new Map();
  }

  /**
   * Take one token for `key`, refilling first based on elapsed time.
   * @returns {Promise<{success: boolean, remaining: number, resetAt: number, resetIn: number}>}
   */
  async increment(key, windowMs, maxRequests) {
    const now = Date.now();
    let bucket = this.buckets.get(key);
    if (!bucket) {
      bucket = { tokens: maxRequests, lastRefill: now, capacity: maxRequests };
      this.buckets.set(key, bucket);
    }
    // Fix: remember the current limit so decrement() can cap refunds at
    // the real capacity. The original hard-coded a cap of 100, which
    // could inflate buckets whose limit is smaller than 100.
    bucket.capacity = maxRequests;

    // Refill rate: maxRequests tokens per windowMs.
    const msPerToken = windowMs / maxRequests;
    const tokensToAdd = Math.floor((now - bucket.lastRefill) / msPerToken);
    if (tokensToAdd > 0) {
      bucket.tokens = Math.min(maxRequests, bucket.tokens + tokensToAdd);
      // Fix: advance lastRefill only by the time actually converted into
      // whole tokens. The original set lastRefill = now even when zero
      // tokens were added, so calls more frequent than msPerToken
      // discarded all refill progress and could starve the bucket forever.
      bucket.lastRefill = bucket.tokens === maxRequests
        ? now
        : bucket.lastRefill + tokensToAdd * msPerToken;
    }

    if (bucket.tokens < 1) {
      return {
        success: false,
        remaining: 0,
        resetAt: Math.ceil((now + windowMs) / 1000),
        resetIn: windowMs
      };
    }

    bucket.tokens -= 1;
    return {
      success: true,
      remaining: Math.floor(bucket.tokens),
      resetAt: Math.ceil((now + windowMs) / 1000),
      resetIn: windowMs
    };
  }

  /** Refund one token, never exceeding the bucket's capacity. */
  async decrement(key) {
    const bucket = this.buckets.get(key);
    if (bucket) {
      bucket.tokens = Math.min(bucket.tokens + 1, bucket.capacity);
    }
  }

  /** Discard the bucket for `key`; the next increment starts it full. */
  async reset(key) {
    this.buckets.delete(key);
  }
}
// Alternative backends; plug in via `new RateLimiter({ store: ... })`.
module.exports = { RedisStore, TokenBucketStore };
Express Middleware
const { RateLimiter, MemoryStore } = require('./limiter');
const rateLimiter = new RateLimiter({
windowMs: 15 * 60 * 1000,
maxRequests: 100,
keyGenerator: (req) => req.ip,
handler: (req, res) => {
res.status(429).json({
error: 'Too many requests',
message: 'Please try again later'
});
}
});
const strictRateLimiter = new RateLimiter({
windowMs: 60 * 1000,
maxRequests: 10,
keyGenerator: (req) => req.user?.id || req.ip
});
const authRateLimiter = new RateLimiter({
windowMs: 15 * 60 * 1000,
maxRequests: 5,
keyGenerator: (req) => req.ip + ':auth'
});
module.exports = {
rateLimiter,
strictRateLimiter,
authRateLimiter,
RateLimiter
};
Testing
Create ratelimit/limiter.test.js:
const request = require('supertest');
const express = require('express');
const { RateLimiter, MemoryStore } = require('./limiter');

// One shared app limited to 3 requests per 1-second window.
const limiter = new RateLimiter({
  windowMs: 1000,
  maxRequests: 3,
  store: new MemoryStore()
});

const app = express();
app.use(limiter.middleware());
app.get('/api', (req, res) => res.json({ ok: true }));

describe('Rate Limiter', () => {
  it('should allow requests within limit', async () => {
    const res = await request(app).get('/api');
    expect(res.status).toBe(200);
  });

  it('should block requests over limit', async () => {
    // Exhaust the window, then the next request must be rejected.
    for (const _ of [0, 1, 2, 3]) {
      await request(app).get('/api');
    }
    const res = await request(app).get('/api');
    expect(res.status).toBe(429);
  });
});
Summary
You built a rate limiting middleware with multiple algorithms, storage backends, and flexible configuration.