← Back to Tutorials
Node.js

Build a Reverse Proxy Server

Difficulty: Advanced Est. Time: ~4 hours

Introduction

A reverse proxy sits between clients and backend servers, forwarding client requests to appropriate servers. It provides load balancing, caching, security, and SSL termination.

In this tutorial, we'll build "ProxyServer" - a reverse proxy with load balancing, caching, and health checking.

What You'll Build
  • HTTP/HTTPS proxy server
  • Load balancing algorithms
  • Response caching
  • Health checking
  • Request routing

Core Concepts

Understanding reverse proxy architecture.

Load Balancing Strategies

  • Round Robin - Sequential distribution
  • Least Connections - Fewest active connections
  • Weighted - Based on server capacity
  • IP Hash - Sticky routing by hashing the client IP

Project Setup

Bash
mkdir reverse-proxy
cd reverse-proxy
npm init -y
npm install express http-proxy-middleware fs-extra

Proxy Core

Let's build the core proxy functionality.

JavaScript
// proxy.js
const http = require('http');
const https = require('https');
const url = require('url');

class ReverseProxy {
    /**
     * @param {Object} [options]
     * @param {string} [options.algorithm='round-robin'] - 'round-robin' or 'least-connections'.
     * @param {number} [options.timeout=30000] - Upstream request timeout in ms.
     */
    constructor(options = {}) {
        this.servers = [];
        this.currentIndex = 0;
        this.algorithm = options.algorithm || 'round-robin';
        this.timeout = options.timeout || 30000;
    }
    
    /** Register a backend server to forward requests to. */
    addServer(host, port) {
        this.servers.push({ host, port, weight: 1, connections: 0 });
    }
    
    /**
     * Pick the next backend according to the configured algorithm.
     * @returns {?Object} A server entry, or null when none are registered.
     */
    getServer() {
        if (this.servers.length === 0) return null;
        
        switch (this.algorithm) {
            case 'round-robin': {
                const server = this.servers[this.currentIndex];
                this.currentIndex = (this.currentIndex + 1) % this.servers.length;
                return server;
            }
            
            case 'least-connections':
                return this.servers.reduce((prev, curr) => 
                    prev.connections < curr.connections ? prev : curr);
            
            default:
                return this.servers[0];
        }
    }
    
    /**
     * Forward an incoming request to a backend and stream the response back.
     * Responds 502 when no backend is available or the upstream fails.
     */
    handleRequest(req, res) {
        const target = this.getServer();
        
        if (!target) {
            res.writeHead(502);
            res.end('Bad Gateway');
            return;
        }
        
        target.connections++;
        
        // 'error' and 'close' can both fire for one failed request; make
        // sure the connection count is only decremented once.
        let released = false;
        const release = () => {
            if (!released) {
                released = true;
                target.connections--;
            }
        };
        
        const proxyReq = http.request({
            hostname: target.host,
            port: target.port,
            path: req.url,
            method: req.method,
            headers: req.headers,
            timeout: this.timeout
        }, (proxyRes) => {
            res.writeHead(proxyRes.statusCode, proxyRes.headers);
            proxyRes.pipe(res);
        });
        
        proxyReq.on('close', release);
        
        // The 'timeout' option only arms the socket timer; it does not abort
        // the request. Destroy it so the 'error' handler produces a 502.
        proxyReq.on('timeout', () => {
            proxyReq.destroy(new Error('Upstream timeout'));
        });
        
        proxyReq.on('error', (err) => {
            // Headers may already be flushed if the upstream died mid-response;
            // writing them again would throw.
            if (!res.headersSent) {
                res.writeHead(502);
            }
            res.end(`Proxy Error: ${err.message}`);
            release();
        });
        
        req.pipe(proxyReq);
    }
}

// Wire up a round-robin proxy in front of two local backend servers.
const proxy = new ReverseProxy({ algorithm: 'round-robin' });

[3001, 3002].forEach((port) => proxy.addServer('localhost', port));

// Every incoming request is delegated straight to the proxy instance.
const server = http.createServer((req, res) => proxy.handleRequest(req, res));

server.listen(8080, () => {
    console.log("Reverse proxy running on port 8080");
});

Load Balancing

Let's add more load balancing algorithms.

JavaScript
// loadbalancer.js
const crypto = require('crypto');

class LoadBalancer {
    constructor() {
        this.servers = [];
    }
    
    /**
     * Register a backend server.
     * @param {string} host
     * @param {number} port
     * @param {number} [weight=1] - Relative capacity; higher gets more traffic.
     */
    addServer(host, port, weight = 1) {
        this.servers.push({
            host, port, weight,
            activeConnections: 0,
            totalRequests: 0,
            lastUsed: Date.now()
        });
    }
    
    // Weighted selection: picks a random point on the cumulative weight line,
    // so each server is chosen in proportion to its weight.
    // (Despite the name, this is weighted-random rather than a strict rotation.)
    weightedRoundRobin() {
        const totalWeight = this.servers.reduce((sum, s) => sum + s.weight, 0);
        let cursor = Math.floor(Math.random() * totalWeight);
        
        for (const server of this.servers) {
            cursor -= server.weight;
            if (cursor < 0) {
                server.totalRequests++;
                return server;
            }
        }
        
        return this.servers[0];
    }
    
    // IP Hash - the same client IP always routes to the same server
    // (as long as the server list is unchanged; this is plain modulo
    // hashing, not consistent hashing).
    ipHash(ip) {
        const hash = crypto.createHash('md5').update(ip).digest('hex');
        // Only parse the first 8 hex digits (32 bits): the full 128-bit
        // digest exceeds Number.MAX_SAFE_INTEGER, so parseInt would yield an
        // imprecise float and the modulo would be unreliable.
        const index = parseInt(hash.slice(0, 8), 16) % this.servers.length;
        return this.servers[index];
    }
    
    // Least "response time": approximated as total requests per active
    // connection — no real latency is measured here. Ties keep the
    // earlier-registered server.
    leastResponseTime() {
        return this.servers.reduce((best, current) => {
            const bestResponseTime = best.totalRequests / (best.activeConnections || 1);
            const currentResponseTime = current.totalRequests / (current.activeConnections || 1);
            return currentResponseTime < bestResponseTime ? current : best;
        });
    }
}

Response Caching

Let's add caching to the proxy.

JavaScript
// cache.js
// In-memory response cache with a size cap and per-entry TTL.
class ProxyCache {
    /**
     * @param {number} [maxSize=100] - Maximum number of cached entries.
     * @param {number} [ttl=300000] - Default time-to-live in ms (5 minutes).
     */
    constructor(maxSize = 100, ttl = 300000) {
        this.cache = new Map();
        this.maxSize = maxSize;
        this.ttl = ttl;
    }
    
    // The key includes the Authorization header so cached responses are
    // never shared between differently-authenticated clients.
    generateKey(method, url, headers) {
        return `${method}:${url}:${headers['authorization'] || ''}`;
    }
    
    /** @returns the cached response, or null when absent or expired. */
    get(key) {
        if (!this.cache.has(key)) return null;
        
        const entry = this.cache.get(key);
        
        // Expired entries are removed lazily, on first access after expiry.
        if (Date.now() > entry.expiresAt) {
            this.cache.delete(key);
            return null;
        }
        
        return entry.response;
    }
    
    /** Store a response; evicts the oldest insertion when the cache is full. */
    set(key, response, ttl = this.ttl) {
        if (this.cache.size >= this.maxSize) {
            // Map iterates in insertion order, so the first key is the oldest.
            const firstKey = this.cache.keys().next().value;
            this.cache.delete(firstKey);
        }
        
        this.cache.set(key, {
            response,
            expiresAt: Date.now() + ttl,
            createdAt: Date.now()
        });
    }
    
    /** Remove every entry whose key contains the given substring. */
    invalidate(pattern) {
        for (const key of this.cache.keys()) {
            if (key.includes(pattern)) {
                this.cache.delete(key);
            }
        }
    }
}

Testing

Bash
# Test the proxy

# Start backend servers
node server1.js &  # on port 3001
node server2.js &  # on port 3002

# Start proxy
node proxy.js

# Make requests
curl http://localhost:8080/api/users
curl http://localhost:8080/api/products

# Check load distribution
for i in {1..10}; do curl -s http://localhost:8080/api/status; done

Testing Checklist
  • Requests are forwarded to backend servers
  • Load balancing distributes traffic
  • Caching works correctly
  • Health checking detects failures

Summary

You've built a complete reverse proxy server.

What You Built

  • Proxy Core - Request forwarding
  • Load Balancing - Multiple algorithms
  • Caching - Response caching
  • Health Checks - Server monitoring

Continue Learning

  • Build an API Gateway
  • Build a Load Balancer