How to use rate limiting in Node.js

Rate limiting restricts the number of requests a client can make to your API within a time window, preventing abuse and ensuring fair resource usage. As the creator of CoreUI with 12 years of Node.js backend experience, I’ve implemented rate limiting strategies that protected APIs handling billions of requests daily from DDoS attacks and abuse for enterprise applications.

A practical approach for Express applications is the express-rate-limit middleware, backed by Redis when you need limits shared across multiple server instances.

Basic Rate Limiting

Install package:

npm install express-rate-limit

Implement:

const express = require('express')
const rateLimit = require('express-rate-limit')

const app = express()

// 15-minute fixed window, expressed once so the intent is obvious.
const WINDOW_MS = 15 * 60 * 1000

// Allow each IP at most 100 requests per window; excess requests get a
// 429 response carrying the message below.
const limiter = rateLimit({
  windowMs: WINDOW_MS,
  max: 100,
  message: 'Too many requests from this IP, please try again later'
})

// Registering the limiter app-wide applies it to every route.
app.use(limiter)

app.get('/api/data', (req, res) => {
  res.json({ message: 'Success' })
})

app.listen(3000)

Per-Route Rate Limiting

const express = require('express')
const rateLimit = require('express-rate-limit')

const app = express()

// Both limiters share the same 15-minute window length.
const WINDOW_MS = 15 * 60 * 1000

// Tight budget for authentication: 5 attempts per window per IP makes
// credential-stuffing and brute-force attempts far slower.
const authLimiter = rateLimit({
  windowMs: WINDOW_MS,
  max: 5,
  message: 'Too many login attempts, please try again later'
})

// Looser budget for ordinary API traffic.
const apiLimiter = rateLimit({
  windowMs: WINDOW_MS,
  max: 100,
  message: 'Too many requests'
})

app.post('/api/login', authLimiter, (req, res) => {
  // Login logic
  res.json({ success: true })
})

app.get('/api/users', apiLimiter, (req, res) => {
  // Users logic
  res.json({ users: [] })
})

app.listen(3000)

Redis Store for Distributed Systems

Install Redis store:

npm install rate-limit-redis redis

Implement:

const express = require('express')
const rateLimit = require('express-rate-limit')
// rate-limit-redis v3+ uses a named export; the old default-export form
// (`const RedisStore = require('rate-limit-redis')`) throws when constructed.
const { RedisStore } = require('rate-limit-redis')
const { createClient } = require('redis')

const app = express()

// node-redis v4+ takes a connection URL (the legacy `host`/`port` options
// are ignored) and requires an explicit connect() call before use.
const redisClient = createClient({ url: 'redis://localhost:6379' })
redisClient.connect().catch((err) => {
  console.error('Redis connection failed:', err)
})

const limiter = rateLimit({
  // rate-limit-redis v3+ no longer accepts a raw `client` option; it calls
  // back into Redis through the sendCommand function you provide.
  store: new RedisStore({
    sendCommand: (...args) => redisClient.sendCommand(args),
    prefix: 'rate-limit:'
  }),
  windowMs: 15 * 60 * 1000, // 15 minutes
  max: 100 // per client key, shared across all app instances via Redis
})

app.use('/api/', limiter)

app.listen(3000)

Custom Key Generator

Rate limit by user ID instead of IP:

// Resolve the rate-limit bucket key: authenticated users are tracked by
// their user ID, anonymous clients by IP address.
const resolveClientKey = (req) => {
  return req.user ? req.user.id.toString() : req.ip
}

// Admin accounts bypass rate limiting entirely.
const isAdmin = (req) => Boolean(req.user && req.user.role === 'admin')

const limiter = rateLimit({
  windowMs: 15 * 60 * 1000, // 15 minutes
  max: 100,
  keyGenerator: resolveClientKey,
  skip: isAdmin
})

app.use('/api/', limiter)

Custom Response

const limiter = rateLimit({
  windowMs: 15 * 60 * 1000, // 15 minutes
  max: 100,
  // `onLimitReached` was removed in express-rate-limit v6, so a callback
  // passed under that key is silently ignored. Do the logging inside
  // `handler`, which receives (req, res, next, options) and runs exactly
  // when a request exceeds the limit.
  handler: (req, res, next, options) => {
    console.warn(`Rate limit reached for ${req.ip}`)

    // req.rateLimit.resetTime is a Date; clients expect retry-after as a
    // number of seconds, so convert (and clamp at zero).
    const resetTime = req.rateLimit.resetTime
    const retryAfter = resetTime
      ? Math.max(0, Math.ceil((resetTime.getTime() - Date.now()) / 1000))
      : undefined

    res.status(429).json({
      error: 'Too many requests',
      retryAfter
    })
  }
})

Standard Rate Limit Headers (note: express-rate-limit uses a fixed window that fully resets every windowMs — it is not a true sliding window)

const rateLimit = require('express-rate-limit')

// NOTE(review): despite the section title, express-rate-limit counts
// requests in a FIXED window — the counter fully resets every windowMs.
const limiter = rateLimit({
  windowMs: 60 * 1000, // 1-minute window
  max: 10, // at most 10 requests per key per window
  skipSuccessfulRequests: false, // count requests that succeed (2xx)
  skipFailedRequests: false, // count requests that fail (4xx/5xx)
  standardHeaders: true, // emit draft-standard RateLimit-* response headers
  legacyHeaders: false // suppress the legacy X-RateLimit-* headers
})

app.use('/api/', limiter)

Headers returned (per the IETF draft standard, RateLimit-Reset is the number of seconds until the window resets, not a Unix timestamp):

RateLimit-Limit: 10
RateLimit-Remaining: 5
RateLimit-Reset: 42

Token Bucket Algorithm

/**
 * Classic token-bucket rate limiter.
 *
 * The bucket starts full with `capacity` tokens and is topped up
 * continuously at `refillRate` tokens per second, never exceeding
 * `capacity`. Each permitted request consumes one token, so bursts up to
 * `capacity` are allowed while the long-run rate is bounded by
 * `refillRate`.
 */
class TokenBucket {
  /**
   * @param {number} capacity - maximum number of tokens (burst size)
   * @param {number} refillRate - tokens replenished per second
   */
  constructor(capacity, refillRate) {
    this.capacity = capacity
    this.tokens = capacity // bucket starts full
    this.refillRate = refillRate
    this.lastRefill = Date.now()
  }

  /**
   * Attempt to take one token.
   * @returns {boolean} true if a token was consumed (request may proceed)
   */
  tryConsume() {
    this.refill()

    if (this.tokens < 1) {
      return false
    }

    this.tokens -= 1
    return true
  }

  /** Replenish tokens in proportion to the time elapsed since the last refill. */
  refill() {
    const now = Date.now()
    const elapsedSeconds = (now - this.lastRefill) / 1000
    const replenished = this.tokens + elapsedSeconds * this.refillRate

    this.tokens = Math.min(this.capacity, replenished)
    this.lastRefill = now
  }
}

// One bucket per client IP.
// NOTE(review): this Map grows without bound as new IPs arrive — in
// production, evict idle buckets periodically or use an LRU/TTL cache.
const buckets = new Map()

/**
 * Express middleware applying per-IP token-bucket rate limiting.
 * Allows bursts of up to 10 requests, sustained at 1 request/second;
 * over-limit requests receive a 429 JSON error.
 */
function rateLimitMiddleware(req, res, next) {
  const clientKey = req.ip

  let bucket = buckets.get(clientKey)
  if (bucket === undefined) {
    bucket = new TokenBucket(10, 1) // 10-token burst, refills 1 token/sec
    buckets.set(clientKey, bucket)
  }

  if (!bucket.tryConsume()) {
    res.status(429).json({ error: 'Rate limit exceeded' })
    return
  }

  next()
}

app.use('/api/', rateLimitMiddleware)

Multiple Tiers

// Factory so each tier gets its own independent limiter instance.
const createLimiter = (windowMs, max) => {
  return rateLimit({ windowMs, max })
}

// Create one limiter per tier ONCE, at startup. The original built this
// object inside the middleware, i.e. on EVERY request — each request then
// got brand-new limiters with fresh in-memory stores, so counts never
// accumulated and the limits never actually fired.
const tierLimiters = {
  free: createLimiter(15 * 60 * 1000, 50),
  pro: createLimiter(15 * 60 * 1000, 500),
  enterprise: createLimiter(15 * 60 * 1000, 5000)
}

/**
 * Apply the rate limiter matching the user's subscription tier.
 * Unauthenticated requests and unrecognized tiers fall back to the
 * most restrictive (free) limiter.
 */
function tierBasedLimiter(req, res, next) {
  const tier = req.user?.tier || 'free'
  const limiter = tierLimiters[tier] ?? tierLimiters.free
  limiter(req, res, next)
}

app.use('/api/', tierBasedLimiter)

Best Practice Note

This is the same rate limiting strategy we use in CoreUI enterprise APIs to prevent abuse and ensure fair usage. Always implement rate limiting on public endpoints, use Redis for distributed systems across multiple servers, and return clear error messages with retry-after information. Set stricter limits on authentication endpoints and implement tiered limits based on user subscription levels.

For related security patterns, check out how to prevent brute force attacks in Node.js and how to implement authentication in Node.js.


Speed up your responsive apps and websites with fully-featured, ready-to-use open-source admin panel templates—free to use and built for efficiency.


About the Author