The Leaky Bucket algorithm is another mechanism for rate limiting. Unlike the token bucket, which can accumulate tokens and therefore permit bursts, the leaky bucket models a physical queue. Requests "pour" into the bucket; if the bucket is full, incoming requests spill over (are dropped). The bucket then "leaks" (processes requests) out of the bottom at a strict, constant rate. This fundamentally smooths traffic bursts into a steady stream.
import time
from collections import deque
class LeakyBucket:
    """Leaky-bucket rate limiter backed by a FIFO queue.

    Requests are appended to a bounded queue and drained ("leaked") at a
    constant rate. When the queue is full, new requests are rejected.
    """

    def __init__(self, capacity: int, leak_rate: float):
        """
        Args:
            capacity: Maximum number of requests the bucket can hold.
            leak_rate: Requests processed (leaked) per second.
        """
        self.capacity = capacity
        self.queue = deque()
        self.leak_rate = leak_rate  # reqs per sec
        # time.monotonic() is immune to wall-clock adjustments (NTP, manual
        # clock changes) that would corrupt the elapsed-time computation.
        self.last_leak = time.monotonic()

    def leak(self) -> None:
        """Drain every request that should have leaked since the last call."""
        now = time.monotonic()
        elapsed = now - self.last_leak
        number_to_leak = int(elapsed * self.leak_rate)
        if number_to_leak > 0:
            for _ in range(min(number_to_leak, len(self.queue))):
                self.queue.popleft()  # Process the request
            # Advance by exactly the time accounted for rather than snapping
            # to `now`: setting last_leak = now would silently discard the
            # fractional remainder, under-leaking (e.g. a rate of 0.5 req/s
            # polled every second would never leak anything).
            self.last_leak += number_to_leak / self.leak_rate

    def add_request(self, req) -> bool:
        """Try to enqueue a request.

        Returns:
            True if the request was accepted, False if the bucket was full
            and the request was dropped.
        """
        self.leak()  # Leak old requests first
        if len(self.queue) < self.capacity:
            self.queue.append(req)
            return True
        return False  # Bucket is full, drop request
class LeakyBucket {
  /**
   * Leaky-bucket rate limiter backed by a FIFO array.
   * @param {number} capacity Maximum queued requests before drops.
   * @param {number} leakRatePerSecond Requests leaked per second.
   */
  constructor(capacity, leakRatePerSecond) {
    this.capacity = capacity;
    this.queue = [];
    this.leakRate = leakRatePerSecond;
    this.lastLeak = Date.now(); // ms timestamp of the last accounted leak
  }

  /** Drain every request that should have leaked since lastLeak. */
  leak() {
    const now = Date.now();
    const elapsedSeconds = (now - this.lastLeak) / 1000;
    const numberToLeak = Math.floor(elapsedSeconds * this.leakRate);
    if (numberToLeak > 0) {
      // Single splice instead of repeated shift(): shift() re-indexes the
      // array each call, making the drain O(n^2) in the worst case.
      this.queue.splice(0, Math.min(numberToLeak, this.queue.length));
      // Advance by the time actually accounted for rather than snapping to
      // `now`, so the fractional remainder carries into the next call.
      // (lastLeak = now would under-leak: a rate of 0.5 req/s polled every
      // second would never leak anything.)
      this.lastLeak += (numberToLeak / this.leakRate) * 1000;
    }
  }

  /**
   * Attempt to enqueue a request.
   * @param {*} req The request to queue.
   * @returns {boolean} true if accepted, false if the bucket is full.
   */
  addRequest(req) {
    this.leak();
    if (this.queue.length < this.capacity) {
      this.queue.push(req);
      return true;
    }
    return false; // Dropped
  }
}