Caching is a fundamental technique for improving application performance by temporarily storing frequently accessed data. When building scalable applications, distributed caching becomes essential for managing large-scale traffic while maintaining low latency. This chapter dives deep into implementing distributed caching in Express.js, covering foundational concepts, practical steps, advanced patterns, and real-world examples.
Caching is the process of storing data in a temporary storage layer so that future requests can be served faster. Instead of fetching the data from a slow database or external API on every request, the application retrieves it from a faster cache layer.
Distributed caching refers to a caching system spread across multiple nodes or servers. Instead of relying on a single cache store, data is distributed among various nodes to ensure scalability and fault tolerance.
Popular distributed caching tools include Redis, Memcached, and Hazelcast.
Install Redis on your system or use a cloud-hosted Redis (e.g., AWS ElastiCache, Azure Cache for Redis).
# Install Redis locally (Debian/Ubuntu; on older releases the package may be
# named redis-server — confirm for your distribution)
sudo apt update
sudo apt install redis
// In-memory response cache, keyed by request URL.
const cache = new Map();

/**
 * Express middleware that serves a cached response when one exists for
 * the request URL, and otherwise stores the outgoing response body in
 * the cache before it is sent to the client.
 *
 * @param {object} req - Express request; req.originalUrl is the cache key.
 * @param {object} res - Express response; its send method is intercepted.
 * @param {Function} next - Passes control to the next handler on a miss.
 */
function cacheMiddleware(req, res, next) {
  const cacheKey = req.originalUrl;

  // Cache hit: reply immediately, skipping the downstream handlers.
  if (cache.has(cacheKey)) {
    return res.send(cache.get(cacheKey));
  }

  // Cache miss: wrap res.send so the body is recorded before delivery.
  res.sendResponse = res.send;
  res.send = (body) => {
    cache.set(cacheKey, body);
    res.sendResponse(body);
  };
  next();
}
// Demo server wiring the in-memory cache middleware in front of one route.
const express = require('express');

const app = express();
app.use(cacheMiddleware); // must be registered before the routes it protects

// Simulated slow endpoint: the 2s timeout stands in for a slow database call.
app.get('/data', function (req, res) {
  const reply = () => res.send({ data: 'Hello, World!' });
  setTimeout(reply, 2000);
});

app.listen(3000, function () {
  console.log('Server running on port 3000');
});
# Install the node-redis client library (the example below uses its v3 callback API)
npm install redis
// Demo server using Redis as a shared (distributed) cache layer.
// NOTE: this uses the callback API of node-redis v3. With node-redis v4+
// the client is promise-based and requires an explicit `await client.connect()`.
const express = require('express');
const redis = require('redis');

const app = express();
const client = redis.createClient(); // Defaults to localhost:6379
client.on('error', (err) => console.log('Redis Error:', err));

/**
 * Express middleware that serves responses from Redis when cached and
 * writes fresh responses into Redis with a 1-hour TTL on a miss.
 *
 * Redis failures are treated as cache misses: the request falls through
 * to the route handler instead of crashing the process. (The previous
 * `if (err) throw err` threw inside an async callback, which Express
 * error handling cannot catch — any Redis error took the server down.)
 *
 * @param {object} req - Express request; req.originalUrl is the cache key.
 * @param {object} res - Express response; its send method is intercepted.
 * @param {Function} next - Passes control onward on a miss or cache error.
 */
function redisCache(req, res, next) {
  const key = req.originalUrl;
  client.get(key, (err, data) => {
    if (err) {
      // Cache unavailable: log and fall through to the real handler.
      console.log('Redis Error:', err);
      return next();
    }
    if (data) {
      return res.send(JSON.parse(data)); // Serve from Redis cache
    }
    // Cache miss: wrap res.send so the body is stored before delivery.
    res.sendResponse = res.send;
    res.send = (body) => {
      client.setex(key, 3600, JSON.stringify(body)); // Cache for 1 hour
      res.sendResponse(body);
    };
    next();
  });
}

app.use(redisCache);

// Simulated slow endpoint; subsequent hits are served from Redis.
app.get('/data', (req, res) => {
  setTimeout(() => res.send({ data: 'Hello, Redis!' }), 2000);
});

app.listen(3000, () => console.log('Server running on port 3000'));
`get` fetches data from the cache; `setex` stores data in the cache with an expiration time (TTL) in seconds.
// Invalidate a stale entry by deleting its key — here, the cached '/data' response.
client.del('/data'); // Manually invalidate cache
Distributed caching is a powerful technique for enhancing the performance and scalability of Express.js applications. By storing frequently accessed data in systems like Redis, applications can handle large-scale traffic with reduced latency. This chapter covered everything from the basics of caching to advanced topics like sharding and monitoring, equipping you with the knowledge to build robust caching solutions. Happy coding! ❤️