5 min read
Question 58 of 62 · Hard: How to optimize Node.js performance?
Performance optimization techniques.
What You'll Learn
- Profiling techniques
- Optimization strategies
- Best practices
Profiling
Terminal (Bash)
# CPU profiling with V8's built-in profiler; writes an isolate-*.log file
node --prof app.js
# Convert the V8 log into a human-readable report
node --prof-process isolate-*.log > profile.txt
# Using clinic.js: "doctor" diagnoses common performance problems
npx clinic doctor -- node app.js
npx clinic flame -- node app.js
Key Optimizations
1. Use Clustering
code.js (JavaScript)
// Fork one worker process per CPU core so the app can use every core.
const cluster = require('cluster');
const os = require('os');
// NOTE(review): cluster.isMaster is a deprecated alias of isPrimary in
// modern Node (>= 16) — confirm the target Node version before publishing.
if (cluster.isMaster) {
// The primary process only forks workers; it does not serve requests.
for (let i = 0; i < os.cpus().length; i++) {
cluster.fork();
}
} else {
// Each worker runs the actual application.
require('./app');
}
2. Caching
code.js (JavaScript)
// Simple in-process TTL cache keyed by arbitrary values.
const cache = new Map();
// Returns the cached value for `key` if it has not expired; otherwise calls
// fetchFn(), stores the result with an expiry `ttl` ms from now, and returns it.
// NOTE(review): expired entries are never removed and there is no size cap,
// so the Map grows unbounded as distinct keys accumulate — consider eviction.
// NOTE(review): fetchFn's return value is cached as-is; if fetchFn is async,
// the cached value is a Promise — verify that is the intended behavior.
function getCachedData(key, fetchFn, ttl = 60000) {
const cached = cache.get(key);
if (cached && Date.now() < cached.expires) {
return cached.data;
}
const data = fetchFn();
cache.set(key, { data, expires: Date.now() + ttl });
return data;
}
3. Avoid Blocking Operations
code.js (JavaScript)
// NOTE(review): each ❌/✅ pair redeclares the same const — these are
// alternatives shown side by side, not code meant to run as one file.
// The top-level `await` lines also require an ES module (or an async wrapper).
// ❌ Blocks event loop
const data = fs.readFileSync('large-file.txt');
// ✅ Non-blocking
const data = await fs.promises.readFile('large-file.txt');
// ❌ CPU-intensive in main thread
const hash = crypto.pbkdf2Sync(password, salt, 100000, 64, 'sha512');
// ✅ Use worker or async
// Promisify the callback form so the event loop stays free during hashing.
const hash = await new Promise((resolve, reject) => {
crypto.pbkdf2(password, salt, 100000, 64, 'sha512', (err, key) => {
if (err) reject(err);
else resolve(key);
});
});
4. Stream Large Data
code.js (JavaScript)
// ❌ Anti-pattern: buffers the entire file in memory before responding
app.get('/file', (req, res) => {
  res.send(fs.readFileSync('large-file.txt'));
});
// ✅ Better: pipe the file to the response in chunks (constant memory)
app.get('/file', (req, res) => {
  const stream = fs.createReadStream('large-file.txt');
  stream.pipe(res);
});