Node.js Zlib Module
Introduction to the Zlib Module
The Zlib module provides bindings to the zlib and brotli compression libraries, enabling you to:
- Compress and decompress files and data streams
- Implement HTTP compression
- Work with compressed file formats (.gz, .zip)
- Optimize bandwidth usage in web applications
Importing the Zlib Module
const zlib = require('zlib');
Compression Methods
The Zlib module supports several compression methods:
Method | Description |
---|---|
Gzip/Gunzip | The most widely used compression format, especially for web content |
Deflate/Inflate | Deflate compression wrapped in a zlib header with an Adler-32 checksum |
DeflateRaw/InflateRaw | The raw deflate algorithm with no header or checksum |
Brotli | Modern compression algorithm offering better ratios (added in Node.js 10.16.0) |
Basic Compression and Decompression
Using Callbacks
const zlib = require('zlib');
// Sample text used to demonstrate the asynchronous callback API.
const input = 'This is some text that will be compressed using the zlib module in Node.js.';

// Gzip-compress the sample, then immediately decompress the result to
// show the round trip is lossless.
zlib.gzip(input, (gzipErr, gzipped) => {
  if (gzipErr) {
    console.error('Compression error:', gzipErr);
    return;
  }
  console.log('Original size:', input.length, 'bytes');
  console.log('Compressed size:', gzipped.length, 'bytes');
  console.log('Compression ratio:', Math.round(100 - (gzipped.length / input.length * 100)) + '%');
  // Inflate the buffer we just produced.
  zlib.gunzip(gzipped, (gunzipErr, restored) => {
    if (gunzipErr) {
      console.error('Decompression error:', gunzipErr);
      return;
    }
    console.log('Decompressed data:', restored.toString());
    console.log('Successfully decompressed:', input === restored.toString());
  });
});
Using Promises
const zlib = require('zlib');
const { promisify } = require('util');
// Promise-returning wrappers around the callback-based zlib functions.
const gzipPromise = promisify(zlib.gzip);
const gunzipPromise = promisify(zlib.gunzip);

/**
 * Gzip-compress `input`, then decompress it again and log the sizes.
 *
 * @param {string|Buffer} input - Data to round-trip through gzip.
 * @returns {Promise<Buffer|undefined>} The compressed buffer, or undefined
 *   when compression or decompression fails (the error is logged).
 */
async function compressAndDecompress(input) {
  try {
    const zipped = await gzipPromise(input);
    console.log('Original size:', input.length, 'bytes');
    console.log('Compressed size:', zipped.length, 'bytes');

    const unzipped = await gunzipPromise(zipped);
    console.log('Decompressed data:', unzipped.toString());
    console.log('Success:', input === unzipped.toString());

    return zipped;
  } catch (err) {
    console.error('Error:', err);
  }
}

// Example usage
const testData = 'This is some test data that will be compressed with the zlib module.';
compressAndDecompress(testData);
Working with Streams
The Zlib module can be used with streams for processing large files or data:
const zlib = require('zlib');
const fs = require('fs');
const path = require('path');
/**
 * Gzip-compress the file at `inputPath`, writing `<inputPath>.gz` next to
 * it and logging a size comparison once the write completes.
 */
function compressFile(inputPath) {
  const outputPath = inputPath + '.gz';

  // Source, sink, and the gzip transform that links them.
  const source = fs.createReadStream(inputPath);
  const sink = fs.createWriteStream(outputPath);
  const gzipStream = zlib.createGzip();

  // Stream the file through the compressor into the .gz output.
  source.pipe(gzipStream).pipe(sink);

  // Surface errors from each stage of the pipeline.
  source.on('error', (err) => console.error('Input error:', err));
  gzipStream.on('error', (err) => console.error('Compression error:', err));
  sink.on('error', (err) => console.error('Output error:', err));

  sink.on('finish', () => {
    console.log(`File compressed successfully: ${outputPath}`);
    // Compare on-disk sizes now that the compressed file is fully written.
    const inputStats = fs.statSync(inputPath);
    const outputStats = fs.statSync(outputPath);
    console.log(`Original size: ${inputStats.size} bytes`);
    console.log(`Compressed size: ${outputStats.size} bytes`);
    console.log(`Compression ratio: ${Math.round(100 - (outputStats.size / inputStats.size * 100))}%`);
  });
}
/**
 * Decompress a gzip file at `inputPath`. The output path drops the `.gz`
 * extension, or gains `.uncompressed` when the input has no `.gz` suffix.
 */
function decompressFile(inputPath) {
  const outputPath = inputPath.endsWith('.gz')
    ? inputPath.slice(0, -3)       // strip the ".gz" suffix
    : inputPath + '.uncompressed'; // fallback for unexpected names

  const source = fs.createReadStream(inputPath);
  const sink = fs.createWriteStream(outputPath);
  const gunzipStream = zlib.createGunzip();

  // Stream the compressed file through the decompressor to disk.
  source.pipe(gunzipStream).pipe(sink);

  // Surface errors from each stage of the pipeline.
  source.on('error', (err) => console.error('Input error:', err));
  gunzipStream.on('error', (err) => console.error('Decompression error:', err));
  sink.on('error', (err) => console.error('Output error:', err));

  sink.on('finish', () => {
    console.log(`File decompressed successfully: ${outputPath}`);
  });
}

// Example usage (assuming you have a text file)
// compressFile('example.txt');
// decompressFile('example.txt.gz');
// Note: Uncomment the above lines to actually run the compression/decompression
console.log('This example shows how to compress and decompress files using streams.');
console.log('Create a text file named "example.txt" and uncomment the function calls to test.');
Note: Using streams is memory-efficient for processing large files since the entire file doesn't need to be loaded into memory at once.
HTTP Compression
The Zlib module is commonly used for HTTP compression to reduce bandwidth usage:
const http = require('http');
const zlib = require('zlib');
// HTTP server that gzip- or deflate-compresses its HTML response when the
// client's Accept-Encoding request header allows it.
const server = http.createServer((req, res) => {
  // Sample response content
  const responseBody = `
<!DOCTYPE html>
<html>
<head>
<title>Zlib Compression Example</title>
</head>
<body>
<h1>HTTP Compression with Zlib</h1>
<p>This content is being compressed with Gzip before sending to your browser.</p>
<p>Compression reduces bandwidth usage and improves page load times.</p>
${'<p>This paragraph is repeated to demonstrate compression efficiency.</p>'.repeat(50)}
</body>
</html>
`;

  // Check if client accepts gzip encoding
  const acceptEncoding = req.headers['accept-encoding'] || '';

  res.setHeader('Content-Type', 'text/html');
  // The body depends on the request's Accept-Encoding, so caches must key
  // on that header — otherwise a shared cache could serve a gzipped body to
  // a client that cannot decompress it.
  res.setHeader('Vary', 'Accept-Encoding');

  if (/\bgzip\b/.test(acceptEncoding)) {
    // Client supports gzip
    res.setHeader('Content-Encoding', 'gzip');
    zlib.gzip(responseBody, (err, compressed) => {
      if (err) {
        res.statusCode = 500;
        res.end('Internal Server Error');
        return;
      }
      res.end(compressed);
    });
  } else if (/\bdeflate\b/.test(acceptEncoding)) {
    // Client supports deflate
    res.setHeader('Content-Encoding', 'deflate');
    zlib.deflate(responseBody, (err, compressed) => {
      if (err) {
        res.statusCode = 500;
        res.end('Internal Server Error');
        return;
      }
      res.end(compressed);
    });
  } else {
    // No compression supported — send the body as-is.
    res.end(responseBody);
  }
});

// Start server on port 8080
const PORT = 8080;
server.listen(PORT, () => {
  console.log(`Server running at http://localhost:${PORT}/`);
  console.log('Open this URL in your browser to see compression in action');
  console.log('The browser will automatically decompress the content');
});
Working with Brotli Compression
Brotli is a modern compression algorithm that often achieves better compression ratios than Gzip:
const zlib = require('zlib');
// Sample data to compress
const input = 'This is some test data that will be compressed with different algorithms for comparison.'.repeat(20);

// Percentage size reduction of `size` relative to the original input.
const reduction = (size) => Math.round(100 - (size / input.length * 100));

/**
 * Compress the sample with gzip, deflate and (when available) brotli,
 * logging each resulting size for comparison.
 */
function compareCompression() {
  console.log(`Original data size: ${input.length} bytes`);

  zlib.gzip(input, (err, gzipped) => {
    if (err) {
      console.error('Gzip error:', err);
      return;
    }
    console.log(`Gzip size: ${gzipped.length} bytes (${reduction(gzipped.length)}% reduction)`);

    zlib.deflate(input, (err, deflated) => {
      if (err) {
        console.error('Deflate error:', err);
        return;
      }
      console.log(`Deflate size: ${deflated.length} bytes (${reduction(deflated.length)}% reduction)`);

      // Brotli landed in Node.js 10.16.0, so feature-detect before using it.
      if (typeof zlib.brotliCompress === 'function') {
        zlib.brotliCompress(input, (err, brotli) => {
          if (err) {
            console.error('Brotli error:', err);
            return;
          }
          console.log(`Brotli size: ${brotli.length} bytes (${reduction(brotli.length)}% reduction)`);
        });
      } else {
        console.log('Brotli compression not available in this Node.js version');
      }
    });
  });
}

// Run the comparison
compareCompression();
Run the example above to compare the output sizes produced by each algorithm.
Note: Brotli compression is available in Node.js 10.16.0 and later versions. It typically achieves better compression ratios but may be slower than Gzip.
Compression Options
You can customize compression behavior with options:
const zlib = require('zlib');
// Sample payload for the option experiments below.
const input = 'This is example content for compression with custom options.'.repeat(50);

/**
 * Compress the sample at the default, fastest and best gzip levels and log
 * each resulting size.
 */
function testCompressionLevels() {
  console.log(`Original size: ${input.length} bytes`);

  // Default compression (level 6)
  zlib.gzip(input, (err, defaultOut) => {
    if (err) throw err;
    console.log(`Default compression (level 6): ${defaultOut.length} bytes`);

    // Fastest compression (level 1)
    zlib.gzip(input, { level: 1 }, (err, fastestOut) => {
      if (err) throw err;
      console.log(`Fast compression (level 1): ${fastestOut.length} bytes`);

      // Best compression (level 9)
      zlib.gzip(input, { level: 9 }, (err, bestOut) => {
        if (err) throw err;
        console.log(`Best compression (level 9): ${bestOut.length} bytes`);
      });
    });
  });
}

/**
 * Compare gzip output when the internal memory level (memLevel, 1-9) is
 * varied; higher levels use more memory for potentially better compression.
 */
function testMemoryLevels() {
  zlib.gzip(input, { memLevel: 9 }, (err, highMemOut) => {
    if (err) throw err;
    console.log(`High memory usage (memLevel 9): ${highMemOut.length} bytes`);

    zlib.gzip(input, { memLevel: 4 }, (err, lowMemOut) => {
      if (err) throw err;
      console.log(`Low memory usage (memLevel 4): ${lowMemOut.length} bytes`);
    });
  });
}

// Run tests
testCompressionLevels();
setTimeout(testMemoryLevels, 1000); // Slight delay to separate console output
Common Zlib options include:
- level: Compression level (0-9, 0 = no compression, 9 = best compression)
- memLevel: Memory usage (1-9, 1 = lowest, 9 = highest)
- strategy: Compression strategy (e.g., Z_DEFAULT_STRATEGY)
- dictionary: Pre-defined dictionary used to seed compression
- windowBits: Base-2 logarithm of the compression window size
Error Handling
Proper error handling is crucial when working with compression:
const zlib = require('zlib');
const fs = require('fs');
/**
 * Gunzip `compressedData`, translating the most common zlib failure codes
 * into descriptive Error messages.
 *
 * @param {Buffer} compressedData - Gzip-compressed bytes.
 * @returns {Promise<Buffer>} The decompressed bytes; rejects on bad input.
 */
function safeDecompress(compressedData) {
  return new Promise((resolve, reject) => {
    zlib.gunzip(compressedData, { finishFlush: zlib.constants.Z_SYNC_FLUSH }, (err, result) => {
      if (!err) {
        resolve(result);
        return;
      }
      // Map well-known zlib error codes onto friendlier messages.
      switch (err.code) {
        case 'Z_DATA_ERROR':
          reject(new Error('Invalid or corrupt compressed data'));
          break;
        case 'Z_BUF_ERROR':
          reject(new Error('Incomplete compressed data'));
          break;
        default:
          reject(err);
      }
    });
  });
}
// Example usage with error handling
async function demonstrateErrorHandling() {
  try {
    // zlib.gzipSync() is synchronous and returns a Buffer directly, so no
    // await is needed. (The original `await zlib.gzipSync(...)` only worked
    // because awaiting a non-promise resolves to the value itself.)
    const validData = zlib.gzipSync('This is valid data');
    console.log('Successfully compressed valid data');

    // Round-trip the valid buffer through safeDecompress().
    const result = await safeDecompress(validData);
    console.log('Successfully decompressed:', result.toString());

    // Feed plain (uncompressed) bytes in — safeDecompress() rejects, which
    // lands in the catch block below.
    const invalidData = Buffer.from('This is not compressed data');
    await safeDecompress(invalidData);
  } catch (err) {
    console.error('Error occurred:', err.message);
  }
}
demonstrateErrorHandling();
Practical Applications
1. Compressing Log Files
const zlib = require('zlib');
const fs = require('fs');
const path = require('path');
/**
 * Compress `logFilePath` to a timestamped `.gz` archive in the same
 * directory, then clear the original file once compression finishes.
 */
function compressLogFile(logFilePath) {
  // ISO timestamp with ':' replaced so it is safe in filenames.
  const timestamp = new Date().toISOString().replace(/:/g, '-');
  const basename = path.basename(logFilePath);
  const outputPath = path.join(
    path.dirname(logFilePath),
    `${basename}-${timestamp}.gz`
  );

  const source = fs.createReadStream(logFilePath);
  const sink = fs.createWriteStream(outputPath);
  const gzipStream = zlib.createGzip();

  // Stream the log through the compressor into the archive.
  source.pipe(gzipStream).pipe(sink);

  // Surface errors from each stage of the pipeline.
  source.on('error', err => console.error(`Read error: ${err.message}`));
  gzipStream.on('error', err => console.error(`Compression error: ${err.message}`));
  sink.on('error', err => console.error(`Write error: ${err.message}`));

  sink.on('finish', () => {
    console.log(`Log file compressed: ${outputPath}`);
    // Optionally, clear the original log file
    fs.writeFile(logFilePath, '', err => {
      if (err) {
        console.error(`Error clearing log file: ${err.message}`);
      } else {
        console.log(`Original log file cleared: ${logFilePath}`);
      }
    });
  });
}

// Example usage
// compressLogFile('server.log');
// Note: Uncomment the line above to compress an actual log file
console.log('This example shows how to compress log files with timestamps.');
2. API Response Compression
const http = require('http');
const zlib = require('zlib');
// Sample API data (imagine this is from a database)
const apiData = {
  users: Array.from({ length: 100 }, (_, i) => ({
    id: i + 1,
    name: `User ${i + 1}`,
    email: `user${i + 1}@example.com`,
    role: i % 3 === 0 ? 'admin' : 'user',
    created: new Date().toISOString(),
    profile: {
      bio: `This is a sample bio for user ${i + 1}. It contains some text to demonstrate compression.`,
      interests: ['programming', 'reading', 'hiking', 'cooking', 'music'],
      settings: {
        notifications: true,
        theme: 'dark',
        language: 'en'
      }
    }
  }))
};

// JSON API server that gzip-compresses /api/users responses when the client
// advertises gzip support via Accept-Encoding.
const server = http.createServer((req, res) => {
  // Only handle GET requests to /api/users
  if (req.method === 'GET' && req.url === '/api/users') {
    const jsonData = JSON.stringify(apiData);
    const acceptEncoding = req.headers['accept-encoding'] || '';

    res.setHeader('Content-Type', 'application/json');
    // The body differs depending on Accept-Encoding, so tell caches to key
    // on that header; without it a shared cache could serve gzipped bytes
    // to a client that never asked for them.
    res.setHeader('Vary', 'Accept-Encoding');

    if (/\bgzip\b/.test(acceptEncoding)) {
      res.setHeader('Content-Encoding', 'gzip');
      zlib.gzip(jsonData, (err, compressed) => {
        if (err) {
          res.statusCode = 500;
          res.end(JSON.stringify({ error: 'Compression failed' }));
          return;
        }
        console.log(`Original size: ${jsonData.length} bytes`);
        console.log(`Compressed size: ${compressed.length} bytes`);
        console.log(`Compression ratio: ${Math.round(100 - (compressed.length / jsonData.length * 100))}%`);
        res.end(compressed);
      });
    } else {
      // No compression
      console.log(`Sending uncompressed response: ${jsonData.length} bytes`);
      res.end(jsonData);
    }
  } else {
    // Not found
    res.statusCode = 404;
    res.end(JSON.stringify({ error: 'Not found' }));
  }
});

// Start server
const PORT = 8080;
server.listen(PORT, () => {
  console.log(`API server running at http://localhost:${PORT}/`);
  console.log('Test the API by visiting: http://localhost:8080/api/users');
});
Advanced Compression Techniques
1. Compression Strategies
Zlib offers different compression strategies that can be more effective for certain types of data:
const zlib = require('zlib');
// Sample data with repeated patterns (good for RLE)
const repeatedData = 'ABC'.repeat(1000);

/**
 * Gzip-compress `data` once per zlib strategy and log a size comparison.
 *
 * @param {string|Buffer} data - Payload to compress.
 * @returns {{name: string, size: number}[]} Compressed size per strategy,
 *   in the order tested. (Previously the function only logged and returned
 *   undefined; returning the results lets callers inspect them — existing
 *   callers that ignore the return value are unaffected.)
 */
function testStrategies(data) {
  const strategies = [
    { name: 'DEFAULT_STRATEGY', value: zlib.constants.Z_DEFAULT_STRATEGY },
    { name: 'FILTERED', value: zlib.constants.Z_FILTERED },
    { name: 'HUFFMAN_ONLY', value: zlib.constants.Z_HUFFMAN_ONLY },
    { name: 'RLE', value: zlib.constants.Z_RLE },
    { name: 'FIXED', value: zlib.constants.Z_FIXED }
  ];
  console.log(`Original size: ${data.length} bytes`);
  return strategies.map(({ name, value }) => {
    const compressed = zlib.gzipSync(data, { strategy: value });
    console.log(`${name.padEnd(20)}: ${compressed.length.toString().padEnd(5)} bytes`);
    return { name, size: compressed.length };
  });
}
testStrategies(repeatedData);
2. Custom Dictionaries
For specific data patterns, custom dictionaries can improve compression ratio:
const zlib = require('zlib');
// Dictionary pre-seeded with field names that recur in the payload below.
const dictionary = Buffer.from('username,password,email,first_name,last_name,created_at,updated_at,status,active,inactive,pending,admin,user,role,permissions');

// Sample record whose keys overlap heavily with the dictionary.
const userData = JSON.stringify({
  username: 'johndoe',
  email: 'john@example.com',
  first_name: 'John',
  last_name: 'Doe',
  role: 'admin',
  status: 'active',
  created_at: new Date().toISOString(),
  updated_at: new Date().toISOString()
});

// Deflate the same payload twice: once plain, once primed with the dictionary.
const compressedWithout = zlib.deflateSync(userData);
const compressedWith = zlib.deflateSync(userData, { dictionary });

const improvementPct = Math.round((1 - (compressedWith.length / compressedWithout.length)) * 100);
console.log('Original size:', Buffer.byteLength(userData), 'bytes');
console.log('Compressed without dictionary:', compressedWithout.length, 'bytes');
console.log('Compressed with dictionary:', compressedWith.length, 'bytes');
console.log('Improvement:', `${improvementPct}%`);

// Decompression must be given the same dictionary used to compress.
const decompressed = zlib.inflateSync(compressedWith, { dictionary });
console.log('Decompressed matches original:', decompressed.toString() === userData);
3. Progressive Compression
Process data in chunks as it becomes available:
const zlib = require('zlib');
const { Transform } = require('stream');
/**
 * Transform stream that passes chunks through unchanged while writing a
 * single-line throughput report (MB processed, MB/s) to stdout.
 */
class ProgressTracker extends Transform {
  constructor(options = {}) {
    super(options);
    this.processedBytes = 0;        // running byte count across all chunks
    this.startTime = Date.now();    // used to compute the transfer rate
  }

  _transform(chunk, encoding, callback) {
    this.processedBytes += chunk.length;
    const elapsed = (Date.now() - this.startTime) / 1000;
    const megabytes = this.processedBytes / 1024 / 1024;
    // Guard against division by zero: when the first chunk arrives within
    // the same millisecond as construction, elapsed is 0 and the original
    // code printed "Infinity MB/s".
    const rate = elapsed > 0 ? (megabytes / elapsed).toFixed(2) : '0.00';
    process.stdout.write(`\rProcessed: ${megabytes.toFixed(2)} MB | ` +
      `Rate: ${rate} MB/s`);
    this.push(chunk);
    callback();
  }
}
// Simulate compressing a large in-memory payload with progress reporting.
// NOTE: nothing is written to disk — the compressed bytes go to stdout
// (interleaved with the progress line), so redirect output if you want
// to keep them.
function processLargeFile() {
  const gzip = zlib.createGzip({ level: 6 });
  const progress = new ProgressTracker();
  // 100 MB of zero-filled bytes — Buffer.alloc() zero-fills, so despite the
  // original comment this data is NOT random and compresses extremely well.
  const data = Buffer.alloc(1024 * 1024 * 100);
  // Wrap the buffer in a readable stream so it can be piped.
  const { Readable } = require('stream');
  const readable = Readable.from(data);
  console.log('Starting compression...');
  // Pipeline: buffer -> progress tracker -> gzip -> stdout.
  readable
    .pipe(progress)
    .pipe(gzip)
    .pipe(process.stdout);
  // 'end' fires once the gzip stream's readable side is fully consumed.
  gzip.on('end', () => {
    console.log('\nCompression complete!');
  });
}
// Uncomment to run (streams ~100 MB of compressed output to stdout)
// processLargeFile();
Performance Considerations
- Compression level trade-offs: Higher levels = better compression but slower processing
- Memory usage: Compression can be memory-intensive, especially with high levels
- When to compress: Only compress data that benefits from compression (text, JSON, etc.)
- Already compressed data: Don't compress files that are already compressed (images, videos, etc.)
- Streaming: Use streams for large files to avoid memory issues
- Thread pool usage: Zlib operations use libuv's thread pool; configure with UV_THREADPOOL_SIZE if needed
Summary
The Node.js Zlib module provides essential compression and decompression functionality for:
- Reducing file sizes and bandwidth usage
- Working with compressed formats
- Implementing HTTP compression
- Processing large data efficiently using streams
Key features include:
- Support for multiple compression algorithms (Gzip, Deflate, Brotli)
- Both synchronous and asynchronous APIs
- Stream-based processing for memory efficiency
- Configurable compression levels and options
Understanding the Zlib module is essential for optimizing data transfer and storage in Node.js applications.