Archiver — Streaming Archive Packaging
A Node.js streaming archive library that supports ZIP and TAR, capable of appending content from multiple data sources.
Use Cases
Use when users need to compress and package, create zip/tar archives, package directories, or programmatically generate compressed files.
Trigger Words
Compress, package, archive, zip, tar, archiver.
Installation
npm install archiver
Quick Start: ZIP Packaging
const fs = require('fs');
const archiver = require('archiver');

// Destination for the archive bytes.
const output = fs.createWriteStream('output.zip');
// 'zip' format; zlib level 9 = maximum compression (slowest).
const archive = archiver('zip', { zlib: { level: 9 } });

// 'close' fires once the file descriptor is closed;
// archive.pointer() reports the total bytes written.
output.on('close', () => {
console.log(`${archive.pointer()} bytes written`);
});

// Non-fatal problems (e.g. a stat failure) surface as 'warning';
// ENOENT (missing file) is safe to ignore, anything else is fatal.
archive.on('warning', (err) => {
if (err.code !== 'ENOENT') throw err;
});
archive.on('error', (err) => { throw err; });

// Wire the archive stream to its destination BEFORE appending data.
archive.pipe(output);

// Append — multiple data sources
archive.append(fs.createReadStream('file.txt'), { name: 'file.txt' }); // Stream
archive.append('string content', { name: 'readme.txt' }); // String
archive.append(Buffer.from('data'), { name: 'data.bin' }); // Buffer
archive.file('local-file.txt', { name: 'renamed.txt' }); // Local file
archive.directory('src/', 'src'); // Directory → subdirectory in archive
archive.directory('dist/', false); // Directory contents → archive root
archive.glob('*.js', { cwd: __dirname }); // Glob match

// Signals that no further entries will be appended.
archive.finalize();
Data Source Overview
| Method | Data Source | name Parameter |
|---|---|---|
| archive.append(stream, { name }) | ReadStream | Required |
| archive.append(string, { name }) | String | Required |
| archive.append(buffer, { name }) | Buffer | Required |
| archive.file(path, { name }) | File path | Optional, can rename |
| archive.directory(path, dest) | Directory | false = contents to root; string = subdirectory name |
| archive.glob(pattern, { cwd }) | Glob match | Auto-uses matched filenames |
TAR Packaging
// TAR format with gzip compression → produces a .tar.gz stream.
const archive = archiver('tar', {
gzip: true, // enable gzip compression of the tar output (default: false)
gzipOptions: { level: 6 } // zlib compression level 0-9 (default: 6)
});
TAR-specific options:
| Option | Description | Default |
|---|---|---|
| gzip | Enable gzip compression | false |
| gzipOptions.level | Compression level 0-9 | 6 |
Events
// 'warning' reports non-fatal problems; treat anything other than
// a missing file (ENOENT) as fatal.
archive.on('warning', (err) => {
if (err.code !== 'ENOENT') {
throw err;
}
console.warn('File not found:', err);
});
// 'error' is always fatal — never leave it unhandled.
archive.on('error', (err) => { throw err; });
// Events on the piped destination stream (from Node.js Stream API)
output.on('close', () => { /* File descriptor closed */ });
output.on('end', () => { /* Data drained */ });
output.on('finish', () => { /* All data written */ });
warning — Non-fatal errors (e.g., file not found, stat failure, etc.); ENOENT can be ignored, others should be thrown.
error — Fatal errors; must be handled.
close (output) — Emitted after the file descriptor is closed; archive.pointer() can be used to get the total byte count at this point.
end (output) — Data drained; emitted regardless of the data source.
progress — Progress tracking, see below.
Progress Tracking
// Fires periodically while entries are being processed.
archive.on('progress', ({ entries, fs }) => {
console.log(`${entries.processed} / ${entries.total} entries`);
console.log(`${fs.processedBytes} / ${fs.totalBytes} bytes`);
});
progress object structure:
{
entries: { total: number, processed: number },
fs: { totalBytes: number, processedBytes: number }
}
Common Patterns
Pattern 1: Responsive Archiving — HTTP Streaming Output
// Stream a zip straight into the HTTP response — nothing touches disk.
app.get('/download', (req, res) => {
// Sets Content-Disposition so the browser saves the file as archive.zip.
res.attachment('archive.zip');
const archive = archiver('zip', { zlib: { level: 1 } }); // Low compression = faster
archive.on('error', (err) => {
console.error('Archiving failed:', err);
if (res.headersSent) {
// The 200 status line is already on the wire — setting a status
// now is a no-op; abort so the client sees a truncated transfer.
res.destroy(err);
} else {
res.status(500).end();
}
});
archive.pipe(res);
archive.directory('user-files/', false);
archive.finalize();
});
Pattern 2: Conditional Append — On-Demand Packaging
// NOTE(review): this snippet omits the destination — in real code,
// archive.pipe(...) must be wired up before finalize() (see Quick Start).
const archive = archiver('zip');
// Dynamically append based on conditions
if (includeSource) {
archive.directory('src/', 'source'); // source tree → "source/" inside the archive
}
if (includeDocs) {
archive.glob('docs/**/*.md', { cwd: __dirname }); // only markdown docs
}
archive.finalize(); // no entries can be added after this point
Pattern 3: In-Memory Archiving — No File Output
const { Writable } = require('stream');

// Collect every chunk the archiver emits instead of writing to disk.
const collected = [];
const memoryStream = new Writable({
write(chunk, enc, cb) {
collected.push(chunk);
cb();
}
});

const archive = archiver('zip');
archive.pipe(memoryStream);
archive.append('hello', { name: 'hello.txt' });
archive.finalize();

// 'finish' fires once every chunk has been written to memoryStream.
memoryStream.on('finish', () => {
const buffer = Buffer.concat(collected);
console.log(`Archive in memory: ${buffer.length} bytes`);
// Can be used for uploading, sending, etc.
});
Pattern 4: Batch Packaging — Multiple Archives in Series
/**
 * Create one zip archive per group of files, strictly one at a time
 * (serial — keeps memory and file-descriptor usage bounded).
 *
 * @param {string[][]} fileGroups - Each inner array holds the file paths for one archive.
 * @param {string} outputDir - Directory that receives the batch-<i>.zip files.
 * @returns {Promise<void>} Resolves when every batch has been written.
 */
async function createBatchedArchives(fileGroups, outputDir) {
for (const [i, files] of fileGroups.entries()) {
await new Promise((resolve, reject) => {
const output = fs.createWriteStream(`${outputDir}/batch-${i}.zip`);
const archive = archiver('zip');
// Without this, a write-stream failure (bad outputDir, disk full, …)
// would never reject and the awaited promise would hang forever.
output.on('error', reject);
archive.on('error', reject);
output.on('close', resolve);
archive.pipe(output);
files.forEach(f => archive.file(f, { name: path.basename(f) }));
archive.finalize();
});
console.log(`Batch ${i} complete`);
}
}
Custom Format Registration
// Register a custom format under a name of your choosing;
// `module` must implement the archiver format interface.
archiver.registerFormat('myformat', module);
const archive = archiver('myformat'); // now usable like 'zip' or 'tar'
Check if a format is registered:
// Guard against using a format that was never registered.
if (archiver.isRegisteredFormat('zip')) {
// ZIP is available
}
Symlink Handling
archive.symlink('target', { name: 'link-name' });
Common Options Reference — archiver(format, options)
const archive = archiver('zip', {
zlib: { level: 9 }, // Compression level 0-9 (9 = smallest output, slowest)
comment: 'my comment', // Comment stored in the ZIP file header
forceLocalTime: true, // Store entry timestamps in local time instead of UTC
forceZip64: false, // Force Zip64 format (needed for archives > 4 GiB — verify against archiver docs)
namePrependSlash: false, // Prepend / to entry filenames
statConcurrency: 4 // Number of concurrent fs.stat operations
});
For the full API reference, see references/api-reference.md.