/**
 * Serialize `events` as gzipped NDJSON (keys decamelized to snake_case) and
 * upload the batch to S3 under an hour-based `dt=` partition prefix.
 * @param {Object[]} events - camelCase event records to upload
 * @returns {Promise<void>} resolves once the S3 upload completes
 */
const uploadEvents = async (events) => {
  // zlib gzip is a Transform: write raw lines in, read compressed bytes out.
  const gzip = zlib.createGzip();
  for (const event of events) {
    const line = JSON.stringify(humps.decamelizeKeys(event));
    gzip.write(`${line}\n`, 'utf8');
  }
  gzip.end();

  const date = new Date().toISOString();
  // First 13 chars of the ISO timestamp = YYYY-MM-DDTHH → one prefix per hour.
  const partitionPrefix = date.substring(0, 13);
  const fileName = `dt=${partitionPrefix}/${date}.json.gz`;

  console.log(`Uploading ${fileName}: ${events.length} events...`);
  await s3
    .upload({
      Bucket: process.env.HN_INSIGHTS_EVENTS_BUCKET || 'hn-insights-events',
      Key: fileName,
      Body: gzip,
    })
    .promise();
  console.log(`Uploading ${fileName} done`);
}
// NOTE(review): truncated fragment — the pipe chain begins on an earlier line
// and the error callback is cut off here. Throwing from a stream 'error'
// handler surfaces as an uncaught exception; confirm the intended handling.
.pipe(res) .on('error', (err) => { throw err;
// Compress the file, encrypt it, and stream it out over `req`.
//
// FIX: crypto.createCipher() is deprecated and documented as unsafe with
// authenticated/counter modes such as GCM (it derives the key via MD5 with no
// IV). Derive a real key and supply an explicit random IV via createCipheriv.
// NOTE(review): the receiver must be given `iv` (and the GCM auth tag after
// 'finish') out of band to decrypt/verify — confirm the protocol carries them.
const key = crypto.scryptSync('secret', 'salt', 24); // aes-192 → 24-byte key; use a random per-deployment salt in production
const iv = crypto.randomBytes(12);                   // 12-byte IV is the recommended GCM nonce size
fs.createReadStream(file)
  .pipe(zlib.createGzip())
  .pipe(crypto.createCipheriv('aes-192-gcm', key, iv))
  .pipe(req)
  .on('finish', () => { console.log('File successfully sent') })
// Minimal static file server that gzips every response on the fly.
//
// FIX: `root + req.url` previously allowed directory traversal — a request
// like /../../etc/passwd would be served from outside `root`. Reject any
// path containing a '..' segment before touching the filesystem.
http.createServer((req, res) => {
  console.log(req.headers);
  console.log('');
  const file = root + req.url;
  if (req.url.includes('..')) {
    // Path tries to escape the document root — refuse it outright.
    res.statusCode = 403;
    res.end('FORBIDDEN');
    return;
  }
  if (fs.existsSync(file)) {
    // Client is told the body is gzip so it transparently decompresses.
    res.setHeader('Content-Encoding', 'gzip');
    fs.createReadStream(file).pipe(zlib.createGzip()).pipe(res);
  } else {
    res.statusCode = 404;
    res.end(file + ' NOT FOUND!');
  }
}).listen(8080);
// Standard Input is piped to a gzip stream. The gzip stream is piped into a file stream.
// Flush the compressor every 100 ms so partially-compressed output reaches the
// file on disk instead of accumulating in zlib's in-memory buffer.
setInterval(() => {
  gzip.flush();
}, 100);
// Feed one response chunk into the shared gzip stream, switching the response
// over to gzip encoding lazily on the very first write.
const press = (data, encoding) => {
  const alreadyGzipped = res.getHeader('content-encoding');
  if (!alreadyGzipped) {
    // First chunk: flag the pass, advertise gzip, and drop the now-stale
    // content-length since the compressed size is unknown up front.
    res.compressPass = true;
    res.setHeader('content-encoding', 'gzip');
    res.removeHeader('content-length');
    vary(res, 'content-encoding');
  }
  zip.write(Buffer.from(data, encoding));
}
/**
 * Compress the file at `filePath` into `${filePath}.gz`.
 * Each chunk read from the source is piped through a gzip transform and on
 * into the destination write stream, so large files use constant memory.
 *
 * FIX: errors on the read, gzip, or write stream were previously unhandled
 * and would crash the process (e.g. ENOENT, EACCES); report them instead.
 */
const onStreamError = (err) => { console.error('Compression failed:', err); };
fs.createReadStream(filePath)
  .on('error', onStreamError)
  .pipe(zlib.createGzip())
  .on('error', onStreamError)
  .pipe(fs.createWriteStream(`${filePath}.gz`))
  .on('error', onStreamError)
  .on('finish', () => { console.log('File successfully compressed') })
// Once the gzip stream has emitted its last byte, close the destination too.
gzipStream.on('end', function () {
  destStream.end();
})
// Stream the current file through the cipher, then the gzip transform, and
// finally into the output file; log once everything is flushed.
// NOTE(review): gzip here runs on ciphertext, which is high-entropy and
// essentially incompressible — confirm whether compress-before-encrypt was
// intended (changing the order would change the output file format).
readStream
  .pipe(encryptStream)
  .pipe(gzip)
  .pipe(writeStream)
  .on('finish', () => {
    console.log('Encription and compression done.');
  });
// NOTE(review): truncated fragment — the pipe chain begins on an earlier line
// and the error callback is cut off here. Throwing from a stream 'error'
// handler surfaces as an uncaught exception; confirm the intended handling.
.pipe(res) .on('error', (err) => { throw err;
// Select a file over 1GB
// Stream-compress `file` into `${file}.gz` in constant memory.
// FIX: errors on the read, gzip, or write stream were previously unhandled
// and would crash the process (e.g. missing file); report them instead.
const reportError = (err) => console.error("Compression failed:", err);
fs.createReadStream(file)
  .on("error", reportError)
  .pipe(zlib.createGzip())
  .on("error", reportError)
  .pipe(fs.createWriteStream(`${file}.gz`))
  .on("error", reportError)
  .on("finish", () => console.log("Successfully compressed"));