// Minimal static file server that gzips every response on the fly.
// NOTE(review): assumes `http`, `fs`, `zlib`, and `root` are in scope elsewhere in the file.
http.createServer((req, res) => {
  console.log(req.headers);
  console.log('');

  // Decode the URL defensively: a malformed percent-escape would otherwise
  // throw a URIError out of the request handler.
  let requestPath;
  try {
    requestPath = decodeURIComponent(req.url);
  } catch (e) {
    res.statusCode = 400;
    res.end('BAD REQUEST');
    return;
  }

  // BUG FIX: the original concatenated `root + req.url` unchecked, allowing
  // path traversal ("/../etc/passwd") to escape the document root.
  if (requestPath.includes('..')) {
    res.statusCode = 403;
    res.end('FORBIDDEN');
    return;
  }

  const file = root + requestPath;

  // `existsSync` alone is true for directories, which would make
  // createReadStream emit an error after headers were sent — require a file.
  if (fs.existsSync(file) && fs.statSync(file).isFile()) {
    res.setHeader('Content-Encoding', 'gzip');
    fs.createReadStream(file)
      // pipe() does not forward errors; without this, a read failure
      // (e.g. permission denied) crashes the process.
      .on('error', () => {
        res.statusCode = 500;
        res.end();
      })
      .pipe(zlib.createGzip())
      .pipe(res);
  } else {
    res.statusCode = 404;
    res.end(file + ' NOT FOUND!');
  }
}).listen(8080);
/**
 * Gzip a local file and stream it to the static S3 bucket under
 * `sdk/<version>/<fileName>`.
 *
 * @param {string} filePath - Directory containing the file; concatenated
 *   directly with fileName, so it must end with a path separator —
 *   TODO confirm with callers.
 * @param {string} fileName - Name of the file on disk; also used as the
 *   S3 key suffix.
 * @throws Propagates any upload or read error (process-fatal if uncaught,
 *   since it is rethrown from an event handler).
 */
function uploadFileToS3(filePath, fileName) {
  const s3Stream = S3Stream(new AWS.S3());
  const read = fs.createReadStream(filePath + fileName);
  const compress = zlib.createGzip();
  const upload = s3Stream.upload({
    Bucket: "static.smartystreets.com",
    Key: "sdk/" + version + "/" + fileName,
    StorageClass: "STANDARD",
    ContentType: "application/javascript",
    ContentEncoding: "gzip",
  });

  // BUG FIX: the original did `throw new e`, which tries to invoke the error
  // instance as a constructor and raises a TypeError, masking the real error.
  upload.on("error", e => { throw e; });
  upload.on("part", console.log);
  upload.on("uploaded", console.log);

  // pipe() does not forward source errors downstream — surface read failures
  // (e.g. file not found) the same way upload failures are surfaced.
  read.on("error", e => { throw e; });

  read
    .pipe(compress)
    .pipe(upload);
}
/**
 * Compress the file at `filePath` with gzip, streaming it chunk by chunk
 * into `${filePath}.gz` (constant memory regardless of file size).
 *
 * NOTE(review): `pipe` does not propagate 'error' events downstream, so the
 * original chain would crash with an unhandled 'error' if the read or gzip
 * stage failed — each stage now gets an explicit handler. `filePath` is
 * assumed to be defined elsewhere in the file.
 */
fs.createReadStream(filePath)
  .on('error', e => console.error('Read failed:', e))
  .pipe(zlib.createGzip())
  .on('error', e => console.error('Compression failed:', e))
  .pipe(fs.createWriteStream(`${filePath}.gz`))
  .on('error', e => console.error('Write failed:', e))
  // 'finish' fires on the write stream (pipe returns its destination),
  // i.e. once the compressed output is fully flushed to disk.
  .on('finish', () => { console.log('File successfully compressed') })
/**
 * GET /sitemap.xml — stream a gzip-compressed sitemap built from the first
 * 1000 pages, caching the compressed buffer on `app.locals.sitemap` so
 * subsequent requests are served without rebuilding.
 */
router.get('/sitemap.xml', function(req, res) {
  res.header('Content-Type', 'application/xml');
  res.header('Content-Encoding', 'gzip');

  // Fast path: serve the gzip buffer cached by a previous request.
  if (req.app.locals.sitemap) {
    res.send(req.app.locals.sitemap);
    return;
  }

  try {
    const hostname = 'https://' + req.app.locals.config.domain;
    const smStream = new SitemapStream({ hostname });
    const pipeline = smStream.pipe(createGzip());

    // NOTE(review): this try/catch cannot catch errors thrown inside the
    // async callback below — only synchronous setup failures.
    Page.getByRange(0, 1000, pages => {
      pages.forEach(page => {
        smStream.write({ url: `/page/` + page.link });
      });

      // Cache the compressed buffer once the stream completes. BUG FIX: the
      // original left this promise's rejection unhandled.
      streamToPromise(pipeline)
        .then(sm => (req.app.locals.sitemap = sm))
        .catch(e => console.error(e));

      smStream.end();

      // BUG FIX: the original re-threw stream errors from an async event
      // handler, which escapes Express entirely and crashes the process.
      // Log and terminate the response instead.
      pipeline.pipe(res).on('error', e => {
        console.error(e);
        res.end();
      });
    });
  } catch (e) {
    console.error(e);
    res.status(500).end();
  }
});