// Check the availability of each URL listed (one per line) in the file
// given as the first CLI argument, running at most 2 HEAD requests at a
// time, and append the results to results.txt.
fs.createReadStream(process.argv[2])
  .pipe(split())
  .pipe(new LimitedParallelStream(2, (url, enc, push, done) => {
    // split() emits an empty chunk for a trailing newline — skip it.
    if (!url) {
      return done();
    }
    request.head(url, (err, response) => {
      push(url + ' is ' + (err ? 'down' : 'up') + '\n');
      done();
    });
  }))
  .pipe(fs.createWriteStream('results.txt'))
  .on('finish', () => console.log('All urls were checked'));
// Forward each line emitted by split() to the application logger.
split().on('data', (line) => logger.info(line));
// Parse the stream line-by-line as JSON, tag each record with a
// sequential id, and print it (n is a counter declared outside this view).
readStream
  .pipe(split(JSON.parse))
  .on('data', (record) => {
    record.id = ++n;
    console.log(`${n}: ${JSON.stringify(record)}`);
  });
// Count the lines in the syslog file by splitting the read stream on
// newlines and incrementing once per chunk.
// NOTE(review): this snippet is truncated here — the close of the 'data'
// handler and any final reporting of `count` lie outside this view.
let count = 0; fs.createReadStream(syslogFile) .pipe(split()) .on('data', function (chunk) { count++;
/**
 * Splits `input` into newline-delimited JSON records and reports the
 * `archived_snapshots.closest` entry of each one through `callback`
 * (`false` when the record has none).
 *
 * NOTE(review): `callback` fires once per parsed line, and again with an
 * error if the stream errors — callers should tolerate multiple calls.
 *
 * @param {stream.Readable} input    stream of newline-delimited JSON
 * @param {Function}        callback node-style (err, closest) callback
 */
function parseClosest(input, callback) {
  debug('start');
  input
    // Surface stream errors to the caller.
    .on('error', (err) => {
      debug('error', err);
      callback(err, null);
    })
    // One JSON-parsed object per input line.
    .pipe(split(JSON.parse))
    .on('data', (record) => {
      let closest = false;
      debug('data', record);
      if (record && record.archived_snapshots && record.archived_snapshots.closest) {
        closest = record.archived_snapshots.closest;
      }
      callback(null, closest);
    });
}
// HTTP request logging: morgan in 'dev' format, with noisy endpoints
// filtered out and every kept line mirrored into all.log (as HTML) and
// to stdout.
app.use(morgan('dev', {
  // Suppress log lines whose final path segment is in the ignore list.
  skip: function(req, res) {
    var skipArray = ["/update_logs", "/get_pourcent_generation", "/status", "/completion", "/watch", "/"];
    var currentURL = req.originalUrl;
    // Drop the query string, if present.
    if (currentURL.indexOf("?") != -1) {
      currentURL = currentURL.split("?")[0];
    }
    // Only the last path segment is compared against the ignore list.
    var segments = currentURL.split("/");
    if (skipArray.indexOf("/" + segments[segments.length - 1]) != -1) {
      return true;
    }
    // Implicitly returns undefined (falsy) => log the request.
  },
  // morgan writes each formatted line into this stream.
  stream: split().on('data', function(line) {
    if (allLogStream.bytesWritten < 5000) {
      if (globalConf.env != "develop") {
        // Outside develop: timestamp each line.
        allLogStream.write(moment().format("MM-DD HH:mm:ss") + ": " + ansiToHtml.toHtml(line) + "\n");
        process.stdout.write(moment().format("MM-DD HH:mm:ss") + " " + line + "\n");
      } else {
        allLogStream.write(ansiToHtml.toHtml(line) + "\n");
        process.stdout.write(line + "\n");
      }
    } else {
      // Truncate all.log once more than 5000 bytes have been written,
      // then reset the counter so logging resumes.
      fs.writeFileSync(path.join(__dirname, 'all.log'), '');
      allLogStream.bytesWritten = 0;
    }
  })
}));
// Fragment: splits the upstream data on ",\n" delimiters; the chain
// continues past the trailing dot, outside this view.
pipe(split(',\n')).
// Check every URL listed in the input file (first CLI argument) through
// the ParallelStream transformer and write the outcomes to
// urlList-results.txt, reporting the total elapsed time at the end.
fs.createReadStream(process.argv[2])
  .pipe(split()) // one chunk per line
  .pipe(new ParallelStream(transformer))
  .pipe(fs.createWriteStream('urlList-results.txt')) // destination stream
  .on('finish', () => {
    console.log('All urls were checked');
    console.log(`Execution time : ${Date.now()-startTime}(ms)`);
  });
// Same URL-checking pipeline, built on through2-parallel with a
// concurrency limit of 2 instead of a custom stream class.
fs.createReadStream(process.argv[2])
  .pipe(split()) // one chunk per line
  .pipe(throughParallel.obj({concurrency: 2}, transformer))
  .pipe(fs.createWriteStream('urlList-results.txt')) // destination stream
  .on('finish', () => {
    console.log('All urls were checked');
    console.log(`Execution time : ${Date.now()-startTime}(ms)`);
  });
// Check each URL from the input file with through2-parallel (at most 2
// in flight) and append the results to results.txt.
fs.createReadStream(process.argv[2])
  .pipe(split())
  .pipe(throughParallel.obj({concurrency: 2}, function (url, enc, done) {
    if (!url) {
      return done(); // skip the empty trailing chunk split() can emit
    }
    request.head(url, (err, response) => {
      // `this` is the through stream — the outer callback must stay a
      // plain `function` (not an arrow) for this binding to work.
      this.push(url + ' is ' + (err ? 'down' : 'up') + '\n');
      done();
    });
  }))
  .pipe(fs.createWriteStream('results.txt'))
  .on('finish', () => console.log('All urls were checked'));
// Check every URL listed in the input file (first CLI argument) through
// the LimitedParallelStream transformer (bounded by `concurrency`) and
// write the outcomes to urlList-results.txt, reporting elapsed time.
fs.createReadStream(process.argv[2])
  // ensures outputting each line on a different chunk
  .pipe(split())
  .pipe(new LimitedParallelStream(concurrency, transformer))
  .pipe(fs.createWriteStream('urlList-results.txt')) // destination stream
  .on('finish', () => {
    // Fixed: terminate the statement explicitly instead of relying on
    // ASI, matching the sibling pipelines in this file.
    console.log('All urls were checked');
    console.log(`Execution time : ${Date.now()-startTime}(ms)`);
  });
fs.createReadStream(process.argv[2])            // [1] source: URL list file
  .pipe(split())                                // [2] one line per chunk
  .pipe(new ParallelStream((url, enc, done, push) => {  // [3] parallel checks
    // split() emits an empty chunk for a trailing newline — skip it.
    if (!url) return done();
    request.head(url, (err, response) => {
      push(url + ' is ' + (err ? 'down' : 'up') + '\n');
      done();
    });
  }))
  .pipe(fs.createWriteStream('results.txt'))    // [4] sink
  .on('finish', () => console.log('All urls were checked'));
// Check each URL from the input file in parallel and record whether it
// is up or down in results.txt.
fs.createReadStream(process.argv[2])
  .pipe(split())
  .pipe(new ParallelStream((url, enc, push, done) => {
    // Defensive `done && done()` guard kept from the original —
    // presumably `done` can be missing in some invocation; verify
    // against ParallelStream before simplifying.
    if (!url) {
      return done && done();
    }
    request.head(url, (err, response) => {
      push(`${url} is ${err ? "down" : "up"} \n`);
      done && done();
    });
  }))
  .pipe(fs.createWriteStream("results.txt"))
  .on("finish", () => console.log("All urls were checked"));
// Route every line produced by split() into the logger.
split().on('data', (chunk) => {
  logger.info(chunk);
});