// tar extraction: buffer each archive entry fully in memory, keyed by its
// path inside the archive (`header.name`), then write everything out in one
// go via writeFiles() when the archive is done.
// FIXME(review): as captured here, `extract.on('finish', ...)` is registered
// INSIDE the per-entry callback — that attaches a new 'finish' listener for
// every entry, so writeFiles() would run once per entry and the emitter
// would leak listeners. Verify against the full file; the registration very
// likely belongs after the 'entry' handler, not inside it.
// NOTE(review): the `stream` callback parameter shadows the `stream` module
// used earlier on this same line (`new stream.PassThrough()`).
// (Snippet is truncated — the tail of the 'finish' handler is not visible.)
let tarStream = new stream.PassThrough(); extract.on('entry', (header, stream, cb) => { let buffers = []; stream.on('data', (chunk) => buffers.push(chunk)); stream.on('end', () => { files[header.name] = Buffer.concat(buffers); cb(); }); extract.on('finish', () => { try { writeFiles(directory, files);
// tar extraction: collect every entry into the in-memory `files` map
// (path → Buffer), then on archive completion write each buffer to disk
// under `pathname`.
// FIXME(review): `extract.on('finish', ...)` appears to be registered inside
// the 'entry' callback here — one 'finish' listener per archive entry, which
// would both leak listeners and repeat the write loop. Confirm the intended
// placement in the full file.
// NOTE(review): the `stream` callback parameter shadows the imported
// `stream` module referenced earlier on this same line.
// (Snippet is truncated mid 'finish' handler.)
let tarStream = new stream.PassThrough(); extract.on('entry', (header, stream, cb) => { let buffers = []; stream.on('data', data => { buffers.push(data); }); stream.on('end', () => { files[header.name] = Buffer.concat(buffers); return cb(); }); extract.on('finish', () => { Object.keys(files).forEach(filename => { let outputPath = path.join(pathname, filename);
/**
 * Builds a PassThrough tap whose only job is to flip the `intercepted2`
 * flag once the stream has been fully consumed (readable-side 'end').
 * @returns {stream.PassThrough} the tap stream
 */
var interceptorFactory2 = function () {
  const tap = new stream.PassThrough();
  tap.on('end', function () {
    intercepted2 = true;
  });
  return tap;
};
// When the optimize stream stops emitting data, base64-encode the chunks
// collected on `this.data`, attach the result to the payload, and push it
// back to the client over the socket.
// NOTE(review): plain Writable streams emit 'finish', not 'end' — this only
// fires if writeStream is readable/duplex; confirm its type in context.
writeStream.on('end', function () {
  // `this` is writeStream; must stay a `function` (not arrow) for that bind.
  const encoded = Buffer.concat(this.data).toString('base64');
  payload.data = mime + encoded;
  payload.optimized_size = payload.data.length;
  logger.info(app_id, 'optimized_to', payload.data.length);
  socket.emit('fruum:optimize', payload);
});
// Pipeline: read argv[3] → gzip+encrypt keyed by argv[2] → "<input>.gz.enc".
// combine() merges the stages so a single handler catches failures from any
// of them (plain .pipe() does not forward errors downstream).
const sourceFile = fs.createReadStream(process.argv[3]);
const destFile = fs.createWriteStream(process.argv[3] + ".gz.enc");
combine(
  sourceFile
    .pipe(compressAndEncryptStream(process.argv[2]))
    .pipe(destFile)
).on('error', (err) => {
  // this error may come from any stream in the pipeline
  console.log(err);
});
startStream(offset) { this.offset = this.hashes.findIndex(h => offset <= h.offset + h.size); Log.debug('starting to download from offset: ', offset, `${this.offset}/${this.hashes.length - 1}`); this.skipBytes = offset - this.hashes[this.offset].offset; this.output = new PassThrough(); this.output.on('close', this.end.bind(this)); this.downloadNext(); return this.output; }
// Filtering PassThrough used as a broadcast tap: chunks are forwarded only
// while at least one destination is currently piped from this stream; with
// no pipes attached the data is silently discarded instead of buffering.
// NOTE(review): `_readableState.pipesCount` is undocumented Node internals
// and was removed in later Node versions (`_readableState.pipes` became an
// array) — confirm the supported Node range, or track attachment via the
// public 'pipe'/'unpipe' events instead.
// setMaxListeners(30) suppresses MaxListenersExceededWarning when many
// consumers attach; errors are logged (message only), never rethrown.
new PassThrough({ transform (chunk, encoding, callback) { if (this._readableState.pipesCount > 0) { this.push(chunk) } callback() } }) .setMaxListeners(30) .on('error', (err) => { console.log(err.message) })
// Reading a digest that was never written must fail with ENOENT on both the
// streaming API and the bulk API.
test('read: errors if content missing', (t) => {
  const missing = read.stream(CACHE, 'sha512-whatnot')
  missing.on('data', (data) => {
    throw new Error('unexpected data: ' + JSON.stringify(data))
  })
  missing.on('end', () => {
    throw new Error('end was called even though stream errored')
  })
  // Bluebird predicate-catch: only ENOENT is swallowed (and returned as a
  // value); any other failure still rejects the join.
  return BB.join(
    finished(missing).catch({ code: 'ENOENT' }, (err) => err),
    read(CACHE, 'sha512-whatnot').catch({ code: 'ENOENT' }, (err) => err),
    (streamErr, bulkErr) => {
      t.equal(streamErr.code, 'ENOENT', 'stream got the right error')
      t.equal(bulkErr.code, 'ENOENT', 'bulk got the right error')
    }
  )
})
// Benchmark: stream a small cached entry end-to-end. `defer: true` means
// benchmark.js times until the deferred resolves on 'end'; data is drained
// and discarded so read throughput is all that is measured.
suite.add('content.read.stream() small data', {
  defer: true,
  setup () {
    const fixture = new Tacks(CacheContent({ [INTEGRITY]: CONTENT }))
    fixture.create(CACHE)
  },
  fn (deferred) {
    const readStream = read.stream(CACHE, INTEGRITY)
    readStream.on('error', (err) => deferred.reject(err))
    readStream.on('data', () => {})
    readStream.on('end', () => deferred.resolve())
  }
})
constructor(props) { super(props); this.term = new XTerm(); this.stdin = new PassThrough(); this.stdout = new PassThrough(); this.stdout.rows = 0; this.stdout.columns = 0; this.term.on(`resize`, ({ rows, cols: columns }) => { Object.assign(this.stdout, { rows, columns }).emit(`resize`); }); this.term.on(`data`, data => { this.stdin.write(data.toString()); }); this.stdout.on(`data`, data => { this.term.write(data.toString()); }); }
// Surface write failures: log them server-side, then still answer the client
// (payload is emitted unmodified) so the optimize request does not hang.
writeStream.on('error', function (err) {
  logger.error(app_id, 'optimize_writestream', err);
  socket.emit('fruum:optimize', payload);
});
// Accumulate emitted chunks on the stream itself; `this.data` must have been
// initialised as an array before data starts flowing.
writeStream.on('data', function (part) {
  // Must stay a `function` (not arrow) so `this` binds to writeStream.
  this.data.push(part);
});
// Compress+encrypt pipeline: argv[3] is the input file, argv[2] the key;
// output lands next to the input as "<file>.gz.enc". combine() funnels
// errors from every stage into the single 'error' handler.
// FIXME(review): `compresssAndEncryptStream` is spelled with three 's' —
// almost certainly a typo for `compressAndEncryptStream`; confirm against
// the helper's actual exported name before shipping.
combine( fs .createReadStream(process.argv[3]) .pipe(compresssAndEncryptStream(process.argv[2])) .pipe(fs.createWriteStream(process.argv[3] + ".gz.enc")) ).on("error", err => console.log(err));
/**
 * Builds a PassThrough tap whose only job is to flip the `intercepted`
 * flag once the stream has been fully consumed (readable-side 'end').
 * @returns {stream.PassThrough} the tap stream
 */
var interceptorFactory = function () {
  const tap = new stream.PassThrough();
  tap.on('end', function () {
    intercepted = true;
  });
  return tap;
};
// Benchmark: stream a large cached entry end-to-end. `defer: true` means
// benchmark.js times until the deferred resolves on 'end'; data is drained
// and discarded so read throughput is all that is measured.
suite.add('content.read.stream() big data', {
  defer: true,
  setup () {
    const fixture = new Tacks(CacheContent({ [BIGINTEGRITY]: BIGCONTENT }))
    fixture.create(CACHE)
  },
  fn (deferred) {
    const readStream = read.stream(CACHE, BIGINTEGRITY)
    readStream.on('error', (err) => deferred.reject(err))
    readStream.on('data', () => {})
    readStream.on('end', () => deferred.resolve())
  }
})