// Collects each tar entry fully into memory (files[name] = Buffer), then writes
// everything out once extraction finishes.
// NOTE(review): fragment is truncated from this view — the 'finish' handler's
// try block is cut off mid-statement, so the closing braces are not visible.
// NOTE(review): the 'entry' callback parameter `stream` shadows the `stream`
// module used by `new stream.PassThrough()` — rename the parameter (e.g.
// `entryStream`) to avoid confusion.
// NOTE(review): as written here, `extract.on('finish', ...)` is registered
// INSIDE the 'entry' handler, adding one listener per entry (Node warns after
// 10) — presumably it belongs outside the handler; confirm against the full file.
// NOTE(review): no 'error' handler on the per-entry stream — a read error would
// go unhandled. TODO confirm error handling exists in the surrounding code.
let tarStream = new stream.PassThrough(); extract.on('entry', (header, stream, cb) => { let buffers = []; stream.on('data', (chunk) => buffers.push(chunk)); stream.on('end', () => { files[header.name] = Buffer.concat(buffers); cb(); }); extract.on('finish', () => { try { writeFiles(directory, files);
// Buffers each tar entry into files[name], then on 'finish' writes each buffered
// file to path.join(pathname, filename).
// NOTE(review): fragment is truncated from this view — the forEach body is cut
// off after computing `outputPath`, so closing braces are not visible.
// NOTE(review): the 'entry' callback parameter `stream` shadows the `stream`
// module used by `new stream.PassThrough()` — rename to avoid confusion.
// NOTE(review): as written here, `extract.on('finish', ...)` is registered
// INSIDE the 'entry' handler, adding one listener per entry — presumably it
// belongs outside; confirm against the full file.
// NOTE(review): `path.join(pathname, filename)` with an untrusted tar entry
// name allows `../` path traversal (zip-slip) — verify entries are sanitized
// upstream or validate that outputPath stays under `pathname`.
let tarStream = new stream.PassThrough(); extract.on('entry', (header, stream, cb) => { let buffers = []; stream.on('data', data => { buffers.push(data); }); stream.on('end', () => { files[header.name] = Buffer.concat(buffers); return cb(); }); extract.on('finish', () => { Object.keys(files).forEach(filename => { let outputPath = path.join(pathname, filename);
// Settle the surrounding promise once the tar extraction has fully completed.
extract.on('finish', () => resolve());
downloadExpandNodeSource() { const url = `https://nodejs.org/dist/v${this.version}/node-v${this.version}.tar.gz`; if(fs.existsSync(this.nodePath('configure'))) { log(`node version=${this.version} already downloaded and expanded, using it`); return Promise.resolve(); } return download(url, this.nodeSrcFile) .then(() => new Promise((resolve, reject) => { log(`expanding node source, file=${this.nodeSrcFile} ...`); fs.createReadStream(this.nodeSrcFile) .pipe(createGunzip()) .pipe(tar.extract(dirname(this.nodeSrcFile))) .on('error', reject) .on('finish', resolve) }) ); }
// Scans tar entries for a package.json; on finding one, resolves the enclosing
// promise with its parsed version/dependencies. Other entries are drained
// (stream.resume()) and skipped via cb().
// NOTE(review): `data` is accumulated but not declared in this fragment —
// presumably a string declared in the enclosing scope; confirm.
// NOTE(review): `data += chunk` coerces each Buffer chunk to a string; a
// multi-byte UTF-8 character split across a chunk boundary would be mangled —
// safer to collect Buffers and decode once at 'end'.
// NOTE(review): on a successful parse the promise resolves WITHOUT calling
// cb(), abandoning the rest of the extraction — looks intentional (only
// package.json is needed), but verify the extract stream is destroyed/cleaned
// up by the caller.
extract.on('entry', (header, stream, cb) => { const file = header.name.split('/').pop() stream.on('data', (chunk) => { if (file === 'package.json') data += chunk }) stream.on('end', () => { if (data) { try { const pkgJSON = JSON.parse(data) resolve({ type: 'remote', version: pkgJSON.version, dependencies: pkgJSON.dependencies, url: spec }) } catch (e) { reject(e) } } else { cb() } }) stream.resume() })
/** * Find the list of file names in the specified chaincode deployment specification. * @private * @param {ChaincodeDeploymentSpec} chaincodeDeploymentSpec The chaincode deployment specification. * @returns {string[]} The list of file names. */ static async _findFileNames(chaincodeDeploymentSpec) { const codePackage = chaincodeDeploymentSpec.getCodePackage().toBuffer(); const gunzip = zlib.createGunzip(); const extract = tar.extract(); return new Promise((resolve) => { const fileNames = []; extract.on('entry', (header, stream, next) => { logger.debug('Package._findFileNames - found entry %s', header.name); fileNames.push(header.name); stream.on('end', () => { next(); }); stream.resume(); }); extract.on('finish', () => { resolve(fileNames.sort()); }); gunzip.pipe(extract); gunzip.end(codePackage); }); }
// Reads the 'skeleton' entry out of the tar stream and resolves with its parsed
// form via stringToSkeleton().
// NOTE(review): fragment is truncated/garbled from this view — braces do not
// balance, and the 'finish' handler is cut off mid-statement after
// resolve(stringToSkeleton(content)).
// NOTE(review): as shown, next() and stream.resume() appear gated on
// `header.name === 'skeleton'` — if so, any other entry would stall the
// extraction; confirm against the full file that they run for every entry.
// NOTE(review): only resolve is captured — no rejection path is visible for a
// missing 'skeleton' entry or stream errors; verify the caller handles that.
return new Promise(resolve => { let content = ''; extract.on('entry', (header, stream, next) => { if (header.name === 'skeleton') { stream.on('data', data => { content += data.toString(); }); stream.on('end', () => { next(); }); stream.resume(); }); extract.on('finish', () => { if (content) { resolve(stringToSkeleton(content));
// Settle the surrounding promise once the tar extraction has fully completed.
extract.on('finish', () => resolve());
/** * Find the list of file names in the specified chaincode deployment specification. * @private * @param {ChaincodeDeploymentSpec} chaincodeDeploymentSpec The chaincode deployment specification. * @returns {string[]} The list of file names. */ static async _findFileNames(chaincodeDeploymentSpec) { const codePackage = chaincodeDeploymentSpec.getCodePackage().toBuffer(); const gunzip = zlib.createGunzip(); const extract = tar.extract(); return new Promise((resolve) => { const fileNames = []; extract.on('entry', (header, stream, next) => { logger.debug('Package._findFileNames - found entry %s', header.name); fileNames.push(header.name); stream.on('end', () => { next(); }); stream.resume(); }); extract.on('finish', () => { resolve(fileNames.sort()); }); gunzip.pipe(extract); gunzip.end(codePackage); }); }