// Add every file to the tar pack stream concurrently, logging cumulative
// progress as a percentage of the expected total, then finalize the archive
// once all entries (or the first error) have been reported.
// NOTE(review): assumes `data._size` holds the combined byte length of all
// buffers — confirm against the producer of `data`.
const packTasks = data.map((file) => (done) => {
  pack.entry({name: file.filename}, file.buffer, () => {
    packSize += file.buffer.byteLength;
    progress.log(`Packing "${file.filename}" (${((packSize / data._size) * 100).toFixed(2)}%) ...`);
    done();
  });
});
async.parallel(packTasks, (err) => {
  if (err) {
    return reject(err);
  }
  pack.finalize();
});
// Drive a tar-stream extractor: each archive entry is buffered fully in
// memory ('data' chunks collected, Buffer.concat on 'end') under
// files[header.name]; cb() must be called to let the extractor advance to the
// next entry. A PassThrough is prepared as the feed into the extractor.
// NOTE(review): this excerpt is truncated — the `try { writeFiles(...)` opened
// inside the 'finish' handler closes beyond the visible span, so the catch /
// resolve path cannot be reviewed here.
let extract = tar.extract(); let tarStream = new stream.PassThrough(); extract.on('entry', (header, stream, cb) => { let buffers = []; stream.on('data', (chunk) => buffers.push(chunk)); stream.on('end', () => { files[header.name] = Buffer.concat(buffers); cb(); }); }); extract.on('finish', () => { try { writeFiles(directory, files);
// Test-server handler: gunzip the request body, walk the tar entries, record
// the text of each .html file on request.files, and on 'finish' answer with a
// canned JSON payload while pushing the request onto self.requests.
// NOTE(review): as excerpted this fragment does not parse — the two nested
// `if` blocks are closed with `})`, no 'data' listener ever populates `data`
// before Buffer.concat(data), and the snippet is truncated after
// self.requests.push(request). The `$(unknown)` inside the response template
// literal appears to be a mangled interpolation of the stored filename —
// confirm against the original file before relying on this excerpt.
req.pipe(zlib.createGunzip()).pipe(tar.extract()) .on('entry', function (entry, stream, next) { if (entry.type === 'file') { if (entry.name.endsWith('.html')) { filename = entry.name request.files[entry.name] = Buffer.concat(data).toString() }) next() }) .on('finish', function () { res.end(`{"id": "some-id", "html": "/public/some-id/$(unknown)"}`) self.requests.push(request)
// Same buffering pattern as the extractor above: collect each entry's chunks,
// store the concatenated Buffer under files[header.name], and cb() to resume
// the extractor; on 'finish', iterate the collected names to build on-disk
// output paths under `pathname`.
// NOTE(review): excerpt is truncated inside the forEach 'finish' callback —
// the actual write of `outputPath` happens beyond the visible span.
let extract = tar.extract(); let tarStream = new stream.PassThrough(); extract.on('entry', (header, stream, cb) => { let buffers = []; stream.on('data', data => { buffers.push(data); }); stream.on('end', () => { files[header.name] = Buffer.concat(buffers); return cb(); }); }); extract.on('finish', () => { Object.keys(files).forEach(filename => { let outputPath = path.join(pathname, filename);
// Tear the stream down with an error once the running size budget
// has gone negative (i.e. more than the allowed 32MB was received).
if (limit < 0) {
  stream.destroy(new Error('Too much data. Should be less than 32MB'));
}
// Settle the surrounding promise once the extractor has drained every entry.
// resolve() is called with no arguments on purpose.
extract.on('finish', function () {
  resolve();
});
downloadExpandNodeSource() { const url = `https://nodejs.org/dist/v${this.version}/node-v${this.version}.tar.gz`; if(fs.existsSync(this.nodePath('configure'))) { log(`node version=${this.version} already downloaded and expanded, using it`); return Promise.resolve(); } return download(url, this.nodeSrcFile) .then(() => new Promise((resolve, reject) => { log(`expanding node source, file=${this.nodeSrcFile} ...`); fs.createReadStream(this.nodeSrcFile) .pipe(createGunzip()) .pipe(tar.extract(dirname(this.nodeSrcFile))) .on('error', reject) .on('finish', resolve) }) ); }
/** * Unpack the tarball into given directory. * * Used from https://github.com/ywangii/await-targz under MIT. * * @param {String} tarball Path for tarball to be extracted * @param {String} dir Path for the file to be extracted * @returns {Promise} A promise represents if unpack tarball succeeds or fails * @public */ async unpack(tarball, dir) { return { then: (fulfill, reject) => { const logOpts = { tarball, dir }; const readableStream = fs.createReadStream(tarball).once('error', this._logError(`fs.createReadStream`, logOpts)); const unzip = zlib.createUnzip().once('error', this._logError(`zlib.createUnzip`, logOpts)); const extract = tar.extract(dir).once('error', this._logError(`tar.extract`, logOpts)); pump(readableStream, unzip, extract, err => { if (err) return reject(err); fulfill(); }); } }; }
// Resolve the enclosing promise when extraction completes; the wrapper
// arrow is kept so resolve() receives no event arguments.
extract.on('finish', function () {
  resolve();
});