// NOTE(review): truncated fragment — the 'entry' handler body and the closing
// of its braces are cut off here, so completeness cannot be assessed from this view.
req.pipe(zlib.createGunzip()).pipe(tar.extract()) .on('entry', function (entry, stream, next) { if (entry.type === 'file') {
// Verifies that countBytes reports the exact uncompressed byte size of the fixture.
test('It should count the right number of bytes', done => {
  const filePath = join(__dirname, '..', '..', 'assets', 'moby-dick.txt.gz')
  const srcStream = createReadStream(filePath)
  const unzippedStream = srcStream.pipe(createGunzip())
  countBytes(unzippedStream, (err, bytes) => {
    // Route failures through done(err): throwing from an async callback
    // escapes jest and cannot be attributed to this test.
    if (err) {
      return done(err)
    }
    expect(bytes).toBe(1234481)
    done()
  })
})
/**
 * Builds a duplex decoding stream from a shared secret and initialization vector.
 *
 * NOTE(review): pumpify pipes its first argument into its second, so this
 * pipeline gunzips the input BEFORE deciphering it — that only matches an
 * encoder that gzipped the ciphertext. Confirm against the encoding counterpart
 * (compress-then-encrypt would require the opposite order).
 */
function createDecgz (secret, iv) {
  const key = createCipherKey(secret)
  const decipher = createDecipheriv('aes256', key, iv)
  const inflate = createGunzip()
  return pumpify(inflate, decipher)
}
// Kick off the extraction on the next tick so the caller can attach
// listeners before any I/O starts.
process.nextTick(function () {
  const source = fs.createReadStream(opts.src);
  source.on('error', error);
  const gunzip = zlib.createGunzip(opts.gz);
  gunzip.on('error', error);
  const untar = tar.extract(opts.dest, opts.tar);
  untar.on('error', error);
  untar.on('finish', callback);
  source.pipe(gunzip).pipe(untar);
});
fs.createReadStream(filepath)
  .pipe(zlib.createGunzip())
  .on('data', chunk => chunks.push(chunk))
  // Parse on 'end' (readable side fully drained), not 'finish': 'finish' only
  // signals that all input was written and can fire before the final
  // decompressed chunks are emitted, so parsing there could see truncated data.
  .on('end', () => {
    ctx = Buffer.concat(chunks);
    ctx = JSON.parse(ctx).formImage.Pages.map(o => ({ Texts: o.Texts }));
    resolve(ctx);
  });
t.test('stream gzip', function (t) {
  t.plan(2);
  let res = '';
  const s = sc.range(10).stream({gzip: true});
  t.ok(s instanceof stream.Readable, 'ds.stream() returns a readable stream');
  const rs = s.pipe(zlib.createGunzip());
  rs.on('data', function (data) {res += data.toString();});
  rs.on('end', function () {
    // '\9' was a typo: it is an illegal escape in strict-mode/module code and
    // merely decodes to '9' elsewhere — write the digit literally.
    t.equal(res, '0\n1\n2\n3\n4\n5\n6\n7\n8\n9\n', 'gunzip data read is correct');
    sc.end();
  });
});
downloadExpandNodeSource() { const url = `https://nodejs.org/dist/v${this.version}/node-v${this.version}.tar.gz`; if(fs.existsSync(this.nodePath('configure'))) { log(`node version=${this.version} already downloaded and expanded, using it`); return Promise.resolve(); } return download(url, this.nodeSrcFile) .then(() => new Promise((resolve, reject) => { log(`expanding node source, file=${this.nodeSrcFile} ...`); fs.createReadStream(this.nodeSrcFile) .pipe(createGunzip()) .pipe(tar.extract(dirname(this.nodeSrcFile))) .on('error', reject) .on('finish', resolve) }) ); }
// Receives an encrypted, gzipped upload and stores it under the name given
// in the request headers.
http.createServer((req, res) => {
  const { filename } = req.headers
  console.log(`File request received: ${filename}`)
  // NOTE(review): crypto.createDecipher is deprecated and does not handle
  // authenticated modes such as GCM (no IV or auth-tag plumbing) — migrate
  // both ends to createDecipheriv. Also, `filename` comes straight from an
  // untrusted header: sanitize it (e.g. path.basename) before using it as a path.
  req
    .pipe(crypto.createDecipher('aes-192-gcm', 'secret'))
    .pipe(zlib.createGunzip())
    .pipe(fs.createWriteStream(filename))
    .on('finish', () => {
      res.writeHead(201, { 'Content-Type': 'text/plain' })
      res.end('That\'s it\n')
      console.log(`File saved: ${filename}`)
    })
})
/**
 * Concatenates the gzipped tar parts in order and extracts the archive
 * into outdir.
 * @param {string[]} parts  Paths of the archive segments, in order.
 * @param {string}   outdir Destination directory for extraction.
 * @returns {Promise<void>}
 */
const extractTar = (parts, outdir) => {
  return new Promise((resolve, reject) => {
    const source = merge2(...parts.map(part => fs.createReadStream(part)));
    const gunzip = zlib.createGunzip();
    const unpack = source.pipe(gunzip).pipe(tar.extract(outdir));
    // pipe() does not forward errors, so each stage needs its own handler;
    // otherwise an upstream failure leaves the promise pending forever.
    source.on('error', reject);
    gunzip.on('error', reject);
    unpack.on('error', e => reject(e));
    unpack.on('finish', () => resolve());
  });
}
/**
 * Probes the CDN for a prebuilt tdlib binary; downloads and exits 0 on
 * success, exits 1 to signal the caller to compile from source.
 */
async function checkAndDownload() {
  console.log(`Checking if build tdlib-v${tdlib.version}-${tdlib.commit}-${tdlib.variant}-${arch_version} exist...`)
  let head = await rp({ method: 'HEAD', url: tdlib.debug ? url.debug : url.main, resolveWithFullResponse: true, simple: false, followRedirect: false })
  // Strict equality: a redirect from the CDN means the prebuilt binary exists.
  if (head.statusCode === 301 || head.statusCode === 302) {
    console.log('Build exist. Downloading...')
    rp.get(tdlib.debug ? url.debug : url.main)
      .pipe(zlib.createGunzip())
      // A corrupt or interrupted download should fall back to compiling
      // instead of crashing with an unhandled 'error' event.
      .on('error', () => process.exit(1))
      .pipe(fs.createWriteStream('./tdlib.node'))
      .on('finish', () => { process.exit(0) })
  } else {
    console.log('Build does not exist. Trying to compile...')
    process.exit(1)
  }
}
/**
 * Decodes a gzip-compressed intake request body into a stream of parsed
 * newline-delimited JSON objects.
 */
function processIntakeReq (req) {
  const inflate = zlib.createGunzip()
  const parser = ndjson.parse()
  return req.pipe(inflate).pipe(parser)
}
/** * Find the list of file names in the specified chaincode deployment specification. * @private * @param {ChaincodeDeploymentSpec} chaincodeDeploymentSpec The chaincode deployment specification. * @returns {string[]} The list of file names. */ static async _findFileNames(chaincodeDeploymentSpec) { const codePackage = chaincodeDeploymentSpec.getCodePackage().toBuffer(); const gunzip = zlib.createGunzip(); const extract = tar.extract(); return new Promise((resolve) => { const fileNames = []; extract.on('entry', (header, stream, next) => { logger.debug('Package._findFileNames - found entry %s', header.name); fileNames.push(header.name); stream.on('end', () => { next(); }); stream.resume(); }); extract.on('finish', () => { resolve(fileNames.sort()); }); gunzip.pipe(extract); gunzip.end(codePackage); }); }
persistAssetsFromManifestStream (stream, derivativeUrn) { return new Promise((resolve, reject) => { try { let data = ''; const gstream = stream.pipe(zlib.createGunzip()); gstream.on('data', chunk => data += chunk); gstream.on('finish', () => { this.persistAssetsFromManifest(JSON.parse(data), derivativeUrn); resolve() }) } catch (err) { reject(err) } }) }
// Verifies that countWords reports the exact word count of the fixture.
test('It should count the right number of words', done => {
  const filePath = join(__dirname, '..', '..', 'assets', 'moby-dick.txt.gz')
  const srcStream = createReadStream(filePath)
  const unzippedStream = srcStream.pipe(createGunzip())
  countWords(unzippedStream, (err, words) => {
    // Route failures through done(err): throwing from an async callback
    // escapes jest and cannot be attributed to this test.
    if (err) {
      return done(err)
    }
    expect(words).toBe(212793)
    done()
  })
})
/** * Find the list of file names in the specified chaincode deployment specification. * @private * @param {ChaincodeDeploymentSpec} chaincodeDeploymentSpec The chaincode deployment specification. * @returns {string[]} The list of file names. */ static async _findFileNames(chaincodeDeploymentSpec) { const codePackage = chaincodeDeploymentSpec.getCodePackage().toBuffer(); const gunzip = zlib.createGunzip(); const extract = tar.extract(); return new Promise((resolve) => { const fileNames = []; extract.on('entry', (header, stream, next) => { logger.debug('Package._findFileNames - found entry %s', header.name); fileNames.push(header.name); stream.on('end', () => { next(); }); stream.resume(); }); extract.on('finish', () => { resolve(fileNames.sort()); }); gunzip.pipe(extract); gunzip.end(codePackage); }); }