// When the tarball stream closes: read the temp file, delete it, gzip the
// contents, log the size/ratio stats, then settle the surrounding promise.
tarball.on('close', () => {
  const raw = fs.readFileSync(tmpPath);
  fs.unlinkSync(tmpPath);
  progress.log(`Package size: ${formatSize(raw.byteLength)}`);
  progress.log(`Compressing ...`);
  zlib.gzip(raw, (err, packed) => {
    if (err) {
      return reject(err);
    }
    const elapsed = new Date().valueOf() - start;
    progress.log(`Compressed size: ${formatSize(packed.byteLength)}`);
    progress.log(`Compression: ${((packed.byteLength / raw.byteLength) * 100).toFixed(2)}%`);
    progress.log(`Pack complete, took ${elapsed}ms!`);
    resolve(packed);
  });
});
// NOTE(review): incomplete fragment — the gzip callback body and closing
// braces are not part of this snippet.
// When compression is enabled, advertise gzip in the response headers and
// compress the body before sending.
if (compressing) { resHeader["Content-Encoding"] = "gzip"; zlib.gzip(content, (err, buffer) => {
// NOTE(review): incomplete fragment — the gzip callback body is outside this
// snippet. Removes the temp file, then compresses the buffer read from it.
fs.unlinkSync(tmpPath); zlib.gzip(buffer, (err, result) => {
// NOTE(review): incomplete fragment (duplicate pattern) — the gzip callback
// body is outside this snippet. Deletes the temp file, then gzips the buffer.
fs.unlinkSync(tmpPath); zlib.gzip(buffer, (err, result) => {
/**
 * Gzip-compress `body`, resolving with the compressed Buffer.
 * @param {string|Buffer} body - data to compress
 * @returns {Promise<Buffer>} resolves with gzipped bytes, rejects with the zlib error
 */
const zlibPromised = (body) =>
  new Promise((resolve, reject) => {
    zlib.gzip(body, (err, compressed) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(compressed);
    });
  });
// Gzip the payload, then ship it over the websocket if the connection is open.
zlib.gzip(buf, function (err, zippedmsg) {
  // Bug fix: the compression error was previously discarded (`_`), so a
  // failed gzip sent `undefined` down the socket.
  if (err) {
    return reject(err);
  }
  if (self.ws.socket.readyState == WebSocket.OPEN) {
    self.ws.send(zippedmsg);
    resolve();
  } else {
    // NOTE: string rejection kept byte-for-byte for caller compatibility.
    reject("websocket_not_ready");
  }
});
flush(epId) { const reward = rewards(this.memory[epId]); if (reward !== 0) { const text = JSON.stringify(this.memory[epId]); const path = this.makePath(reward); zlib.gzip(text, (_, gz) => { fs.writeFileSync(path, gz); }); } delete this.memory[epId]; }
// Select a file over 1GB
// Gzip `file` to `${file}.gz`.
// Bug fixes: (1) in the collapsed original, the leading `//` comment swallowed
// the entire line of code; (2) each error branch only logged and then fell
// through, so e.g. a read failure went on to gzip `undefined`.
fs.readFile(file, (err, buffer) => {
  if (err) return console.log(err);
  zlib.gzip(buffer, (err, compressed) => {
    if (err) return console.log(err);
    fs.writeFile(`${file}.gz`, compressed, err => {
      if (err) return console.log(err);
      console.log("successfully compressed");
    });
  });
});
// Compress the serialized task, spill it to a uniquely-named .gz file, then
// hand the filename to the worker over RPC.
zlib.gzip(str, {chunkSize: 65536}, function (err, res) {
  // Bug fix: a compression error was silently ignored, so `res` could be
  // undefined when written below. Surface it through the error-first task
  // callback (same channel used for RPC errors).
  if (err) return callback(err, null, task);
  const filename = task.basedir + 'task-' + uuid.v4() + '.gz';
  fs.writeFile(filename, res, function (err) {
    if (err) throw new Error(err);
    rpc('runztask', wid, filename, function (err, res) {
      self.worker[wid].ntask--;
      callback(err, res, task);
    });
  });
});
// Settle the surrounding promise with the gzipped buffer, or the zlib error.
zlib.gzip(buffer, (err, compressed) => {
  if (err) {
    reject(err);
    return;
  }
  resolve(compressed);
});
/**
 * Write `code` to `dest`, log the relative path plus its size, and — when
 * `zip` is truthy — additionally report the gzipped size. Resolves after
 * the size line has been logged; rejects on write or gzip failure.
 * @param {string} dest - output file path
 * @param {string|Buffer} code - file contents
 * @param {boolean} zip - whether to also report the gzipped size
 * @returns {Promise<void>}
 */
function write (dest, code, zip) {
  return new Promise((resolve, reject) => {
    const report = (extra) => {
      console.log(blue(path.relative(process.cwd(), dest)) + ' ' + getSize(code) + (extra || ''))
      resolve()
    }
    fs.writeFile(dest, code, err => {
      if (err) return reject(err)
      if (!zip) {
        report()
        return
      }
      zlib.gzip(code, (err, zipped) => {
        if (err) return reject(err)
        report(' (gzipped: ' + getSize(zipped) + ')')
      })
    })
  })
}
// Gzip the JSON chunk and upload it to S3 as the next numbered .zip part.
zlib.gzip(json, function (error, result) {
  if (error) throw error;
  cont++;
  var fname = filename + '.json';
  var file = cont + '_' + fname + '.zip';
  s3Array.push(file);
  var params = { Bucket: config.bucketUpload, Key: "mysqlupload/" + file, Body: result, ContentType: 'application/zip' };
  var options = { partSize: 10 * 1024 * 1024, queueSize: 1 };
  s3.upload(params, options, function (err, data) {
    // Bug fix: an upload error previously left done() uncalled, silently
    // hanging the surrounding flow.
    // NOTE(review): assumes `done` follows the error-first convention —
    // TODO confirm against the caller.
    if (err) return done(err);
    //console.log("Uploaded File", file);
    done();
  });
});
// Best-effort gzip of the outgoing request body: on success, swap in the
// compressed bytes and tag the request with the encoding; on failure, warn
// and fall back to the uncompressed body. The request is sent either way.
zlib.gzip(bufferUncompressed, null, function (err, bufferCompressed) {
  if (err) {
    Logger.getInstance().warn('Could not compress request body.');
  } else {
    // don't specify the 'json' option; use the compressed buffer as the
    // body and set the appropriate content encoding
    json = undefined;
    body = bufferCompressed;
    headers['Content-Encoding'] = 'gzip';
  }
  sendRequest();
});
// Gzip `buf`, log the before/after sizes, and write the result to `dst`.
zlib.gzip(buf, (err, data) => {
  // Bug fix: the error was previously ignored, so a failed compression
  // crashed on `data.length` with an unhelpful TypeError.
  if (err) {
    console.error(err);
    return;
  }
  console.log('%s size: %d byte', src, buf.length);
  console.log('%s size: %d byte', dst, data.length);
  fs.writeFileSync(dst, data);
});
// Serialize-and-spill path: gzip the task string, persist it to a fresh
// .gz file, then dispatch the filename to the worker via RPC.
zlib.gzip(str, {chunkSize: 65536}, function (err, res) {
  // Bug fix: the gzip error was dropped, letting an undefined `res` reach
  // fs.writeFile. Report it through the error-first task callback instead.
  if (err) return callback(err, null, task);
  const filename = task.basedir + 'task-' + uuid.v4() + '.gz';
  fs.writeFile(filename, res, function (err) {
    if (err) throw new Error(err);
    rpc('runztask', wid, filename, function (err, res) {
      self.worker[wid].ntask--;
      callback(err, res, task);
    });
  });
});