tar.pack(workdir, {
  // Skip any entry whose workdir-relative path matches the ignore list.
  ignore: (name) => multimatch([path.relative(workdir, name)], ignores).length > 0,
  // When a custom config file was passed on the CLI, rename it to the
  // canonical exoframe.json inside the tarball; all other headers pass through.
  map: (headers) =>
    args.config && headers.name === args.config
      ? { ...headers, name: 'exoframe.json' }
      : headers,
})
// NOTE(review): truncated fragment — the ignore callback records every entry
// name into fileNames before tar-fs decides whether to skip it; the rest of
// the callback body and the options object are not visible here, so the
// actual ignore decision cannot be documented.
tar.extract('./', { ignore: (name, header) => { fileNames.push(name);
// Defer to the next tick so the caller can finish wiring things up first,
// then stream: source file -> gunzip -> untar into opts.dest.
process.nextTick(function () {
  const source = fs.createReadStream(opts.src);
  source.on('error', error);

  const gunzip = zlib.createGunzip(opts.gz);
  gunzip.on('error', error);

  const untar = tar.extract(opts.dest, opts.tar);
  untar.on('error', error);
  untar.on('finish', callback);

  source.pipe(gunzip).pipe(untar);
});
// Download the archive and extract it into ./tensorflow, choosing the
// extractor by file extension.
https.get(DOWNLOAD_URL, (res) => {
  // Fail fast on anything but HTTP 200. NOTE(review): throwing inside this
  // callback becomes an uncaught exception — consider an error callback.
  if (res.statusCode !== 200) {
    throw new Error(DOWNLOAD_URL + ' ' + res.statusMessage);
  } else {
    console.log(DOWNLOAD_URL + ' is finished downloaded.');
  }
  if (EXT_NAME === '.zip') {
    // .zip archives: extract directly.
    res.pipe(unzip.Extract({ path: './tensorflow' }));
  } else {
    // BUG FIX: the original had a bare block here (missing `else`), so the
    // gunzip/untar pipeline ALSO ran for .zip downloads, consuming the
    // response body twice. Non-zip archives are assumed gzipped tarballs.
    res.pipe(gunzip()).pipe(tar.extract('./tensorflow'));
  }
});
/**
 * Concatenate the given tarball parts in order, gunzip the combined
 * stream, and untar it into outdir.
 *
 * @param {String[]} parts - paths of the tarball pieces, in order
 * @param {String} outdir - directory to extract into
 * @returns {Promise} resolves when extraction finishes, rejects on stream error
 */
const extractTar = (parts, outdir) =>
  new Promise((resolve, reject) => {
    const streams = parts.map((part) => fs.createReadStream(part));
    const unpacking = merge2(...streams)
      .pipe(zlib.createGunzip())
      .pipe(tar.extract(outdir));
    unpacking.on('finish', () => resolve());
    unpacking.on('error', (err) => reject(err));
  });
downloadExpandNodeSource() { const url = `https://nodejs.org/dist/v${this.version}/node-v${this.version}.tar.gz`; if(fs.existsSync(this.nodePath('configure'))) { log(`node version=${this.version} already downloaded and expanded, using it`); return Promise.resolve(); } return download(url, this.nodeSrcFile) .then(() => new Promise((resolve, reject) => { log(`expanding node source, file=${this.nodeSrcFile} ...`); fs.createReadStream(this.nodeSrcFile) .pipe(createGunzip()) .pipe(tar.extract(dirname(this.nodeSrcFile))) .on('error', reject) .on('finish', resolve) }) ); }
/** * Unpack the tarball into given directory. * * Used from https://github.com/ywangii/await-targz under MIT. * * @param {String} tarball Path for tarball to be extracted * @param {String} dir Path for the file to be extracted * @returns {Promise} A promise represents if unpack tarball succeeds or fails * @public */ async unpack(tarball, dir) { return { then: (fulfill, reject) => { const logOpts = { tarball, dir }; const readableStream = fs.createReadStream(tarball).once('error', this._logError(`fs.createReadStream`, logOpts)); const unzip = zlib.createUnzip().once('error', this._logError(`zlib.createUnzip`, logOpts)); const extract = tar.extract(dir).once('error', this._logError(`tar.extract`, logOpts)); pump(readableStream, unzip, extract, err => { if (err) return reject(err); fulfill(); }); } }; }
// Build hook: stage the Docker template into the dist directory, tar it,
// and feed the tarball to the Docker daemon as the build context.
api.hook('build:docker')
  .prepend('build-docker-image', async api => {
    await api.fs.copy({src: dockerTemplate, dest: api.config.docker.dist})
    const stream = await tar.pack(api.config.docker.dist)
    await new Promise((resolve, reject) => {
      const docker = new Docker()
      docker.buildImage(
        stream,
        {t: api.config.docker.name},
        (err, output) => {
          if (err) {
            console.error(err)
            // FIX: the original fell through after reject() and kept
            // inspecting `output`; bail out so the promise settles once.
            reject(err)
            return
          }
          if (output) {
            // Mirror the daemon's build output; resolve when it ends.
            output.pipe(process.stdout, {end: true})
            output.on('end', resolve)
          } else {
            // FIX: with neither err nor output the original promise never
            // settled, hanging the hook forever.
            resolve()
          }
        }
      )
    })
  })
.pipe(tar.extract(prebuilds), { dmode: 0o755, fmode: 0o644 }) .on('finish', loop) .on('error', callback)
// NOTE(review): truncated fragment — the `return reject(...)` implies an
// enclosing existence check whose `if` header is not visible, and the
// buildImage callback body is cut off. Also rejects with a plain string
// rather than an Error, so callers get no stack trace — confirm intended.
return reject('Path "' + opts.path + '" does not exist')
// Pack the build context and append an in-memory Dockerfile entry before
// handing the stream to the Docker daemon.
const tarStream = tar.pack(opts.path)
tarStream.entry({ name: 'Dockerfile' }, opts.dockerfile)
opts.docker.buildImage(tarStream, {t: opts.image_name}, (err, stream) => {
// Defer to the next tick so the caller can finish wiring things up first,
// then stream: tar-pack opts.src -> gzip -> write to opts.dest.
process.nextTick(function () {
  const pack = tar.pack(opts.src, opts.tar);
  pack.on('error', error);

  const gzip = zlib.createGzip(opts.gz);
  gzip.on('error', error);

  const sink = fs.createWriteStream(opts.dest);
  sink.on('error', error);
  sink.on('finish', callback);

  pack.pipe(gzip).pipe(sink);
});
// Build hook: stage the Docker template into dist, tar it, and hand the
// tarball to the Docker daemon as the build context.
clipped.hook('build:docker')
  .prepend('build-docker', async clipped => {
    try {
      await clipped.copy([
        {src: clipped.config.dockerTemplate, dest: clipped.config.dist}
      ])
      const context = await tar.pack(clipped.config.dist)
      await clipped.docker.buildImage(context, {
        t: clipped.config.name
      })
    } catch (e) {
      // NOTE(review): failures are logged but swallowed, so the hook still
      // reports success — confirm this best-effort behavior is intended.
      console.error(e)
    }
  })
/** * Unpack the tarball into given directory. * * Used from https://github.com/ywangii/await-targz under MIT. * * @param {String} tarball Path for tarball to be extracted * @param {String} dir Path for the file to be extracted * @returns {Promise} A promise represents if unpack tarball succeeds or fails * @public */ async unpack(tarball, dir) { return { then: (fulfill, reject) => { const logOpts = { tarball, dir }; const readableStream = fs.createReadStream(tarball).once('error', this._logError(`fs.createReadStream`, logOpts)); const unzip = zlib.createUnzip().once('error', this._logError(`zlib.createUnzip`, logOpts)); const extract = tar.extract(dir).once('error', this._logError(`tar.extract`, logOpts)); pump(readableStream, unzip, extract, err => { if (err) return reject(err); fulfill(); }); } }; }