constructor(log = new Log()) { this.log = log; this.running = []; this.stdout = stdoutStream(); this.stderr = stderrStream(); }
fs.createReadStream(process.argv[2])
  // Re-chunk the input so each line arrives as its own data event.
  .pipe(split())
  // Probe at most two URLs concurrently with a HEAD request.
  .pipe(new LimitedParallelStream(2, (url, enc, push, done) => {
    if (!url) {
      return done(); // blank line — nothing to check
    }
    request.head(url, (err, response) => {
      push(`${url} is ${err ? 'down' : 'up'}\n`);
      done();
    });
  }))
  .pipe(fs.createWriteStream('results.txt'))
  .on('finish', () => console.log('All urls were checked'));
// Log every request (except "/") to the console, prefixed with a timestamp.
app.use(morgan('dev', {
  // Skip logging for the root path. Fixed: use strict equality and an
  // explicit boolean return instead of `==` with an implicit
  // `undefined` fall-through.
  skip: function (req, res) {
    return req.url === '/';
  },
  // morgan writes into this stream; split() re-emits it line-by-line
  // so each log line can be prefixed with a timestamp.
  stream: require('split')().on('data', function (line) {
    process.stdout.write(moment().format('YYYY-MM-DD HH:mm:ss-SSS') + ' ' + line + '\n');
  })
}));
MetaUtil({
  // Delay between emissions; falls back to 100 when argv[5] is absent.
  'delay': (process.argv[5] || 100),
  'start': Number(process.argv[3]),
  'end': Number(process.argv[4])
}).pipe(through(
  // Transform function: buffers records and pushes at the bulk threshold.
  write,
  // Flush function: push whatever remains in the buffer, then hang up.
  function () {
    if (bulkBuffer.length > 0) {
      pushToES(bulkBuffer, 'exit');
    }
  }
));
readStream
  // Parse each newline-delimited JSON record into an object.
  .pipe(split(JSON.parse))
  .on('data', function (record) {
    // Tag the record with a 1-based sequence number (increments the
    // enclosing-scope counter `n`) and echo it.
    record.id = ++n;
    console.log(`${n}: ${JSON.stringify(record)}`);
  });
MetaUtil().pipe(through(
  // Transform function: buffers records and pushes at the bulk threshold.
  write,
  // Flush function: terminate the process once the source has ended.
  function () {
    process.exit(0);
  }
));
// Forward every incoming line to the application logger.
split().on('data', (line) => {
  logger.info(line);
});
// Read the file containing the URL list.
fs.createReadStream(process.argv[2])
  // Emit each line as its own chunk.
  .pipe(split())
  // Run the URLs through the parallel transform stream.
  .pipe(new ParallelStream(transformer))
  // Destination stream for the results.
  .pipe(fs.createWriteStream('urlList-results.txt'))
  .on('finish', () => {
    console.log('All urls were checked');
    console.log(`Execution time : ${Date.now() - startTime}(ms)`);
  });
// Read the file containing the URL list.
fs.createReadStream(process.argv[2])
  // Emit each line as its own chunk.
  .pipe(split())
  // Run the URLs through the transform with at most 2 concurrent jobs.
  .pipe(throughParallel.obj({ concurrency: 2 }, transformer))
  // Destination stream for the results.
  .pipe(fs.createWriteStream('urlList-results.txt'))
  .on('finish', () => {
    console.log('All urls were checked');
    console.log(`Execution time : ${Date.now() - startTime}(ms)`);
  });
fs.createReadStream(process.argv[2])
  // One line per chunk.
  .pipe(split())
  // At most two HEAD requests in flight. A regular `function` (not an
  // arrow) is kept so `this.push` targets the transform stream.
  .pipe(throughParallel.obj({ concurrency: 2 }, function (url, enc, done) {
    if (!url) {
      return done(); // blank line — nothing to check
    }
    request.head(url, (err, response) => {
      this.push(`${url} is ${err ? 'down' : 'up'}\n`);
      done();
    });
  }))
  .pipe(fs.createWriteStream('results.txt'))
  .on('finish', () => console.log('All urls were checked'));
// Read the file containing the URL list.
fs.createReadStream(process.argv[2])
  // Emit each line as its own chunk.
  .pipe(split())
  // Run the URLs through the limited-parallel transform stream.
  .pipe(new LimitedParallelStream(concurrency, transformer))
  // Destination stream for the results.
  .pipe(fs.createWriteStream('urlList-results.txt'))
  .on('finish', () => {
    console.log('All urls were checked');
    console.log(`Execution time : ${Date.now() - startTime}(ms)`);
  });
fs.createReadStream(process.argv[2])                      // [1] source: URL file
  .pipe(split())                                          // [2] one line per chunk
  .pipe(new ParallelStream((url, enc, done, push) => {    // [3] check each URL
    if (!url) {
      return done(); // blank line — nothing to check
    }
    request.head(url, (err, response) => {
      push(`${url} is ${err ? 'down' : 'up'}\n`);
      done();
    });
  }))
  .pipe(fs.createWriteStream('results.txt'))              // [4] result sink
  .on('finish', () => console.log('All urls were checked'));
// Log every request (except "/") to the console, prefixed with a timestamp.
app.use(morgan('dev', {
  // Skip logging for the root path. Fixed: use strict equality and an
  // explicit boolean return instead of `==` with an implicit
  // `undefined` fall-through.
  skip: function (req, res) {
    return req.url === '/';
  },
  // morgan writes into this stream; split() re-emits it line-by-line
  // so each log line can be prefixed with a timestamp.
  stream: require('split')().on('data', function (line) {
    process.stdout.write(moment().format('YYYY-MM-DD HH:mm:ss-SSS') + ' ' + line + '\n');
  })
}));
fs.createReadStream(process.argv[2])
  // One line per chunk.
  .pipe(split())
  .pipe(new ParallelStream((url, enc, push, done) => {
    // Defensive `done && done()` guards kept from the original: the
    // callback may be invoked without a completion function.
    if (!url) {
      return done && done();
    }
    request.head(url, (err, response) => {
      // Note: the original output string has a space before the newline.
      push(`${url} is ${err ? "down" : "up"} \n`);
      done && done();
    });
  }))
  .pipe(fs.createWriteStream("results.txt"))
  .on("finish", () => console.log("All urls were checked"));
// Route each line produced by split() to the application logger.
split().on('data', function (message) {
  logger.info(message);
});