// Serialize events as gzipped newline-delimited JSON (keys converted to
// snake_case via humps) and upload the result to the events S3 bucket.
// Objects land under an hour-level partition prefix: dt=YYYY-MM-DDTHH/.
const uploadEvents = async (events) => {
  const gzip = zlib.createGzip();
  for (const event of events) {
    // One JSON document per line (NDJSON), snake_cased for the warehouse.
    gzip.write(`${JSON.stringify(humps.decamelizeKeys(event))}\n`, 'utf8');
  }
  gzip.end();

  const date = new Date().toISOString();
  // First 13 chars of the ISO timestamp = "YYYY-MM-DDTHH" (hourly partition).
  const partitionPrefix = date.substring(0, 13);
  const fileName = `dt=${partitionPrefix}/${date}.json.gz`;

  console.log(`Uploading ${fileName}: ${events.length} events...`);
  // The ended gzip stream is handed to S3 as the upload body; the SDK
  // drains whatever the stream buffered above.
  await s3.upload({
    Bucket: process.env.HN_INSIGHTS_EVENTS_BUCKET || 'hn-insights-events',
    Key: fileName,
    Body: gzip
  }).promise();
  console.log(`Uploading ${fileName} done`);
}
srcStream.on('end', () => {
  // The transform stream may still hold buffered output; read() with no
  // size returns everything in its internal buffer, so forward that to
  // the destination before closing the transform stream.
  const pending = gzipStream.read();
  if (pending !== null) {
    destStream.write(pending);
  }
  gzipStream.end();
})
case '/testget': res.writeHead(200) res.end('Hi.') break case '/slowres': setTimeout(() => { res.writeHead(200) res.end('That was slow.') }, 1300); break case '/notjson': res.writeHead(200) res.end('hey') break case '/chunked': res.write('hi') setTimeout(() => { res.end('hey') }, 50) break case '/json': res.writeHead(200) res.end(JSON.stringify({ 'hi': 'hey' })) case '/corrected': res.writeHead(200)