/**
 * Writes a Flink {@code Tuple2} record by splitting it into key and value
 * and delegating to the wrapped Hadoop record writer.
 *
 * @param record the (key, value) pair to emit
 * @throws IOException if the underlying writer fails
 */
@Override
public void writeRecord(Tuple2<K, V> record) throws IOException {
    final K recordKey = record.f0;
    final V recordValue = record.f1;
    this.recordWriter.write(recordKey, recordValue);
}
}
/**
 * Writes the given record as the value, with a {@code null} key, through the
 * wrapped writer.
 *
 * @param r the record to write; assumed to be castable to V — TODO confirm
 * @throws IOException if the delegate writer fails
 */
@Override
@SuppressWarnings("unchecked")
public void write(Writable r) throws IOException {
    final V value = (V) r;
    mWriter.write(null, value);
}
/**
 * Persists the value through the wrapped writer; the caller-supplied key is
 * intentionally discarded and replaced with {@code null}.
 *
 * @param key   ignored by this writer
 * @param value the value to write
 * @throws IOException if the delegate writer fails
 */
@Override
public synchronized void write(K key, V value) throws IOException {
    // Key is dropped: the underlying writer receives null in its place.
    mWriter.write(null, value);
}
/**
 * Fans the (key, value) pair out to every wrapped record writer in order.
 *
 * @param key   record key, passed through unchanged
 * @param value record value, passed through unchanged
 * @throws IOException if any delegate fails; writers after the failing one
 *         are not invoked
 */
@Override
public void write(Object key, Object value) throws IOException {
    for (org.apache.hadoop.mapred.RecordWriter delegate : writers) {
        delegate.write(key, value);
    }
}
}
/**
 * Writes the value via the wrapped writer with a {@code null} key; the
 * supplied key is deliberately ignored.
 *
 * @param key   ignored
 * @param value the value to persist
 * @throws IOException if the delegate writer fails
 */
public synchronized void write(K key, V value) throws IOException {
    mWriter.write(null, value);
}
/**
 * Forwards the record to the wrapped writer as the value part of a
 * (null, value) pair.
 *
 * @param r record to emit; expected to be of type V — TODO confirm
 * @throws IOException if the underlying writer fails
 */
@Override
@SuppressWarnings("unchecked")
public void write(Writable r) throws IOException {
    // Cast is unchecked by necessity: the Writable API erases the value type.
    mWriter.write(null, (V) r);
}
/**
 * Synchronized write that hands only the value to the wrapped writer,
 * substituting {@code null} for the key.
 *
 * @param key   discarded
 * @param value value to write
 * @throws IOException if the delegate writer fails
 */
@Override
public synchronized void write(K key, V value) throws IOException {
    final V payload = value;
    mWriter.write(null, payload);
}
/**
 * OutputCollector callback: relays the pair directly to the record writer.
 *
 * @param key   key to emit
 * @param value value to emit
 * @throws IOException if the underlying writer fails
 */
public void collect(Object key, Object value) throws IOException {
    writer.write(key, value);
}
/**
 * Thread-safe write: ignores the incoming key and persists only the value
 * through the wrapped writer.
 *
 * @param key   ignored
 * @param value value to persist
 * @throws IOException if the delegate writer fails
 */
public synchronized void write(K key, V value) throws IOException {
    mWriter.write(null, value);
}
/**
 * Counts each write attempt in the named counter, then forwards the pair to
 * the delegate writer.
 *
 * @param key   key to write
 * @param value value to write
 * @throws IOException if the delegate writer fails
 */
@SuppressWarnings({"unchecked"})
public void write(Object key, Object value) throws IOException {
    // Increment before delegating so the counter reflects attempted writes,
    // including ones that subsequently fail.
    reporter.incrCounter(COUNTERS_GROUP, counterName, 1);
    writer.write(key, value);
}
/**
 * Wraps the datum in an {@code AvroWrapper} and emits it with a
 * {@code NullWritable} value (key-only Avro output).
 *
 * @param key the Avro datum to emit
 * @throws IOException if the underlying writer fails
 */
@SuppressWarnings({"unchecked"})
public void collect(Object key) throws IOException {
    final AvroWrapper wrapped = new AvroWrapper(key);
    writer.write(wrapped, NullWritable.get());
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override
public void collect(Object key, Object val) throws IOException {
    if (writer == null) {
        // No direct writer configured: route through the task context's
        // output, translating Ignite failures into IOException for callers.
        try {
            taskCtx.output().write(key, val);
        } catch (IgniteCheckedException e) {
            throw new IOException(e);
        }
    } else {
        writer.write(key, val);
    }
}
/**
 * Serializes the record's fields with the configured SerDe and hands the
 * result to the base record writer under a {@code null} key; the
 * caller-supplied key is ignored.
 *
 * @param key   ignored
 * @param value record whose fields are serialized and written
 * @throws IOException if serialization fails (wraps SerDeException)
 * @throws InterruptedException if the underlying writer is interrupted
 */
@Override
public void write(WritableComparable<?> key, HCatRecord value)
    throws IOException, InterruptedException {
  try {
    final Object serialized = serDe.serialize(value.getAll(), hcatRecordOI);
    getBaseRecordWriter().write(null, serialized);
  } catch (SerDeException e) {
    // Preserve the cause so callers can diagnose the serialization failure.
    throw new IOException("Failed to serialize object", e);
  }
}
/**
 * Converts an incoming {@code VectorizedRowBatch} into an Arrow batch and
 * streams it to the LLAP output service, lazily acquiring the record writer
 * on first use and marking that data has been written.
 *
 * @param data expected to be a {@code VectorizedRowBatch} — cast is unchecked
 * @param tag  operator input tag (unused here)
 * @throws HiveException declared by the operator contract; conversion and
 *         write failures are rethrown as RuntimeException instead
 */
@Override public void process(Object data, int tag) throws HiveException {
  // ArrowStreamReader expects at least the schema metadata; if this op writes
  // no data, we need to send the schema to close the stream gracefully.
  VectorizedRowBatch batch = (VectorizedRowBatch) data;
  try {
    // Lazy init: acquire the writer for this attempt on the first batch.
    if(recordWriter == null) {
      recordWriter = LlapOutputFormatService.get().getWriter(this.attemptId);
    }
    // Convert the VectorizedRowBatch to a handle for the Arrow batch.
    ArrowWrapperWritable writable = converter.serializeBatch(batch, true);
    // Pass the handle to the LlapOutputFormatService recordWriter.
    recordWriter.write(null, writable);
    // Remember that real data went out so closeOp() can skip the
    // schema-only EOS batch.
    this.wroteData = true;
  } catch(Exception e) {
    LOG.error("Failed to convert VectorizedRowBatch to Arrow batch");
    throw new RuntimeException(e);
  }
}
@Override protected void closeOp(boolean abort) throws HiveException { try { if(!wroteData) { //Send a schema only batch to signal EOS with no data written ArrowWrapperWritable writable = converter.emptyBatch(); if(recordWriter == null) { recordWriter = LlapOutputFormatService.get().getWriter(this.attemptId); } recordWriter.write(null, writable); } } catch(Exception e) { LOG.error("Failed to write Arrow stream schema"); throw new RuntimeException(e); } finally { try { //Close the recordWriter with null Reporter recordWriter.close(null); } catch(Exception e) { LOG.error("Failed to close Arrow stream"); throw new RuntimeException(e); } } }
/**
 * Writes one HCatRecord through the writer associated with the record's
 * (dynamic) partition: resolves the per-partition local writer, strips the
 * partition columns from the record, serializes the remaining fields, and
 * hands them to that writer.
 *
 * @param key   ignored — a surrogate key is chosen per output format below
 * @param value record to write; mutated in place (partition columns removed)
 * @throws IOException if serialization fails (wraps SerDeException)
 * @throws InterruptedException if the underlying writer is interrupted
 */
@Override public void write(WritableComparable<?> key, HCatRecord value) throws IOException, InterruptedException {
  LocalFileWriter localFileWriter = getLocalFileWriter(value);
  RecordWriter localWriter = localFileWriter.getLocalWriter();
  ObjectInspector localObjectInspector = localFileWriter.getLocalObjectInspector();
  AbstractSerDe localSerDe = localFileWriter.getLocalSerDe();
  // NOTE(review): localJobInfo appears unused in this method — confirm
  // before removing.
  OutputJobInfo localJobInfo = localFileWriter.getLocalJobInfo();
  // Strip partition columns from the record: their values are encoded in
  // the partition path, not the data file.
  for (Integer colToDel : partColsToDel) {
    value.remove(colToDel);
  }
  try {
    // The key given by user is ignored - in case of Parquet we need to supply null
    Object keyToWrite = localWriter instanceof ParquetRecordWriterWrapper ? null : NullWritable.get();
    localWriter.write(keyToWrite, localSerDe.serialize(value.getAll(), localObjectInspector));
  } catch (SerDeException e) {
    throw new IOException("Failed to serialize object", e);
  }
}
.new AvroTextRecordWriter(fileWriter, "\t".getBytes(StandardCharsets.UTF_8)); rw.write(null, null); rw.write(null, NullWritable.get()); rw.write(NullWritable.get(), null); rw.write(NullWritable.get(), NullWritable.get()); rw.write("k1", null); rw.write("k2", NullWritable.get()); rw.write(null, "v1"); rw.write(NullWritable.get(), "v2"); rw.write("k3", "v3"); rw.write(new Text("k4"), new Text("v4"));
writer.write(NullWritable.get(),text);
outFormat.getRecordWriter(fs, conf, testFilePath.toString(), Reporter.NULL); writer.write(NullWritable.get(), serde.serialize(new SimpleRow(null), inspector)); writer.write(NullWritable.get(), serde.serialize(new SimpleRow(null), inspector)); writer.write(NullWritable.get(), serde.serialize(new SimpleRow(null), inspector)); writer.close(Reporter.NULL);
outFormat.getRecordWriter(fs, conf, testFilePath.toString(), Reporter.NULL); writer.write(NullWritable.get(), serde.serialize(new NestedRow(1,2,3), inspector)); writer.write(NullWritable.get(), serde.serialize(new NestedRow(4,5,6), inspector)); writer.write(NullWritable.get(), serde.serialize(new NestedRow(7,8,9), inspector)); writer.close(Reporter.NULL);