/**
 * Custom Java-serialization hook ({@link java.io.Externalizable}): encodes this
 * record via the generated Avro datum writer rather than default field serialization.
 */
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
  final org.apache.avro.io.Encoder avroEncoder = SpecificData.getEncoder(out);
  WRITER$.write(this, avroEncoder);
}
/**
 * Externalizable write hook: delegates to the generated Avro writer so the
 * record is stored in Avro binary form inside the Java serialization stream.
 */
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
  final org.apache.avro.io.Encoder avroEncoder = SpecificData.getEncoder(out);
  WRITER$.write(this, avroEncoder);
}
/**
 * Serializes this record to {@code out} with Avro's binary encoding
 * (Externalizable contract; reader side must use the matching Avro decoder).
 */
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
  final org.apache.avro.io.Encoder avroEncoder = SpecificData.getEncoder(out);
  WRITER$.write(this, avroEncoder);
}
/**
 * Externalizable hook: wraps {@code out} in an Avro encoder and lets the
 * generated {@code WRITER$} emit this record's fields in Avro binary format.
 */
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
  final org.apache.avro.io.Encoder avroEncoder = SpecificData.getEncoder(out);
  WRITER$.write(this, avroEncoder);
}
/**
 * Serializes a {@code Row} into Avro binary bytes.
 *
 * <p>Reuses the instance's {@code arrayOutputStream}/{@code encoder}/{@code datumWriter}
 * across calls, so this method is NOT thread-safe — one serializer instance per thread.
 *
 * <p>Any failure (conversion or encoding) is rethrown as an unchecked
 * {@link RuntimeException} with the original exception preserved as the cause.
 */
@Override public byte[] serialize(Row row) { try { // convert the Row into an Avro GenericRecord shaped by `schema`
final GenericRecord record = convertRowToAvroRecord(schema, row); // drop bytes left over from the previous serialize() call
arrayOutputStream.reset(); datumWriter.write(record, encoder); // flush so the encoder's buffered bytes land in arrayOutputStream before snapshot
encoder.flush(); return arrayOutputStream.toByteArray(); } catch (Exception e) { throw new RuntimeException("Failed to serialize row.", e); } }
/**
 * Writes this record to the Java serialization stream using Avro binary
 * encoding (Externalizable contract of the generated Avro class).
 */
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
  final org.apache.avro.io.Encoder avroEncoder = SpecificData.getEncoder(out);
  WRITER$.write(this, avroEncoder);
}
/**
 * Converts a {@link GenericRecord} into its Avro binary representation.
 *
 * <p>The record's own schema ({@code record.getSchema()}) drives the writer, so the
 * caller does not need to supply one separately.
 *
 * @param record the record to encode
 * @return the Avro-encoded bytes of {@code record}
 * @throws IOException if encoding fails
 */
public static byte[] recordToByteArray(GenericRecord record) throws IOException {
  try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
    Encoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);
    DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(record.getSchema());
    writer.write(record, encoder);
    // Flush before snapshotting the buffer. A direct binary encoder is unbuffered
    // today, but flushing keeps this correct if the encoder type ever changes and
    // matches the other serializers in this codebase, which all flush.
    encoder.flush();
    return out.toByteArray();
  }
}
/**
 * Writes {@code datum} to {@code out} as pretty-printed Avro JSON, followed by a
 * trailing newline, then flushes the stream.
 *
 * @param out    destination stream (flushed before returning)
 * @param schema Avro schema describing {@code datum}
 * @param datum  the value to render
 * @throws IOException if JSON encoding fails
 */
private void dumpJson(PrintStream out, Schema schema, Object datum) throws IOException {
  final DatumWriter<Object> datumWriter = new GenericDatumWriter<>(schema);
  // Third argument `true` requests pretty-printed output.
  final JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, out, true);
  datumWriter.write(datum, encoder);
  encoder.flush();
  out.println();
  out.flush();
}
/**
 * {@inheritDoc}
 *
 * <p>Writes the wrapped Avro datum through the shared {@code mAvroDatumWriter}
 * and flushes the encoder so the bytes reach the underlying stream immediately.
 */
@Override public void serialize(AvroWrapper<T> avroWrapper) throws IOException { mAvroDatumWriter.write(avroWrapper.datum(), mAvroEncoder); // This would be a lot faster if the Serializer interface had a flush() method and the
// Hadoop framework called it when needed. For now, we'll have to flush on every record.
mAvroEncoder.flush(); }
/**
 * Externalizable write path: the generated Avro writer serializes this record
 * in binary form onto the provided {@code ObjectOutput}.
 */
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
  final org.apache.avro.io.Encoder avroEncoder = SpecificData.getEncoder(out);
  WRITER$.write(this, avroEncoder);
}
/**
 * Serializes this record via Avro binary encoding for Java's
 * {@link java.io.Externalizable} mechanism.
 */
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
  final org.apache.avro.io.Encoder avroEncoder = SpecificData.getEncoder(out);
  WRITER$.write(this, avroEncoder);
}
/**
 * Externalizable hook: obtains an Avro encoder for {@code out} and delegates
 * serialization of this record to the generated {@code WRITER$}.
 */
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
  final org.apache.avro.io.Encoder avroEncoder =
      org.apache.avro.specific.SpecificData.getEncoder(out);
  WRITER$.write(this, avroEncoder);
}
/**
 * Custom serialization hook that emits this record as Avro binary data
 * through the class's generated datum writer.
 */
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
  final org.apache.avro.io.Encoder avroEncoder = SpecificData.getEncoder(out);
  WRITER$.write(this, avroEncoder);
}
/**
 * Externalizable write hook; the record's fields are encoded with Avro's
 * binary format instead of default Java field serialization.
 */
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
  final org.apache.avro.io.Encoder avroEncoder = SpecificData.getEncoder(out);
  WRITER$.write(this, avroEncoder);
}
/**
 * Externalizable write path using the fully-qualified Avro helper: wraps
 * {@code out} in a binary encoder and writes this record through {@code WRITER$}.
 */
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
  final org.apache.avro.io.Encoder avroEncoder =
      org.apache.avro.specific.SpecificData.getEncoder(out);
  WRITER$.write(this, avroEncoder);
}
/**
 * Serializes this record onto {@code out} in Avro binary form
 * (Externalizable contract of the generated class).
 */
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
  final org.apache.avro.io.Encoder avroEncoder = SpecificData.getEncoder(out);
  WRITER$.write(this, avroEncoder);
}
/**
 * Externalizable hook: encodes this record with the generated Avro writer so
 * Java serialization carries compact Avro bytes.
 */
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
  final org.apache.avro.io.Encoder avroEncoder = SpecificData.getEncoder(out);
  WRITER$.write(this, avroEncoder);
}
/**
 * Serializes {@code value} into {@code target} using the lazily-initialized Avro
 * writer/encoder pair held by this serializer.
 *
 * <p>When {@code CONCURRENT_ACCESS_CHECK} is enabled, entry/exit bookkeeping detects
 * (and presumably reports) concurrent use of this non-thread-safe serializer; the
 * exit call sits in {@code finally} so the check unwinds even when writing throws.
 *
 * @param value  the record to serialize
 * @param target destination view; bound to the reusable encoder before writing
 * @throws IOException if the Avro writer fails
 */
@Override public void serialize(T value, DataOutputView target) throws IOException { if (CONCURRENT_ACCESS_CHECK) { enterExclusiveThread(); } try { // lazy init: Avro writer/encoder are created on first use (after deserialization of this serializer)
checkAvroInitialized(); // point the reusable encoder at this call's output before writing
this.encoder.setOut(target); this.writer.write(value, this.encoder); } finally { if (CONCURRENT_ACCESS_CHECK) { exitExclusiveThread(); } } }
/**
 * Writes a single {@code Record} as an Avro record to the output stream.
 *
 * <p>Outside an active record set, the schema header is written first (after a
 * flush) so standalone records are self-describing; inside a record set the
 * header was already emitted by the set framing.
 *
 * @param record the record to write
 * @return schema-access attributes for {@code recordSchema}, to be attached to
 *         the flowfile/output by the caller
 * @throws IOException if conversion or encoding fails
 */
@Override public Map<String, String> writeRecord(final Record record) throws IOException { // If we are not writing an active record set, then we need to ensure that we write the
// schema information.
if (!isActiveRecordSet()) { flush(); schemaAccessWriter.writeHeader(recordSchema, getOutputStream()); } // map the generic Record onto the target Avro schema, then encode it
final GenericRecord rec = AvroTypeUtil.createAvroRecord(record, avroSchema); datumWriter.write(rec, encoder); return schemaAccessWriter.getAttributes(recordSchema); }
/**
 * Encodes {@code datum} with the supplied Avro {@code writer} into binary form.
 *
 * @param datum  the value to encode
 * @param schema schema installed on {@code writer} before encoding
 * @param writer datum writer used to perform the encoding (mutated: its schema is set)
 * @return the Avro binary bytes of {@code datum}
 * @throws IOException if encoding fails
 */
private static <T> byte[] render(T datum, Schema schema, DatumWriter<T> writer) throws IOException {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  writer.setSchema(schema);
  // Use the shared singleton factory rather than allocating a new EncoderFactory
  // per call — consistent with EncoderFactory.get() usage elsewhere in this code.
  Encoder enc = EncoderFactory.get().directBinaryEncoder(out, null);
  writer.write(datum, enc);
  enc.flush();
  return out.toByteArray();
}
}