/**
 * Encodes a single record into an Avro binary byte array.
 *
 * @param record the object to encode
 * @return the encoded bytes
 * @throws IOException Signals that an I/O exception has occurred.
 */
public byte[] encode(T record) throws IOException {
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  // Reuse the cached encoder instance; the factory re-targets it at the new stream.
  binaryEncoder = EncoderFactory.get().binaryEncoder(buffer, binaryEncoder);
  datumWriter.write(record, binaryEncoder);
  binaryEncoder.flush();
  buffer.flush();
  return buffer.toByteArray();
}
/** Open a new file for data matching a schema with an explicit sync. */
public DataFileWriter<D> create(Schema schema, OutputStream outs, byte[] sync) throws IOException {
  assertNotOpen();
  this.schema = schema;
  // The schema is stored in the container-file metadata map as a JSON string.
  setMetaInternal(DataFileConstants.SCHEMA, schema.toString());
  if (sync == null ) {
    // No sync supplied: generate a random 16-byte marker.
    this.sync = generateSync();
  } else if (sync.length == 16) {
    this.sync = sync;
  } else {
    throw new IOException("sync must be exactly 16 bytes");
  }
  init(outs);
  // Container file header layout: magic, metadata map, then the sync marker.
  vout.writeFixed(DataFileConstants.MAGIC); // write magic
  vout.writeMapStart(); // write metadata
  vout.setItemCount(meta.size());
  for (Map.Entry<String,byte[]> entry : meta.entrySet()) {
    vout.startItem();
    vout.writeString(entry.getKey());
    vout.writeBytes(entry.getValue());
  }
  vout.writeMapEnd();
  vout.writeFixed(this.sync); // write initial sync
  vout.flush(); //vout may be buffered, flush before writing to out
  return this;
}
/**
 * Writes this buffered block to the encoder: entry count, payload byte size,
 * the payload bytes, then the trailing sync marker.
 */
void writeBlockTo(BinaryEncoder e, byte[] sync) throws IOException {
  e.writeLong(this.numEntries);
  e.writeLong(this.blockSize);
  // Only blockSize bytes starting at offset are valid payload in the backing array.
  e.writeFixed(this.data, offset, this.blockSize);
  e.writeFixed(sync);
  if (flushOnWrite) {
    e.flush();
  }
}
/**
 * Writes the datum's raw buffer to the shared encoder as Avro bytes.
 *
 * @param datum the data whose buffer is written
 * @throws IOException if the encoder fails to write
 */
public void serialize(TetherData datum) throws IOException {
  encoder.writeBytes(datum.buffer());
  encoder.flush(); //Flush shouldn't be required. Might be a bug in AVRO.
}
public byte[] serialize(String topic, GenericRecord data) throws SerializationException { Schema schema = data.getSchema(); MD5Digest schemaId = null; try { schemaId = schemaRegistry.register(topic, schema); ByteArrayOutputStream out = new ByteArrayOutputStream(); // MAGIC_BYTE | schemaId-bytes | avro_payload out.write(LiAvroSerDeHelper.MAGIC_BYTE); out.write(schemaId.asBytes()); BinaryEncoder encoder = encoderFactory.directBinaryEncoder(out, null); DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema); writer.write(data, encoder); encoder.flush(); byte[] bytes = out.toByteArray(); out.close(); return bytes; } catch (IOException | SchemaRegistryException e) { throw new SerializationException(e); } }
// NOTE(review): truncated fragment — the enclosing method, the closing braces of
// these try/for blocks, and the loop that defines genericRecord are outside this
// view. Tokens preserved as-is; confirm structure against the full source.
Producer<byte[], byte[]> producer = new Producer<>(producerConfig);
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream(65536)) {
  for (File avroFile : avroFiles) {
    try (DataFileStream<GenericRecord> reader = AvroUtils.getAvroReader(avroFile)) {
      // Direct (unbuffered) encoder over the reusable output stream.
      BinaryEncoder binaryEncoder = new EncoderFactory().directBinaryEncoder(outputStream, null);
      GenericDatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(reader.getSchema());
      outputStream.reset();
      // Optional message header bytes are prepended before the Avro payload.
      if (header != null && 0 < header.length) {
        outputStream.write(header);
      // NOTE(review): the closing brace of this if, and the origin of
      // genericRecord, are beyond this chunk — TODO confirm.
      datumWriter.write(genericRecord, binaryEncoder);
      binaryEncoder.flush();
/**
 * Serializes a Flume event either as an AvroFlumeEvent record or as its raw body.
 *
 * @param event the event to serialize
 * @param useAvroEventFormat true to Avro-encode headers + body; false to return the body as-is
 * @return the serialized bytes
 * @throws IOException if Avro encoding fails
 */
private byte[] serializeEvent(Event event, boolean useAvroEventFormat) throws IOException {
  if (!useAvroEventFormat) {
    return event.getBody();
  }
  // Lazily create the reusable stream and writer on first use.
  if (!tempOutStream.isPresent()) {
    tempOutStream = Optional.of(new ByteArrayOutputStream());
  }
  if (!writer.isPresent()) {
    writer = Optional.of(new SpecificDatumWriter<AvroFlumeEvent>(AvroFlumeEvent.class));
  }
  tempOutStream.get().reset();
  AvroFlumeEvent avroEvent =
      new AvroFlumeEvent(toCharSeqMap(event.getHeaders()), ByteBuffer.wrap(event.getBody()));
  // Reuse the cached encoder; the factory re-targets it at the reset stream.
  encoder = EncoderFactory.get().directBinaryEncoder(tempOutStream.get(), encoder);
  writer.get().write(avroEvent, encoder);
  encoder.flush();
  return tempOutStream.get().toByteArray();
}
/**
 * Serializes the accumulated class references and operations into the wire
 * format: two version bytes followed by an Avro-encoded "Message" record.
 */
@Override
public byte[] serialize() {
  final ByteArrayOutputStream out = new ByteArrayOutputStream();
  // Protocol version header precedes the Avro payload.
  out.write( KnownProtocols.MAJOR_VERSION );
  out.write( KnownProtocols.LATEST_MINOR_VERSION );
  final Schema msgSchema = protocol.getType( "Message" );
  final GenericRecord message = new GenericData.Record( msgSchema );
  message.put( "classReferences", classReferences );
  message.put( "operations", operations );
  // Release the reference once it has been copied into the record.
  operations = null;
  try {
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<>( msgSchema );
    BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder( out, null );
    writer.write( message, encoder );
    encoder.flush();
  } catch (IOException e) {
    throw log.unableToSerializeInAvro( e );
  }
  return out.toByteArray();
}
@BeforeClass public static void generateData() throws IOException { seed = (int)System.currentTimeMillis(); // note some tests (testSkipping) rely on this explicitly String jsonSchema = "{\"type\": \"record\", \"name\": \"Test\", \"fields\": [" +"{\"name\":\"intField\", \"type\":\"int\"}," +"{\"name\":\"bytesField\", \"type\":\"bytes\"}," +"{\"name\":\"booleanField\", \"type\":\"boolean\"}," +"{\"name\":\"stringField\", \"type\":\"string\"}," +"{\"name\":\"floatField\", \"type\":\"float\"}," +"{\"name\":\"doubleField\", \"type\":\"double\"}," +"{\"name\":\"arrayField\", \"type\": " + "{\"type\":\"array\", \"items\":\"boolean\"}}," +"{\"name\":\"longField\", \"type\":\"long\"}]}"; schema = Schema.parse(jsonSchema); GenericDatumWriter<Object> writer = new GenericDatumWriter<>(); writer.setSchema(schema); ByteArrayOutputStream baos = new ByteArrayOutputStream(8192); BinaryEncoder encoder = e_factory.binaryEncoder(baos, null); for (Object datum : new RandomData(schema, count, seed)) { writer.write(datum, encoder); records.add(datum); } encoder.flush(); data = baos.toByteArray(); }
/** Round-trips a record holding a direct ByteBuffer through the reflect writer/reader. */
@Test
public void testNew() throws IOException {
  // Build a direct buffer containing 500 ints, then flip it for reading.
  ByteBuffer payload = ByteBuffer.allocateDirect(8 * 1024);
  for (int i = 0; i < 500; i++) {
    payload.putInt(1);
  }
  payload.flip();

  ByteBufferRecord original = new ByteBufferRecord();
  original.setPayload(payload);
  original.setTp(TypeEnum.b);

  // Serialize with a blocking binary encoder.
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  ReflectDatumWriter<ByteBufferRecord> writer =
      new ReflectDatumWriter<ByteBufferRecord>(ByteBufferRecord.class);
  BinaryEncoder avroEncoder = EncoderFactory.get().blockingBinaryEncoder(out, null);
  writer.write(original, avroEncoder);
  avroEncoder.flush();

  // Deserialize and verify equality with the original record.
  ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
  ReflectDatumReader<ByteBufferRecord> reader =
      new ReflectDatumReader<ByteBufferRecord>(ByteBufferRecord.class);
  BinaryDecoder avroDecoder = DecoderFactory.get().binaryDecoder(in, null);
  ByteBufferRecord deserialized = reader.read(null, avroDecoder);
  Assert.assertEquals(original, deserialized);
}
/**
 * Serializes an object to bytes with a one-byte type tag: Avro containers are
 * tagged RECORD and written as schema-name + Avro binary payload; Schemas are
 * tagged SCHEMA and written as their JSON string; everything else is tagged
 * OTHER and Java-serialized.
 *
 * @param o the object to serialize
 * @return the serialized bytes
 * @throws IOException if serialization fails
 */
@Override
public byte[] objectToByteBuffer(Object o) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  ObjectOutputStream oos = new ObjectOutputStream(baos);
  if (o instanceof GenericContainer) {
    oos.write(RECORD);
    GenericContainer container = (GenericContainer) o;
    oos.writeObject((container.getSchema().getFullName()));
    // FIX: ObjectOutputStream buffers internally. Flush it before the Avro
    // encoder writes directly to baos, otherwise the Avro payload can land in
    // baos ahead of (or interleaved with) the object-stream bytes.
    oos.flush();
    DatumWriter<GenericContainer> datumWriter = new GenericDatumWriter<>(container.getSchema());
    BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(baos, null);
    datumWriter.write(container, encoder);
    encoder.flush();
  } else if (o instanceof Schema) {
    oos.write(SCHEMA);
    oos.writeObject(o.toString());
    // FIX: flush so the buffered object bytes reach baos before toByteArray().
    oos.flush();
  } else {
    oos.write(OTHER);
    oos.writeObject(o);
    // FIX: flush so the buffered object bytes reach baos before toByteArray().
    oos.flush();
  }
  return baos.toByteArray();
}
/**
 * Encodes {@code o} against {@code toSchema}. For a UNION schema the value is
 * written as union-index + (null | bytes); any other schema falls through to
 * the plain {@code toBytes(Object)} encoding. On an encoding failure the
 * union framing is abandoned and the plain encoding is returned as a fallback.
 *
 * @param toSchema the target schema, possibly a union; may be null
 * @param o the value to encode; may be null
 * @return the encoded bytes
 */
public byte[] toBytes(Schema toSchema, Object o) {
  if (toSchema != null && toSchema.getType() == Type.UNION) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    org.apache.avro.io.BinaryEncoder avroEncoder = EncoderFactory.get().binaryEncoder(baos, null);
    try {
      int unionIndex;
      if (o == null) {
        // Select the first null branch of the union.
        unionIndex = firstNullSchemaTypeIndex(toSchema);
        avroEncoder.writeIndex(unionIndex);
        avroEncoder.writeNull();
      } else {
        // Select the first non-null branch and write the value's bytes.
        unionIndex = firstNotNullSchemaTypeIndex(toSchema);
        avroEncoder.writeIndex(unionIndex);
        avroEncoder.writeBytes(toBytes(o));
      }
      avroEncoder.flush();
      return baos.toByteArray();
    } catch (IOException e) {
      // FIX: log the full stack trace, not just the message, before falling
      // back to the un-framed encoding.
      LOG.error(e.getMessage(), e);
      return toBytes(o);
    }
  } else {
    return toBytes(o);
  }
}
/**
 * Avro-encodes a generic record using the shared writer, encoder and buffer.
 * Synchronized because those three are shared mutable state.
 *
 * @throws SchemaSerializationException if the record cannot be encoded
 */
@Override
public synchronized byte[] encode(GenericRecord message) {
  checkArgument(message instanceof GenericAvroRecord);
  GenericAvroRecord avroMessage = (GenericAvroRecord) message;
  try {
    datumWriter.write(avroMessage.getAvroRecord(), encoder);
    encoder.flush();
    return byteArrayOutputStream.toByteArray();
  } catch (Exception e) {
    throw new SchemaSerializationException(e);
  } finally {
    // Leave the shared buffer empty for the next invocation.
    byteArrayOutputStream.reset();
  }
}
/**
 * Avro-encodes {@code message} using the shared writer, encoder and buffer.
 * Synchronized because those three are shared mutable state.
 *
 * @throws SchemaSerializationException if the message cannot be encoded
 */
@Override
public synchronized byte[] encode(T message) {
  try {
    datumWriter.write(message, encoder);
    encoder.flush();
    return byteArrayOutputStream.toByteArray();
  } catch (Exception e) {
    throw new SchemaSerializationException(e);
  } finally {
    // Leave the shared buffer empty for the next invocation.
    byteArrayOutputStream.reset();
  }
}
@Override public void handlePayloadSerialization(OutputStream outputStream, Object input) { try { Schema schema = AvroUtils.computeSchema(input); if (input instanceof byte[]) { outputStream.write((byte[]) input); } else { BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null); DatumWriter<Object> writer; if (input instanceof SpecificRecord) { writer = new SpecificDatumWriter<>(schema); } else { writer = new GenericDatumWriter<>(schema); } writer.write(input, encoder); encoder.flush(); } } catch (IOException e) { throw new AvroRetryableException("Error serializing Avro message", e); } catch (RuntimeException e) { // avro serialization can throw AvroRuntimeException, NullPointerException, // ClassCastException, etc throw new AvroException("Error serializing Avro message", e); } }
// NOTE(review): fragment — creates a fresh (buffered) binary encoder over
// byteBufferOutputStream and flushes it immediately. With nothing written yet
// the flush is effectively a no-op; presumably it pre-initializes the encoder
// state — TODO confirm against the surrounding code.
BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(byteBufferOutputStream, null);
encoder.flush();
/**
 * Avro-encodes {@code datum} to {@code stream} using a per-thread cached
 * direct binary encoder.
 *
 * @param datum the value to encode
 * @param stream destination stream (not closed here)
 * @throws IOException if encoding fails
 */
@Override
public void encode(D datum, OutputStream stream) throws IOException {
  // Reuse the thread-local encoder when present; the factory re-targets it at
  // the new stream instead of allocating a fresh one.
  BinaryEncoder encoder = EncoderFactory.get()
      .directBinaryEncoder(stream, ENCODER.get());
  // Remember this encoder so the next call on this thread can reuse it.
  ENCODER.set(encoder);
  writer.write(datum, encoder);
  encoder.flush();
}
// NOTE(review): this chunk appears scrambled by extraction — the statements
// after the early returns are unreachable as laid out here, the try block is
// never closed in this view, and several identifiers (error, m, response, e,
// handshake, payload, plugin) are defined elsewhere. Tokens preserved as-is;
// this resembles RPC responder handshake/response/error handling — confirm
// against the full source before editing.
try {
  Protocol remote = handshake(in, out, connection);
  out.flush();
  if (remote == null) // handshake failed
    return bbo.getBufferList();
  return null;
  out.writeBoolean(error != null);
  if (error == null)
    writeResponse(m.getResponse(), response, out);
  context.setError(e);
  // Error path: start a fresh buffer and encoder for the error frame.
  bbo = new ByteBufferOutputStream();
  out = EncoderFactory.get().binaryEncoder(bbo, null);
  out.writeBoolean(true);
  writeError(Protocol.SYSTEM_ERRORS, new Utf8(e.toString()), out);
  if (null == handshake) {
    out.flush();
    payload = bbo.getBufferList();
    plugin.serverSendResponse(context);
    META_WRITER.write(context.responseCallMeta(), out);
    out.flush();
/**
 * Persists the collected stack-sample dumps to {@code file} as an Avro binary
 * map of label -> array of StackSampleElement.
 *
 * @param file destination file (created/overwritten)
 * @param collected labeled sample trees to serialize
 * @throws IOException if writing fails
 */
public static void saveLabeledDumps(final File file, final Map<String, SampleNode> collected) throws IOException {
  try (BufferedOutputStream bos = new BufferedOutputStream(Files.newOutputStream(file.toPath()))) {
    final SpecificDatumWriter<StackSampleElement> writer = new SpecificDatumWriter<>(StackSampleElement.SCHEMA$);
    final BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(bos, null);
    // Avro map framing: map start + item count, then one key/value per entry.
    encoder.writeMapStart();
    encoder.setItemCount(collected.size());
    for (Map.Entry<String, SampleNode> entry : collected.entrySet()) {
      encoder.startItem();
      encoder.writeString(entry.getKey());
      // Each value is streamed as an Avro array; the converter callback emits
      // one element at a time through the shared encoder.
      encoder.writeArrayStart();
      Converter.convert(Methods.ROOT, entry.getValue(), -1, 0,
          (final StackSampleElement object, final long deadline) -> {
            encoder.setItemCount(1L);
            encoder.startItem();
            writer.write(object, encoder);
          });
      encoder.writeArrayEnd();
    }
    encoder.writeMapEnd();
    encoder.flush(); // push buffered bytes into bos before try-with-resources closes it
  }
}
/**
 * Gets the request data, generating it first if necessary.
 * @return the request data.
 * @throws Exception if an error occurs generating the request data.
 */
public List<ByteBuffer> getBytes() throws Exception {
  // Lazily built and cached: subsequent calls return the same buffer list.
  if (requestBytes == null) {
    ByteBufferOutputStream bbo = new ByteBufferOutputStream();
    BinaryEncoder out = ENCODER_FACTORY.binaryEncoder(bbo, encoder);
    // use local protocol to write request
    Message m = getMessage();
    context.setMessage(m);
    writeRequest(m.getRequest(), request, out); // write request payload
    out.flush();
    List<ByteBuffer> payload = bbo.getBufferList();
    writeHandshake(out); // prepend handshake if needed
    context.setRequestPayload(payload);
    for (RPCPlugin plugin : rpcMetaPlugins) {
      plugin.clientSendRequest(context); // get meta-data from plugins
    }
    META_WRITER.write(context.requestCallMeta(), out);
    out.writeString(m.getName()); // write message name
    out.flush();
    // Re-append the payload after handshake/metadata so the frame order is
    // [handshake?][meta][message name][payload].
    bbo.append(payload);
    requestBytes = bbo.getBufferList();
  }
  return requestBytes;
}
// NOTE(review): stray closing brace from the original chunk — it appears to
// close the enclosing class/scope; kept byte-identical.
}