this.decoderFactory = new DecoderFactory(); md5ToAvroSchemaMap = new MD5AvroSchemaMap();
/**
 * Change the schema of an Avro record.
 *
 * @param record The Avro record whose schema is to be changed.
 * @param newSchema The target schema. It must be compatible as reader schema with
 *                  record.getSchema() as writer schema.
 * @return a new Avro record with the new schema, or the input record itself when the
 *         schemas are already equal.
 * @throws IOException if conversion failed.
 */
public static GenericRecord convertRecordSchema(GenericRecord record, Schema newSchema) throws IOException {
  if (record.getSchema().equals(newSchema)) {
    return record;
  }
  try {
    // Use the shared, immutable factory instance instead of allocating a new one per call.
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(recordToByteArray(record), null);
    // Writer schema = the record's own schema, reader schema = the target schema;
    // Avro performs schema resolution during read().
    DatumReader<GenericRecord> reader = new GenericDatumReader<>(record.getSchema(), newSchema);
    return reader.read(null, decoder);
  } catch (IOException e) {
    // Fixed typo in the error message: "Origianl" -> "Original".
    throw new IOException(
        String.format("Cannot convert avro record to new schema. Original schema = %s, new schema = %s",
            record.getSchema(), newSchema),
        e);
  }
}
/**
 * Creates a decoder over the given stream.
 *
 * The stream is retained directly, wrapped by an Avro binary decoder, and also
 * wrapped by a DataInputStream for raw primitive reads.
 *
 * @param in the underlying input stream to decode from.
 */
public ColumnDecoder(InputStream in) {
  this.in = in;
  // Shared factory instance avoids a needless allocation per decoder.
  this.wrappedDecoder = DecoderFactory.get().binaryDecoder(in, null);
  this.dataIn = new DataInputStream(in);
}
/**
 * Creates a decoder over the given stream.
 *
 * The stream is retained directly, wrapped by an Avro binary decoder, and also
 * wrapped by a DataInputStream for raw primitive reads.
 *
 * @param in the underlying input stream to decode from.
 */
public ColumnDecoder(InputStream in) {
  this.in = in;
  // Shared factory instance avoids a needless allocation per decoder.
  this.wrappedDecoder = DecoderFactory.get().binaryDecoder(in, null);
  this.dataIn = new DataInputStream(in);
}
/**
 * Creates a decoder over the given stream.
 *
 * The stream is retained directly, wrapped by an Avro binary decoder, and also
 * wrapped by a DataInputStream for raw primitive reads.
 *
 * @param in the underlying input stream to decode from.
 */
public ColumnDecoder(InputStream in) {
  this.in = in;
  // Shared factory instance avoids a needless allocation per decoder.
  this.wrappedDecoder = DecoderFactory.get().binaryDecoder(in, null);
  this.dataIn = new DataInputStream(in);
}
/**
 * Given a byte array and a DatumReader, decode an avro entity from the byte
 * array. Decodes using the avro BinaryDecoder. Return the constructed entity.
 *
 * @param bytes  The byte array to decode the entity from.
 * @param reader The DatumReader that will decode the byte array.
 * @return The Avro entity.
 */
public static <T> T readAvroEntity(byte[] bytes, DatumReader<T> reader) {
  // DecoderFactory.get() returns the shared, immutable factory -- no need to
  // construct a fresh one for every call.
  Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
  return AvroUtils.<T> readAvroEntity(decoder, reader);
}
/**
 * Given a byte array and a DatumReader, decode an avro entity from the byte
 * array. Decodes using the avro BinaryDecoder. Return the constructed entity.
 *
 * @param bytes  The byte array to decode the entity from.
 * @param reader The DatumReader that will decode the byte array.
 * @return The Avro entity.
 */
public static <T> T readAvroEntity(byte[] bytes, DatumReader<T> reader) {
  // DecoderFactory.get() returns the shared, immutable factory -- no need to
  // construct a fresh one for every call.
  Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
  return AvroUtils.<T> readAvroEntity(decoder, reader);
}
/**
 * Given a byte array and a DatumReader, decode an avro entity from the byte
 * array. Decodes using the avro BinaryDecoder. Return the constructed entity.
 *
 * @param bytes  The byte array to decode the entity from.
 * @param reader The DatumReader that will decode the byte array.
 * @return The Avro entity.
 */
public static <T> T readAvroEntity(byte[] bytes, DatumReader<T> reader) {
  // DecoderFactory.get() returns the shared, immutable factory -- no need to
  // construct a fresh one for every call.
  Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
  return AvroUtils.<T> readAvroEntity(decoder, reader);
}
/**
 * Deserializes a single Avro record from the given binary-encoded buffer.
 *
 * @param buffer Avro binary encoding of one record matching getSchema().
 * @return the decoded record.
 * @throws InvalidDataException if the buffer cannot be decoded against the schema.
 */
@Override
public GenericRecord read(final byte[] buffer) throws InvalidDataException {
  // NOTE(review): SpecificDatumReader falls back to generic records when no
  // generated class is available; kept as-is to preserve behavior.
  final DatumReader<GenericRecord> datumReader = new SpecificDatumReader<>(getSchema());
  final ByteArrayInputStream stream = new ByteArrayInputStream(buffer);
  // A freshly constructed ByteArrayInputStream is already positioned at 0, so
  // the previous stream.reset() call was a no-op and has been removed.
  // Shared factory instance instead of allocating a new DecoderFactory per call.
  final BinaryDecoder binaryDecoder = DecoderFactory.get().binaryDecoder(stream, null);
  try {
    return datumReader.read(null, binaryDecoder);
  } catch (IOException e) {
    throw new InvalidDataException("Error decoding data", e);
  }
}
}
/**
 * Change the schema of an Avro record.
 *
 * @param record The Avro record whose schema is to be changed.
 * @param newSchema The target schema. It must be compatible as reader schema with
 *                  record.getSchema() as writer schema.
 * @return a new Avro record with the new schema, or the input record itself when the
 *         schemas are already equal.
 * @throws IOException if conversion failed.
 */
public static GenericRecord convertRecordSchema(GenericRecord record, Schema newSchema) throws IOException {
  if (record.getSchema().equals(newSchema)) {
    return record;
  }
  try {
    // Use the shared, immutable factory instance instead of allocating a new one per call.
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(recordToByteArray(record), null);
    // Writer schema = the record's own schema, reader schema = the target schema;
    // Avro performs schema resolution during read().
    DatumReader<GenericRecord> reader = new GenericDatumReader<>(record.getSchema(), newSchema);
    return reader.read(null, decoder);
  } catch (IOException e) {
    // Fixed typo in the error message: "Origianl" -> "Original".
    throw new IOException(
        String.format("Cannot convert avro record to new schema. Original schema = %s, new schema = %s",
            record.getSchema(), newSchema),
        e);
  }
}
/**
 * Change the schema of an Avro record.
 *
 * @param record The Avro record whose schema is to be changed.
 * @param newSchema The target schema. It must be compatible as reader schema with
 *                  record.getSchema() as writer schema.
 * @return a new Avro record with the new schema, or the input record itself when the
 *         schemas are already equal.
 * @throws IOException if conversion failed.
 */
public static GenericRecord convertRecordSchema(GenericRecord record, Schema newSchema) throws IOException {
  if (record.getSchema().equals(newSchema)) {
    return record;
  }
  try {
    // Use the shared, immutable factory instance instead of allocating a new one per call.
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(recordToByteArray(record), null);
    // Writer schema = the record's own schema, reader schema = the target schema;
    // Avro performs schema resolution during read().
    DatumReader<GenericRecord> reader = new GenericDatumReader<>(record.getSchema(), newSchema);
    return reader.read(null, decoder);
  } catch (IOException e) {
    // Fixed typo in the error message: "Origianl" -> "Original".
    throw new IOException(
        String.format("Cannot convert avro record to new schema. Original schema = %s, new schema = %s",
            record.getSchema(), newSchema),
        e);
  }
}
/**
 * Converts the avro binary data to the json format.
 *
 * @param event the event whose body holds Avro-binary-encoded data.
 * @return an XContentBuilder holding the JSON form of the record, or null when no
 *         datumReader is configured or parsing fails.
 */
@Override
public XContentBuilder serialize(Event event) {
  XContentBuilder builder = null;
  try {
    if (datumReader != null) {
      // Shared decoder factory; no need to allocate a new one per event.
      Decoder decoder = DecoderFactory.get().binaryDecoder(event.getBody(), null);
      GenericRecord data = datumReader.read(null, decoder);
      logger.trace("Record in event " + data);
      // try-with-resources ensures the parser is closed even when
      // copyCurrentStructure throws (the old code leaked it on failure).
      try (XContentParser parser = XContentFactory
          .xContent(XContentType.JSON)
          .createParser(NamedXContentRegistry.EMPTY,
              DeprecationHandler.THROW_UNSUPPORTED_OPERATION, data.toString())) {
        builder = jsonBuilder().copyCurrentStructure(parser);
      }
    } else {
      logger.error("Schema File is not configured");
    }
  } catch (IOException e) {
    // Fixed SLF4J misuse: the message has no {} placeholder, so the extra
    // e.getMessage() argument was silently ignored; pass the throwable directly.
    logger.error("Exception in parsing avro format data but continuing serialization to process further records", e);
  }
  return builder;
}
DecoderFactory decoderFactory = new DecoderFactory(); Decoder d; switch (format) {
if (schema.getType() != Type.STRING) { GenericDatumReader<Object> reader = new GenericDatumReader<Object>(schema); datum = reader.read(null, new DecoderFactory().jsonDecoder(schema, jsonValue));
Object datum = reader.read(null, new DecoderFactory().jsonDecoder(mSchema, mJsonValue));
protected void init() throws KettleException { if ( m_schemaToUse != null ) { m_avroToPdiConverter = new AvroToPdiConverter( m_schemaToUse ); initTopLevelStructure( m_schemaToUse, true ); } if ( m_normalFields == null || m_normalFields.size() == 0 ) { throw new KettleException( BaseMessages.getString( PKG, "AvroInput.Error.NoFieldPathsDefined" ) ); } m_expansionHandler = checkFieldPaths( m_normalFields, m_outputRowMeta ); int killmeIndex = 0; for ( AvroInputField f : m_normalFields ) { //bypass this for now: int outputIndex = m_outputRowMeta.indexOfValue( f.getPentahoFieldName() ); int outputIndex = killmeIndex++; fieldInit( f, outputIndex ); } if ( m_expansionHandler != null ) { m_expansionHandler.init(); } m_factory = new DecoderFactory(); }
m_factory = new DecoderFactory();
Assert.assertTrue(json.contains("true")); final JsonDecoder decoder = new DecoderFactory().jsonDecoder(avroSchema, new ByteArrayInputStream(bytes)); final DatumReader<GenericRecord> reader = new GenericDatumReader<>(avroSchema); GenericRecord entry;
m_factory = new DecoderFactory(); if ( jsonEncoded ) { try {