/**
 * Creates a converter that deserializes Avro-encoded bytes with the given schema.
 *
 * @param schema the Avro schema used to build the specific datum reader
 */
AvroBytesConverter(Schema schema) {
  this.schema = schema;
  this.dataReader = new SpecificDatumReader<>(this.schema);
}
// Thread-local reader factory: SpecificDatumReader is not thread-safe, so each
// thread lazily gets its own instance built from the enclosing monitor's schema.
@Override protected SpecificDatumReader<T> initialValue() { return new SpecificDatumReader<>(KafkaAvroJobMonitor.this.schema); } };
/**
 * Builds a specific datum reader that resolves data written with the
 * {@code writer} schema against the {@code reader} schema, using this
 * instance as the backing SpecificData.
 */
@Override
public DatumReader createDatumReader(Schema writer, Schema reader) {
  final SpecificDatumReader datumReader = new SpecificDatumReader(writer, reader, this);
  return datumReader;
}
/**
 * Builds a specific datum reader for the given schema. Writer and reader
 * schemas are identical here, so no schema resolution takes place.
 */
@Override
public DatumReader createDatumReader(Schema schema) {
  final SpecificDatumReader datumReader = new SpecificDatumReader(schema, schema, this);
  return datumReader;
}
/**
 * Creates a Flume event reader for the given Avro record class.
 *
 * @param clazz the specific-record class of the payload records
 */
public KaaFlumeEventReader(Class<T> clazz) {
  this.recordReader = new SpecificDatumReader<>(clazz);
  this.recordDataReader = new SpecificDatumReader<>(RecordData.class);
}
/**
 * Creates a byte-array converter for the given Avro record class, wiring up
 * matching specific datum reader and writer instances.
 *
 * @param typeParameterClass the specific-record class handled by this converter
 */
public AvroByteArrayConverter(Class<T> typeParameterClass) {
  // implicit super() call; reader and writer share the same record class
  avroWriter = new SpecificDatumWriter<T>(typeParameterClass);
  avroReader = new SpecificDatumReader<T>(typeParameterClass);
}
/**
 * Creates a JSON converter for the given Avro schema and record class.
 *
 * @param schema the Avro schema used for JSON encoding/decoding
 * @param typeParameterClass the specific-record class handled by this converter
 */
public AvroJsonConverter(Schema schema, Class<T> typeParameterClass) {
  // implicit super() call; reader and writer share the same record class
  this.schema = schema;
  avroWriter = new SpecificDatumWriter<T>(typeParameterClass);
  avroReader = new SpecificDatumReader<T>(typeParameterClass);
}
/**
 * Returns a datum reader that resolves data written with the {@code actual}
 * schema against the {@code expected} schema, backed by this class's
 * SpecificData instance.
 */
@Override
protected DatumReader<Object> getDatumReader(Schema actual, Schema expected) {
  final SpecificDatumReader<Object> datumReader =
      new SpecificDatumReader<>(actual, expected, getSpecificData());
  return datumReader;
}
// Factory methods: each call builds a fresh (non-thread-safe) Avro datum
// reader/writer bound to this instance's schema.
protected SpecificDatumReader<T> newReader() { return new SpecificDatumReader<>(schema); } protected SpecificDatumWriter<T> newWriter() {
/**
 * Builds an AvroFactory for a SpecificRecord type. When a previous schema is
 * supplied it becomes the writer schema, so data encoded with the old schema
 * resolves against the type's current schema on read.
 *
 * @param type the generated specific-record class
 * @param cl the class loader backing the SpecificData instance
 * @param previousSchema writer schema of previously serialized data, if any
 */
@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
private static <T> AvroFactory<T> fromSpecific(Class<T> type, ClassLoader cl, Optional<Schema> previousSchema) {
  final SpecificData specificData = new SpecificData(cl);
  final Schema currentSchema = specificData.getSchema(type);
  final Schema writerSchema = previousSchema.orElse(currentSchema);
  return new AvroFactory<>(
      specificData,
      currentSchema,
      new SpecificDatumReader<>(writerSchema, currentSchema, specificData),
      new SpecificDatumWriter<>(currentSchema, specificData));
}
// Custom Java deserialization hook: restores the transient Avro machinery
// (type info, schema, record instance, datum reader, reusable decoder) from
// the record class and schema string written by the matching writeObject.
// Falls back to a GenericData.Record when no specific record class was stored.
@SuppressWarnings("unchecked") private void readObject(ObjectInputStream inputStream) throws ClassNotFoundException, IOException { recordClazz = (Class<? extends SpecificRecord>) inputStream.readObject(); schemaString = inputStream.readUTF(); typeInfo = (RowTypeInfo) AvroSchemaConverter.<Row>convertToTypeInfo(schemaString); schema = new Schema.Parser().parse(schemaString); if (recordClazz != null) { record = (SpecificRecord) SpecificData.newInstance(recordClazz, schema); } else { record = new GenericData.Record(schema); } datumReader = new SpecificDatumReader<>(schema); this.inputStream = new MutableByteArrayInputStream(); decoder = DecoderFactory.get().binaryDecoder(this.inputStream, null); } }
/**
 * Creates a datum reader for the given specific-record class.
 *
 * <p>Uses the {@code SpecificDatumReader(Class)} constructor, which derives the
 * schema directly from the generated class. This replaces the previous
 * reflective {@code clazz.newInstance()} call (deprecated since Java 9) whose
 * only purpose was to instantiate a record just to read its schema.
 *
 * @param clazz the generated SpecificRecord class to read
 * @return a datum reader for {@code clazz}
 */
@InterfaceAudience.Private
@Override
public DatumReader getReader(Class<SpecificRecord> clazz) {
  return new SpecificDatumReader(clazz);
}
/**
 * Configures this deserializer. The empty stream only primes the reusable
 * binary decoder; real payload bytes are supplied per record at read time.
 */
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
  final InputStream emptyStream = new ByteArrayInputStream(new byte[0]);
  _versionWriter = new FixedSchemaVersionWriter();
  _reader = new SpecificDatumReader<AvroJobSpec>(AvroJobSpec.SCHEMA$);
  _decoder = DecoderFactory.get().binaryDecoder(emptyStream, null);
}
/**
 * Lazily initializes the Avro reading machinery on first use. Specific-record
 * classes get a SpecificDatumReader with a class-derived schema; anything else
 * falls back to a GenericDatumReader using the schema parsed from
 * {@code schemaString}.
 */
void checkAvroInitialized() {
  if (datumReader != null) {
    return; // already initialized
  }
  final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
  if (SpecificRecord.class.isAssignableFrom(recordClazz)) {
    final SpecificData specificData = new SpecificData(classLoader);
    this.reader = specificData.getSchema(recordClazz);
    this.datumReader = new SpecificDatumReader<>(specificData);
  } else {
    final GenericData genericData = new GenericData(classLoader);
    this.reader = new Schema.Parser().parse(schemaString);
    this.datumReader = new GenericDatumReader<>(null, this.reader, genericData);
  }
  this.inputStream = new MutableByteArrayInputStream();
  this.decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
}
// Avro-generated Externalizable hook: decodes this record's fields in place
// from the ObjectInput using the class's own schema.
@Override public void readExternal(ObjectInput in) throws IOException { new SpecificDatumReader(getSchema()) .read(this, SpecificData.getDecoder(in)); }
// Avro-generated Externalizable hook: decodes this record's fields in place
// from the ObjectInput using the class's own schema.
@Override public void readExternal(ObjectInput in) throws IOException { new SpecificDatumReader(getSchema()) .read(this, SpecificData.getDecoder(in)); } }
/**
 * Opens an Avro DataFileReader over the given input split, choosing the datum
 * reader flavor (generic, specific, or reflect) that matches the configured
 * value type, and resets the per-split sync bookkeeping.
 */
private DataFileReader<E> initReader(FileInputSplit split) throws IOException {
  final DatumReader<E> datumReader;
  if (org.apache.avro.generic.GenericRecord.class == avroValueType) {
    datumReader = new GenericDatumReader<E>();
  } else if (org.apache.avro.specific.SpecificRecordBase.class.isAssignableFrom(avroValueType)) {
    datumReader = new SpecificDatumReader<E>(avroValueType);
  } else {
    datumReader = new ReflectDatumReader<E>(avroValueType);
  }
  if (LOG.isInfoEnabled()) {
    LOG.info("Opening split {}", split);
  }
  final long fileLength =
      split.getPath().getFileSystem().getFileStatus(split.getPath()).getLen();
  final SeekableInput in = new FSDataInputStreamWrapper(stream, fileLength);
  final DataFileReader<E> dataFileReader = (DataFileReader) DataFileReader.openReader(in, datumReader);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Loaded SCHEMA: {}", dataFileReader.getSchema());
  }
  end = split.getStart() + split.getLength();
  recordsReadSinceLastSync = 0;
  return dataFileReader;
}
/**
 * Creates an Avro deserialization schema for the given specific record class.
 * Supplying the concrete generated class may improve performance over a
 * schema-string based setup.
 *
 * @param recordClazz Avro record class used to deserialize Avro records into Flink rows
 */
public AvroRowDeserializationSchema(Class<? extends SpecificRecord> recordClazz) {
  Preconditions.checkNotNull(recordClazz, "Avro record class must not be null.");
  this.recordClazz = recordClazz;
  this.schema = SpecificData.get().getSchema(recordClazz);
  this.schemaString = schema.toString();
  this.typeInfo = (RowTypeInfo) AvroSchemaConverter.convertToTypeInfo(recordClazz);
  this.record = (IndexedRecord) SpecificData.newInstance(recordClazz, schema);
  this.datumReader = new SpecificDatumReader<>(schema);
  this.inputStream = new MutableByteArrayInputStream();
  this.decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
}
/**
 * Deserializes an Avro binary payload into an instance of {@code T}.
 *
 * @param bytes Avro binary-encoded payload
 * @return the decoded record
 * @throws SerializationException if the payload cannot be decoded
 */
public T toObject(byte[] bytes) {
  // DecoderFactory.get().binaryDecoder replaces the deprecated
  // defaultFactory().createBinaryDecoder pair; the produced decoder is identical.
  Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
  try {
    SpecificDatumReader<T> reader = new SpecificDatumReader<T>(clazz);
    return reader.read(null, decoder);
  } catch (IOException e) {
    throw new SerializationException(e);
  }
}
}
/**
 * Round-trips {@code original} through Avro serialization twice — once with
 * schema-constructed reader/writer pairs and once with class-constructed ones —
 * verifying serde behavior and the record's standard methods each time.
 */
@SuppressWarnings("unchecked")
<T extends SpecificRecordBase> void verifySerDeAndStandardMethods(T original) {
  final SpecificDatumWriter<T> writerBySchema = new SpecificDatumWriter<>(original.getSchema());
  final SpecificDatumReader<T> readerBySchema =
      new SpecificDatumReader<>(original.getSchema(), original.getSchema());
  verifySerDeAndStandardMethods(original, writerBySchema, readerBySchema);

  final SpecificDatumWriter<T> writerByClass = new SpecificDatumWriter(original.getClass());
  final SpecificDatumReader<T> readerByClass = new SpecificDatumReader(original.getClass());
  verifySerDeAndStandardMethods(original, writerByClass, readerByClass);
}