/**
 * Copy constructor: duplicates the state of another error builder of the same type.
 *
 * @param other the SpecificErrorBuilderBase instance to copy
 */
protected SpecificErrorBuilderBase(SpecificErrorBuilderBase<T> other) {
  super(other, SpecificData.get());
  this.errorConstructor = other.errorConstructor;
  // value and cause each travel with a has* flag recording whether they were set.
  this.hasValue = other.hasValue;
  this.value = other.value;
  this.hasCause = other.hasCause;
  this.cause = other.cause;
}
/**
 * Creates a ParquetWriterFactory for an Avro specific type. The Parquet writers will use the
 * schema of that specific type to build and write the columnar data.
 *
 * @param type The class of the type to write.
 */
public static <T extends SpecificRecordBase> ParquetWriterFactory<T> forSpecificRecord(Class<T> type) {
  // The schema is captured as a String rather than a Schema object — presumably so the
  // builder lambda stays serializable; confirm before changing.
  final String schemaString = SpecificData.get().getSchema(type).toString();
  final ParquetBuilder<T> writerBuilder =
      out -> createAvroParquetWriter(schemaString, SpecificData.get(), out);
  return new ParquetWriterFactory<>(writerBuilder);
}
/**
 * Resolves the Java classes Avro maps two schemas to: a plain INT schema and a
 * nullable INT (union of NULL and INT) schema.
 */
@Before
public void setUp() {
  final Schema intSchema = Schema.create(Type.INT);
  intClass = SpecificData.get().getClass(intSchema);
  final Schema nullableIntSchema =
      Schema.createUnion(Arrays.asList(Schema.create(Type.NULL), intSchema));
  integerClass = SpecificData.get().getClass(nullableIntSchema);
}
/**
 * SpecificErrorBuilderBase copy constructor.
 * @param other SpecificErrorBuilderBase instance to copy.
 */
protected SpecificErrorBuilderBase(SpecificErrorBuilderBase<T> other) {
  // Delegate shared builder state to the superclass copy constructor.
  super(other, SpecificData.get());
  this.errorConstructor = other.errorConstructor;
  // value/cause each carry a has* flag recording whether they were explicitly set.
  this.value = other.value;
  this.hasValue = other.hasValue;
  this.cause = other.cause;
  this.hasCause = other.hasCause;
}
/** * Converts an Avro class into a nested row structure with deterministic field order and data * types that are compatible with Flink's Table & SQL API. * * @param avroClass Avro specific record that contains schema information * @return type information matching the schema */ @SuppressWarnings("unchecked") public static <T extends SpecificRecord> TypeInformation<Row> convertToTypeInfo(Class<T> avroClass) { Preconditions.checkNotNull(avroClass, "Avro specific record class must not be null."); // determine schema to retrieve deterministic field order final Schema schema = SpecificData.get().getSchema(avroClass); return (TypeInformation<Row>) convertToTypeInfo(schema); }
/**
 * SpecificRecordBuilderBase copy constructor.
 * @param other SpecificRecordBuilderBase instance to copy.
 */
protected SpecificRecordBuilderBase(SpecificRecordBuilderBase<T> other) {
  // All copying is delegated to the superclass copy constructor.
  super(other, SpecificData.get());
}
/**
 * Restores the transient Avro serialization state after Java deserialization.
 * Reads the record class and schema String (in the order writeObject wrote them — TODO
 * confirm against the matching writeObject), then rebuilds the writer/encoder machinery.
 *
 * @param inputStream the stream to read the serialized fields from
 * @throws ClassNotFoundException if the record class cannot be resolved
 * @throws IOException if reading from the stream fails
 */
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream inputStream) throws ClassNotFoundException, IOException {
  recordClazz = (Class<? extends SpecificRecord>) inputStream.readObject();
  schemaString = (String) inputStream.readObject();
  // Prefer deriving the schema from the record class; fall back to parsing the
  // serialized schema String when no class was recorded.
  if (recordClazz != null) {
    schema = SpecificData.get().getSchema(recordClazz);
  } else {
    schema = new Schema.Parser().parse(schemaString);
  }
  // Recreate the writer, output buffer, and encoder from the restored schema.
  datumWriter = new SpecificDatumWriter<>(schema);
  arrayOutputStream = new ByteArrayOutputStream();
  encoder = EncoderFactory.get().binaryEncoder(arrayOutputStream, null);
}
}
/**
 * Creates a SpecificErrorBuilderBase for building errors of the given type.
 * @param schema the schema associated with the error class.
 */
protected SpecificErrorBuilderBase(Schema schema) {
  // Initialize with the specific-data model so field defaults resolve for this schema.
  super(schema, SpecificData.get());
}
/**
 * Creates an Avro serialization schema for the given specific record class.
 *
 * @param recordClazz Avro record class used to serialize Flink's row to Avro's record
 */
public AvroRowSerializationSchema(Class<? extends SpecificRecord> recordClazz) {
  Preconditions.checkNotNull(recordClazz, "Avro record class must not be null.");
  this.recordClazz = recordClazz;
  // Derive the writer schema from the generated class and keep a String copy of it.
  this.schema = SpecificData.get().getSchema(recordClazz);
  this.schemaString = this.schema.toString();
  this.datumWriter = new SpecificDatumWriter<>(this.schema);
  // Reusable output buffer plus a binary encoder writing into it.
  this.arrayOutputStream = new ByteArrayOutputStream();
  this.encoder = EncoderFactory.get().binaryEncoder(this.arrayOutputStream, null);
}
/**
 * Creates a SpecificErrorBuilderBase for building errors of the given type.
 * @param schema the schema associated with the error class.
 */
protected SpecificErrorBuilderBase(Schema schema) {
  // Initialize with the specific-data model so field defaults resolve for this schema.
  super(schema, SpecificData.get());
}
/**
 * Creates an Avro deserialization schema for the given specific record class. Having the
 * concrete Avro record class might improve performance.
 *
 * @param recordClazz Avro record class used to deserialize Avro's record to Flink's row
 */
public AvroRowDeserializationSchema(Class<? extends SpecificRecord> recordClazz) {
  Preconditions.checkNotNull(recordClazz, "Avro record class must not be null.");
  this.recordClazz = recordClazz;
  // Resolve the reader schema from the generated class, the matching Flink row type
  // information, and a String form of the schema.
  this.schema = SpecificData.get().getSchema(recordClazz);
  this.typeInfo = (RowTypeInfo) AvroSchemaConverter.convertToTypeInfo(recordClazz);
  this.schemaString = this.schema.toString();
  // Reusable record instance, datum reader, input buffer, and binary decoder.
  this.record = (IndexedRecord) SpecificData.newInstance(recordClazz, this.schema);
  this.datumReader = new SpecificDatumReader<>(this.schema);
  this.inputStream = new MutableByteArrayInputStream();
  this.decoder = DecoderFactory.get().binaryDecoder(this.inputStream, null);
}
/**
 * Creates a SpecificRecordBuilderBase by copying an existing record instance.
 * @param other the record instance to copy.
 */
protected SpecificRecordBuilderBase(T other) {
  // Initialize the builder from the record's own schema with the specific-data model.
  super(other.getSchema(), SpecificData.get());
}
}
public boolean equals(Object that) { if (that == this) return true; // identical object if (!(that instanceof SpecificExceptionBase)) return false; // not a record if (this.getClass() != that.getClass()) return false; // not same schema return SpecificData.get().compare(this, that, this.getSchema()) == 0; }
public boolean equals(Object that) { if (that == this) return true; // identical object if (!(that instanceof SpecificExceptionBase)) return false; // not a record if (this.getClass() != that.getClass()) return false; // not same schema return SpecificData.get().compare(this, that, this.getSchema()) == 0; }
@Override public boolean equals(Object that) { if (that == this) return true; // identical object if (!(that instanceof SpecificRecord)) return false; // not a record if (this.getClass() != that.getClass()) return false; // not same schema return SpecificData.get().compare(this, that, this.getSchema(), true) == 0; }
public void validateCountsFileGenericRecord() throws Exception { AvroColumnReader<GenericRecord > reader = new AvroColumnReader<> ( new AvroColumnReader.Params(countFiles).setModel(SpecificData.get())); int numWords = 0; for (GenericRecord wc : reader) { assertEquals((String)wc.get("key"), COUNTS.get(wc.get("key")), (Long)wc.get("value")); //assertEquals(wc.getKey(), COUNTS.get(wc.getKey()), wc.getValue()); numWords++; } reader.close(); assertEquals(COUNTS.size(), numWords); }
/**
 * Verifies that an enum symbol colliding with the Java keyword "default" resolves to
 * the mangled constant {@code Reserved.default$} via createEnum.
 */
@Test
public void testReservedEnumSymbol() throws Exception {
  Object resolved = SpecificData.get().createEnum("default", Reserved.SCHEMA$);
  Assert.assertEquals(Reserved.default$, resolved);
}
/** Make sure that even with nulls, hashCode() doesn't throw NPE. */
@Test
public void testHashCode() {
  // Record with no fields populated must hash without throwing.
  new TestRecord().hashCode();
  // Hashing an explicit null datum against the schema must also be safe.
  SpecificData.get().hashCode(null, TestRecord.SCHEMA$);
}
/**
 * Builds a generic record matching TestRecord's schema (with enum and fixed fields)
 * and checks that deepCopy converts it to the specific TestRecord class — the cast
 * itself is the assertion (it throws ClassCastException on failure).
 */
@Test
public void testConvertGenericToSpecific() {
  GenericRecord generic = new GenericData.Record(TestRecord.SCHEMA$);
  generic.put("name", "foo");
  generic.put("kind", new GenericData.EnumSymbol(Kind.SCHEMA$, "BAR"));
  byte[] hashBytes = new byte[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5};
  generic.put("hash", new GenericData.Fixed(MD5.SCHEMA$, hashBytes));
  TestRecord specific = (TestRecord) SpecificData.get().deepCopy(TestRecord.SCHEMA$, generic);
}
/**
 * Reads the word-count output files back as Pair records and checks each word's
 * count against the expected COUNTS map, including the total record count.
 */
public void validateCountsFile() throws Exception {
  AvroColumnReader.Params params =
      new AvroColumnReader.Params(countFiles).setModel(SpecificData.get());
  AvroColumnReader<Pair<String, Long>> reader = new AvroColumnReader<>(params);
  int wordCount = 0;
  for (Pair<String, Long> entry : reader) {
    // Key doubles as the assertion message for easier failure diagnosis.
    assertEquals(entry.key(), COUNTS.get(entry.key()), entry.value());
    wordCount++;
  }
  reader.close();
  assertEquals(COUNTS.size(), wordCount);
}