Refine search
/**
 * Restores this record's fields from the stream, per the
 * {@code java.io.Externalizable} contract. Delegates to the Avro-generated
 * {@code READER$} datum reader over a decoder wrapping {@code in}.
 *
 * @param in the stream to read this record's state from
 * @throws java.io.IOException if decoding fails
 */
@Override public void readExternal(java.io.ObjectInput in) throws java.io.IOException { READER$.read(this, SpecificData.getDecoder(in)); }
/**
 * Serializes this record's fields to the stream, per the
 * {@code java.io.Externalizable} contract. Delegates to the Avro-generated
 * {@code WRITER$} datum writer over an encoder wrapping {@code out}.
 *
 * @param out the stream to write this record's state to
 * @throws java.io.IOException if encoding fails
 */
@Override public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { WRITER$.write(this, SpecificData.getEncoder(out)); }
/**
 * Builds a {@link ParquetWriterFactory} for an Avro specific-record type.
 * The factory's Parquet writers derive their columnar layout from the Avro
 * schema resolved for {@code type}.
 *
 * @param type the specific-record class to write
 * @param <T> the record type
 * @return a writer factory bound to the type's Avro schema
 */
public static <T extends SpecificRecordBase> ParquetWriterFactory<T> forSpecificRecord(Class<T> type) {
    // NOTE(review): the schema is captured as a String rather than a Schema
    // object — presumably so the builder lambda stays serializable; confirm.
    final String schemaString = SpecificData.get().getSchema(type).toString();
    final ParquetBuilder<T> builder =
        out -> createAvroParquetWriter(schemaString, SpecificData.get(), out);
    return new ParquetWriterFactory<>(builder);
}
/**
 * Constructs a writer for instances of the given generated specific-record
 * class.
 *
 * <p>Fix: both the root schema and the data model are now obtained from
 * {@link SpecificData#getForClass(Class)}. Previously the schema came from
 * the default {@code SpecificData.get()} model while the data model came
 * from {@code getForClass(c)}; the default model may lack the logical-type
 * conversions registered on the class-specific model, producing a
 * schema/model mismatch when writing logical types.
 *
 * @param c the generated specific-record class to write
 */
public SpecificDatumWriter(Class<T> c) {
    super(SpecificData.getForClass(c).getSchema(c), SpecificData.getForClass(c));
}
/**
 * Resolves the {@link SpecificData} model to use for a reader schema.
 *
 * <p>For RECORD schemas this loads the generated class named by the schema
 * and returns that class's model, so any logical-type conversions registered
 * on the class are honoured. In every other case — non-record schemas,
 * schemas without a class name, or classes that cannot be loaded — the
 * default model is returned.
 *
 * @param reader the reader schema
 * @return the class-specific model, or {@code SpecificData.get()} as fallback
 */
public static SpecificData getForSchema(Schema reader) {
    if (reader.getType() != Type.RECORD) {
        return SpecificData.get();
    }
    final String className = getClassName(reader);
    if (className == null) {
        return SpecificData.get();
    }
    try {
        return getForClass(Class.forName(className));
    } catch (ClassNotFoundException ignored) {
        // Generated class is not on the classpath; fall back to the default model.
        return SpecificData.get();
    }
}
// NOTE(review): this excerpt is truncated/garbled and will not compile as-is:
// the `if (conversion != null)` block is never closed before the `switch`,
// several switch cases appear to be missing, and `intClass` is not defined in
// this snippet (it looks like a test-fixture field from elsewhere — the real
// method presumably compares against a schema property; confirm against the
// original SpecificData source). Recover the full method before editing.
@Override public Class getClass(Schema schema) { Conversion<?> conversion = getConversionFor(schema.getLogicalType()); if (conversion != null) { return conversion.getConvertedType(); switch (schema.getType()) { case ARRAY: Class collectionClass = getClassProp(schema, CLASS_PROP); if (collectionClass != null) return collectionClass; Class elementClass = getClass(schema.getElementType()); if(elementClass.isPrimitive()) { if (Character.class.getName().equals(intClass)) return Character.TYPE; default: return super.getClass(schema);
@Override public void setSchema(Schema actual) { // if expected is unset and actual is a specific record, // then default expected to schema of currently loaded class if (getExpected() == null && actual != null && actual.getType() == Schema.Type.RECORD) { SpecificData data = getSpecificData(); Class c = data.getClass(actual); if (c != null && SpecificRecord.class.isAssignableFrom(c)) setExpected(data.getSchema(c)); } super.setSchema(actual); }
/**
 * Creates an Avro deserialization schema for the given specific-record
 * class. Supplying the concrete generated class may improve performance.
 *
 * @param recordClazz Avro record class used to deserialize Avro records into Flink rows
 */
public AvroRowDeserializationSchema(Class<? extends SpecificRecord> recordClazz) {
    Preconditions.checkNotNull(recordClazz, "Avro record class must not be null.");
    this.recordClazz = recordClazz;
    // Resolve the schema once from the generated class and cache both forms.
    this.schema = SpecificData.get().getSchema(recordClazz);
    this.schemaString = schema.toString();
    this.typeInfo = (RowTypeInfo) AvroSchemaConverter.convertToTypeInfo(recordClazz);
    // Reusable record instance plus reader/decoder for repeated deserialization.
    this.record = (IndexedRecord) SpecificData.newInstance(recordClazz, schema);
    this.datumReader = new SpecificDatumReader<>(schema);
    this.inputStream = new MutableByteArrayInputStream();
    this.decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
}
/**
 * Invokes, via reflection, the implementation method corresponding to an
 * Avro protocol message.
 *
 * <p>Parameter values are pulled from the request record by field name and
 * parameter types are resolved through the {@code SpecificData} model, so
 * the Java method is located by its exact signature.
 *
 * <p>Fixes: raw {@code Class[]} replaced with {@code Class<?>[]}, and the
 * two identical reflection catch blocks collapsed into one multi-catch.
 *
 * @param message the protocol message being responded to
 * @param request the request, a {@code GenericRecord} of the message's request schema
 * @return whatever the implementation method returns
 * @throws Exception the implementation's own exception (unwrapped from the
 *     reflective call), or {@code AvroRuntimeException} on reflection failures
 */
@Override
public Object respond(Message message, Object request) throws Exception {
    final int numParams = message.getRequest().getFields().size();
    final Object[] params = new Object[numParams];
    final Class<?>[] paramTypes = new Class<?>[numParams];
    int i = 0;
    try {
        for (Schema.Field param : message.getRequest().getFields()) {
            params[i] = ((GenericRecord) request).get(param.name());
            paramTypes[i] = getSpecificData().getClass(param.schema());
            i++;
        }
        Method method = impl.getClass().getMethod(message.getName(), paramTypes);
        method.setAccessible(true); // implementation class may not be public
        return method.invoke(impl, params);
    } catch (InvocationTargetException e) {
        // Surface the implementation's real exception; wrap non-Exception throwables.
        if (e.getTargetException() instanceof Exception) {
            throw (Exception) e.getTargetException();
        }
        throw new Exception(e.getTargetException());
    } catch (NoSuchMethodException | IllegalAccessException e) {
        throw new AvroRuntimeException(e);
    }
}
/**
 * Resolves the Java classes for a plain int schema and a nullable-int union
 * before each test runs.
 */
@Before
public void setUp() {
    final Schema intSchema = Schema.create(Type.INT);
    intClass = SpecificData.get().getClass(intSchema);
    // Union of null and int — presumably mapping to boxed Integer, hence the
    // field name integerClass.
    final Schema nullableInt =
        Schema.createUnion(Arrays.asList(Schema.create(Type.NULL), intSchema));
    integerClass = SpecificData.get().getClass(nullableInt);
}
/**
 * Verifies that a SortedKeyValueFile written with each named codec records
 * that codec's name in the data file's {@code avro.codec} metadata.
 *
 * <p>Fix: the debug log message was missing a space between the codec name
 * and the word "codec".
 *
 * @throws IOException on any file I/O failure
 */
@Test
public void testNamedCodecs() throws IOException {
    Configuration conf = new Configuration();
    Path myfile = new Path(mTempDir.getRoot().getPath(), "myfile");
    Schema key = Schema.create(Schema.Type.STRING);
    Schema value = Schema.create(Schema.Type.STRING);
    Schema recordSchema = AvroKeyValue.getSchema(key, value);
    DatumReader<GenericRecord> datumReader = SpecificData.get().createDatumReader(recordSchema);
    DataFileReader<GenericRecord> reader;
    SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options()
        .withKeySchema(key)
        .withValueSchema(value)
        .withConfiguration(conf)
        .withPath(myfile);
    SortedKeyValueFile.Writer<CharSequence, CharSequence> writer;
    for (String codec : new String[] {"null", "deflate", "snappy", "bzip2"}) {
        LOG.debug("Using " + codec + " codec for a SortedKeyValueFile...");
        options.withCodec(codec);
        // An empty file per codec is enough: only the metadata is asserted.
        writer = new SortedKeyValueFile.Writer<>(options);
        writer.close();
        reader = new DataFileReader<>(
            new FsInput(new Path(myfile, SortedKeyValueFile.DATA_FILENAME), conf), datumReader);
        assertEquals(codec, reader.getMetaString("avro.codec"));
        reader.close();
    }
}
/**
 * Constructs a reader for instances of the given generated class.
 *
 * <p>A fresh {@code SpecificData} bound to the class's own class loader is
 * used as the data model, and both writer and reader schemas default to the
 * schema of {@code c}.
 *
 * @param c the specific-record class to read
 */
public SpecificDatumReader(Class<T> c) { this(new SpecificData(c.getClassLoader())); setSchema(getSpecificData().getSchema(c)); }
/** * Converts an Avro class into a nested row structure with deterministic field order and data * types that are compatible with Flink's Table & SQL API. * * @param avroClass Avro specific record that contains schema information * @return type information matching the schema */ @SuppressWarnings("unchecked") public static <T extends SpecificRecord> TypeInformation<Row> convertToTypeInfo(Class<T> avroClass) { Preconditions.checkNotNull(avroClass, "Avro specific record class must not be null."); // determine schema to retrieve deterministic field order final Schema schema = SpecificData.get().getSchema(avroClass); return (TypeInformation<Row>) convertToTypeInfo(schema); }
/**
 * Copy constructor: duplicates another builder's error-constructor, value,
 * and cause state so the new builder reports exactly the same set-state as
 * the source.
 *
 * @param other the builder instance to copy
 */
protected SpecificErrorBuilderBase(SpecificErrorBuilderBase<T> other) {
    super(other, SpecificData.get());
    this.errorConstructor = other.errorConstructor;
    // Copy each payload alongside its "has" flag.
    this.hasValue = other.hasValue;
    this.value = other.value;
    this.hasCause = other.hasCause;
    this.cause = other.cause;
}
public boolean equals(Object that) { if (that == this) return true; // identical object if (!(that instanceof SpecificExceptionBase)) return false; // not a record if (this.getClass() != that.getClass()) return false; // not same schema return SpecificData.get().compare(this, that, this.getSchema()) == 0; }
@Override public Object createFixed(Object old, Schema schema) { Class c = getClass(schema); if (c == null) return super.createFixed(old, schema); // punt to generic return c.isInstance(old) ? old : newInstance(c, schema); }