/**
 * Creates a reflection-based Avro writer for the given schema.
 *
 * @param schema the Avro schema describing the records to write
 * @return a {@link ReflectDatumWriter} bound to {@code schema} and this data model
 */
@Override
public DatumWriter createDatumWriter(Schema schema) {
  // Diamond removes the raw-type constructor warning; the declared return
  // type stays raw to match the overridden signature.
  return new ReflectDatumWriter<>(schema, this);
}
/**
 * Returns a writer that serializes records of {@code schema} via reflection,
 * using this object as the reflect data model.
 *
 * @param schema the writer schema
 * @return a {@link ReflectDatumWriter} for {@code schema}
 */
@Override
public DatumWriter createDatumWriter(Schema schema) {
  // Diamond instead of a raw-type constructor call; the raw return type is
  // kept unchanged so the @Override signature still matches.
  return new ReflectDatumWriter<>(schema, this);
}
/**
 * Creates a serializer that encodes records with reflection against the
 * supplied writer schema.
 *
 * @param writerSchema the writer schema for the Avro data being serialized
 * @throws IllegalArgumentException if {@code writerSchema} is null
 */
public AvroSerializer(Schema writerSchema) {
  if (writerSchema == null) {
    throw new IllegalArgumentException("Writer schema may not be null");
  }
  mWriterSchema = writerSchema;
  mAvroDatumWriter = new ReflectDatumWriter<>(writerSchema);
}
/**
 * Returns a reflection-based datum writer.
 *
 * @param clazz the record class (not consulted here; the writer resolves its
 *              schema when one is set later)
 * @return a fresh {@link ReflectDatumWriter}
 */
@InterfaceAudience.Private
@Override
public DatumWriter getWriter(Class<Object> clazz) {
  // Diamond removes the raw-type constructor warning without changing the
  // overridden (raw) return type.
  return new ReflectDatumWriter<>();
}
/**
 * Builds the reflect-based writer used to serialize records of {@code schema}.
 *
 * @param schema schema of the records being written
 * @return a {@link ReflectDatumWriter} backed by this instance's {@link ReflectData}
 */
@Override
protected DatumWriter<Object> getDatumWriter(Schema schema) {
  final ReflectData dataModel = getReflectData();
  return new ReflectDatumWriter<>(schema, dataModel);
}
/**
 * Supplies the datum writer for serialization; delegates schema handling to
 * Avro's reflect implementation.
 *
 * @param schema schema of the records being written
 * @return a {@link ReflectDatumWriter} using this instance's reflect data model
 */
@Override
protected DatumWriter<Object> getDatumWriter(Schema schema) {
  final ReflectData model = getReflectData();
  return new ReflectDatumWriter<>(schema, model);
}
/**
 * Creates an {@link AvroFactory} for a plain Java class via Avro reflection.
 *
 * <p>The reader takes the previous schema (when present) as its writer schema,
 * so data written under an older schema can still be read.
 *
 * @param type           the reflectively-mapped class
 * @param cl             class loader handed to the {@link ReflectData} model
 * @param previousSchema schema the data was written with, if known
 */
@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
private static <T> AvroFactory<T> fromReflective(Class<T> type, ClassLoader cl, Optional<Schema> previousSchema) {
  final ReflectData model = new ReflectData(cl);
  final Schema currentSchema = model.getSchema(type);
  final Schema readerWriterSchema = previousSchema.orElse(currentSchema);
  return new AvroFactory<>(
      model,
      currentSchema,
      new ReflectDatumReader<>(readerWriterSchema, currentSchema, model),
      new ReflectDatumWriter<>(currentSchema, model));
}
/**
 * Serializes {@code datum} to Avro binary using a reflect writer, caching the
 * writer, buffer and encoder across calls while the schema stays the same.
 *
 * @param datum       the object to encode
 * @param datumSchema schema describing {@code datum}
 * @return the encoded bytes
 * @throws FlumeException if Avro fails to write the datum
 */
private byte[] serialize(Object datum, Schema datumSchema) throws FlumeException {
  // Rebuild the cached writer, output buffer and encoder only when the schema
  // changes; the encoder is created bound to the freshly created buffer.
  // NOTE(review): not thread-safe — relies on single-threaded use of the
  // mutable schema/out/writer/encoder fields; confirm against callers.
  if (schema == null || !datumSchema.equals(schema)) {
    schema = datumSchema;
    out = new ByteArrayOutputStream();
    writer = new ReflectDatumWriter<>(schema);
    encoder = EncoderFactory.get().binaryEncoder(out, null);
  }
  // Reuse the buffer between calls instead of allocating a new one.
  out.reset();
  try {
    writer.write(datum, encoder);
    encoder.flush();
    return out.toByteArray();
  } catch (IOException e) {
    throw new FlumeException(e);
  }
}
/**
 * Opens an Avro container file holding BYTES records for text output and
 * returns a record writer over it.
 *
 * @param ignore unused file system handle (resolved from the path instead)
 * @param job    job configuration (separator, output options)
 * @param name   base name of the task output file
 * @param prog   progress reporter (unused)
 * @return a writer that appends key/value text records to the Avro file
 * @throws IOException if the output file cannot be created
 */
@Override
public RecordWriter<K, V> getRecordWriter(FileSystem ignore, JobConf job, String name, Progressable prog) throws IOException {
  final byte[] separator = job.get("mapreduce.output.textoutputformat.separator", "\t").getBytes(UTF8);
  final Schema bytesSchema = Schema.create(Schema.Type.BYTES);
  final DataFileWriter<ByteBuffer> fileWriter = new DataFileWriter<>(new ReflectDatumWriter<>());
  AvroOutputFormat.configureDataFileWriter(fileWriter, job);
  final Path outputPath = FileOutputFormat.getTaskOutputPath(job, name + EXT);
  fileWriter.create(bytesSchema, outputPath.getFileSystem(job).create(outputPath));
  return new AvroTextRecordWriter(fileWriter, separator);
}
private AvroSchema(org.apache.avro.Schema schema, Map<String, String> properties) { this.schema = schema; this.schemaInfo = new SchemaInfo(); this.schemaInfo.setName(""); this.schemaInfo.setProperties(properties); this.schemaInfo.setType(SchemaType.AVRO); this.schemaInfo.setSchema(this.schema.toString().getBytes()); this.byteArrayOutputStream = new ByteArrayOutputStream(); this.encoder = EncoderFactory.get().binaryEncoder(this.byteArrayOutputStream, this.encoder); this.datumWriter = new ReflectDatumWriter<>(this.schema); this.reader = new ReflectDatumReader<>(this.schema); }
/**
 * Serializes {@code object} to Avro binary using reflection against
 * {@code clazz}.
 *
 * @param object the value to encode
 * @return the encoded bytes
 * @throws SerializationException if Avro fails to write the object
 */
public byte[] toBytes(T object) {
  ByteArrayOutputStream output = new ByteArrayOutputStream();
  Encoder encoder = new BinaryEncoder(output);
  try {
    // Declared at first use instead of being pre-initialized to null.
    ReflectDatumWriter<T> datumWriter = new ReflectDatumWriter<T>(clazz);
    datumWriter.write(object, encoder);
    encoder.flush();
  } catch (IOException e) {
    throw new SerializationException(e);
  } finally {
    // Closing a ByteArrayOutputStream is a no-op, but keep the helper call
    // for consistency with the rest of this codebase.
    SerializationUtils.close(output);
  }
  return output.toByteArray();
}
schema = userDefinedSchema; } else { datumWriter = new ReflectDatumWriter<E>(avroValueType); schema = ReflectData.get().getSchema(avroValueType);
/**
 * Serialises an object into an Avro data-file byte array, deriving the schema
 * from the object's runtime class via reflection.
 *
 * @param object the object to serialise
 * @return the Avro container-file bytes
 * @throws SerialisationException if the object cannot be written
 */
@Override
public byte[] serialise(final Object object) throws SerialisationException {
  final Schema schema = ReflectData.get().getSchema(object.getClass());
  final DataFileWriter<Object> fileWriter = new DataFileWriter<>(new ReflectDatumWriter<>(schema));
  final ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
  try {
    fileWriter.create(schema, byteOut);
    fileWriter.append(object);
    fileWriter.flush();
  } catch (final IOException e) {
    throw new SerialisationException("Unable to serialise given object of class: " + object.getClass().getName(), e);
  } finally {
    // Best-effort close via the project helper (preserves original semantics).
    close(fileWriter);
  }
  return byteOut.toByteArray();
}
/**
 * Creates a reflect-API test case for the given sample object, deriving the
 * schema from the sample's runtime class.
 *
 * @param name   test case name
 * @param sample a representative instance whose class defines the schema
 * @param factor scaling factor passed through to the superclass
 * @throws IOException if the superclass setup fails
 */
@SuppressWarnings("unchecked")
ReflectTest(String name, T sample, int factor) throws IOException {
  // The schema string handed to super() is generated via reflection.
  super(name, ReflectData.get().getSchema(sample.getClass()).toString(), factor);
  clazz = (Class<T>) sample.getClass();
  // NOTE(review): 'schema' is not declared here — presumably a superclass
  // field parsed from the string passed above; confirm in the base class.
  reader = new ReflectDatumReader<>(schema);
  writer = new ReflectDatumWriter<>(schema);
}
/**
 * Encodes {@code toSerialize} as raw Avro binary (no container framing) using
 * a reflect writer built from its class.
 *
 * @param toSerialize      the value to encode
 * @param toSerializeClass the class to derive the schema from
 * @return the encoded bytes
 * @throws IOException if encoding fails
 */
private static <T> byte[] serializeWithReflectDatumWriter(T toSerialize, Class<T> toSerializeClass) throws IOException {
  final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  final Encoder binaryEncoder = EncoderFactory.get().binaryEncoder(buffer, null);
  final ReflectDatumWriter<T> writer = new ReflectDatumWriter<>(toSerializeClass);
  writer.write(toSerialize, binaryEncoder);
  binaryEncoder.flush();
  return buffer.toByteArray();
}
/**
 * JSON-encodes an entity with a null-allowing reflect writer, prints the
 * result to stdout, and returns the encoded bytes.
 *
 * @param testType  label for the test scenario (not used in encoding)
 * @param entityObj the object to encode
 * @return the JSON-encoded bytes
 * @throws IOException if encoding fails
 */
private <T> byte[] testJsonEncoder(String testType, T entityObj) throws IOException {
  final ReflectData reflectData = ReflectData.AllowNull.get();
  final Schema schema = reflectData.getSchema(entityObj.getClass());
  final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  final Encoder jsonEncoder = EncoderFactory.get().jsonEncoder(schema, buffer);
  final ReflectDatumWriter<T> writer = new ReflectDatumWriter<>(schema, reflectData);
  writer.write(entityObj, jsonEncoder);
  jsonEncoder.flush();
  final byte[] bytes = buffer.toByteArray();
  System.out.println("JSON encoder output:\n" + new String(bytes));
  return bytes;
}
/**
 * Configures the Avro data-file writer from the sink context: sync interval
 * and (best-effort) compression codec.
 *
 * @param context configuration source for interval and codec settings
 */
@Override
public void configure(Context context) {
  final int syncIntervalBytes = context.getInteger(SYNC_INTERVAL_BYTES, DEFAULT_SYNC_INTERVAL_BYTES);
  final String compressionCodec = context.getString(COMPRESSION_CODEC, DEFAULT_COMPRESSION_CODEC);

  writer = new ReflectDatumWriter<T>(getSchema());
  dataFileWriter = new DataFileWriter<T>(writer);
  dataFileWriter.setSyncInterval(syncIntervalBytes);

  // An unknown codec name disables compression rather than failing the sink.
  try {
    dataFileWriter.setCodec(CodecFactory.fromString(compressionCodec));
  } catch (AvroRuntimeException e) {
    logger.warn("Unable to instantiate avro codec with name (" + compressionCodec + "). Compression disabled. Exception follows.", e);
  }
}
/**
 * Writes a "names.avro" file with five sequential &lt;id, name&gt; pairs.
 *
 * @param dir directory in which to create the file
 * @throws IOException if the file cannot be created or written
 */
private void writeNamesFiles(File dir) throws IOException {
  DatumWriter<NamesRecord> writer = new ReflectDatumWriter<>();
  // (parent, child) constructor gives a platform-correct separator instead
  // of hand-concatenating "/".
  File namesFile = new File(dir, "names.avro");
  try (DataFileWriter<NamesRecord> out = new DataFileWriter<>(writer)) {
    out.create(ReflectData.get().getSchema(NamesRecord.class), namesFile);
    for (int i = 0; i < 5; i++) {
      out.append(new NamesRecord(i, "record" + i));
    }
  }
}
/**
 * Writes a "lines.avro" file containing one Text record per line in
 * {@code WordCountUtil.LINES}, creating {@code dir} if needed.
 *
 * @param dir directory in which to create the file
 * @throws IOException if the file cannot be created or written
 */
private void writeLinesFile(File dir) throws IOException {
  DatumWriter<Text> writer = new ReflectDatumWriter<>();
  // (parent, child) constructor gives a platform-correct separator instead
  // of hand-concatenating "/".
  File linesFile = new File(dir, "lines.avro");
  dir.mkdirs();
  // try-with-resources closes the writer even if create/append throws —
  // the original leaked it on any exception.
  try (DataFileWriter<Text> out = new DataFileWriter<>(writer)) {
    out.create(ReflectData.get().getSchema(Text.class), linesFile);
    for (String line : WordCountUtil.LINES) {
      out.append(new Text(line));
    }
  }
}
/**
 * Writes a "balances.avro" file with five sequential &lt;id, balance&gt; pairs.
 *
 * @param dir directory in which to create the file
 * @throws IOException if the file cannot be created or written
 */
private void writeBalancesFiles(File dir) throws IOException {
  DatumWriter<BalancesRecord> writer = new ReflectDatumWriter<>();
  // (parent, child) constructor gives a platform-correct separator instead
  // of hand-concatenating "/".
  File namesFile = new File(dir, "balances.avro");
  try (DataFileWriter<BalancesRecord> out = new DataFileWriter<>(writer)) {
    out.create(ReflectData.get().getSchema(BalancesRecord.class), namesFile);
    for (int i = 0; i < 5; i++) {
      out.append(new BalancesRecord(i, (long) i + 100));
    }
  }
}