/** {@inheritDoc} */
@Override
public void initialize(InputSplit inputSplit, TaskAttemptContext context)
    throws IOException, InterruptedException {
  // Report which file split this task is consuming.
  final FileSplit split = (FileSplit) inputSplit;
  context.setStatus(split.toString());

  final AvroColumnReader.Params readerParams =
      new AvroColumnReader.Params(new HadoopInput(split.getPath(), context.getConfiguration()));
  readerParams.setModel(ReflectData.get());
  // Honor a job-configured key schema when present; otherwise the
  // schema embedded in the Trevni file is used as-is.
  if (AvroJob.getInputKeySchema(context.getConfiguration()) != null) {
    readerParams.setSchema(AvroJob.getInputKeySchema(context.getConfiguration()));
  }

  reader = new AvroColumnReader<>(readerParams);
  rows = reader.getRowCount();
}
/**
 * Returns a {@link RecordReader} that deserializes rows of the Trevni file
 * backing {@code split} into Avro datums wrapped in {@link AvroWrapper},
 * with a {@link NullWritable} value.
 *
 * @param split the file split to read
 * @param job the job configuration; may carry an explicit input schema
 * @param reporter used to report which split is being read
 * @throws IOException if the underlying column file cannot be opened
 */
@Override
public RecordReader<AvroWrapper<T>, NullWritable> getRecordReader(InputSplit split,
    final JobConf job, Reporter reporter) throws IOException {
  final FileSplit file = (FileSplit) split;
  reporter.setStatus(file.toString());

  final AvroColumnReader.Params params =
      new AvroColumnReader.Params(new HadoopInput(file.getPath(), job));
  params.setModel(ReflectData.get());
  // Only override the file's embedded schema when the job configured one.
  if (job.get(AvroJob.INPUT_SCHEMA) != null)
    params.setSchema(AvroJob.getInputSchema(job));

  return new RecordReader<AvroWrapper<T>, NullWritable>() {
    private AvroColumnReader<T> reader = new AvroColumnReader<>(params);
    private float rows = reader.getRowCount(); // total rows, for progress
    private long row;                          // rows consumed so far

    @Override
    public AvroWrapper<T> createKey() {
      return new AvroWrapper<>(null);
    }

    @Override
    public NullWritable createValue() {
      return NullWritable.get();
    }

    @Override
    public boolean next(AvroWrapper<T> wrapper, NullWritable ignore) throws IOException {
      if (!reader.hasNext())
        return false;
      wrapper.datum(reader.next());
      row++;
      return true;
    }

    @Override
    public float getProgress() throws IOException {
      // An empty file would otherwise yield 0/0 == NaN; report it complete
      // so the framework always sees a value in [0, 1].
      return rows == 0 ? 1.0f : row / rows;
    }

    @Override
    public long getPos() throws IOException {
      return row;
    }

    @Override
    public void close() throws IOException {
      reader.close();
    }
  };
}
/**
 * Writes a record with the writer schema, then reads it back with the
 * evolved schema and checks the evolution-adjusted record is produced.
 *
 * @throws IOException if writing or reading the Trevni file fails
 */
@Test
public void testTrevniEvolvedRead() throws IOException {
  AvroColumnWriter<GenericRecord> acw = new AvroColumnWriter<>(writer, new ColumnFileMetaData());
  acw.write(writtenRecord);

  File serializedTrevni = File.createTempFile("trevni", null);
  serializedTrevni.deleteOnExit(); // don't leave temp files behind after the test run
  acw.writeTo(serializedTrevni);

  AvroColumnReader.Params params = new Params(serializedTrevni);
  params.setSchema(evolved);
  AvroColumnReader<GenericRecord> acr = new AvroColumnReader<>(params);
  try {
    GenericRecord readRecord = acr.next();
    Assert.assertEquals(evolvedRecord, readRecord);
    Assert.assertFalse(acr.hasNext());
  } finally {
    acr.close(); // release the file handle even when an assertion fails
  }
}
// Configure the reader to use reflect-based in-memory representation and,
// when the job explicitly set an input schema, read with that schema
// (otherwise the schema embedded in the file is used).
// NOTE(review): this fragment references `params` and `job` declared outside
// the visible span — presumably inside a getRecordReader method; confirm.
params.setModel(ReflectData.get()); if (job.get(AvroJob.INPUT_SCHEMA) != null) params.setSchema(AvroJob.getInputSchema(job));
/**
 * Reads {@code FILE} with the given read schema and asserts that the datums
 * read match {@code data}, in order, closing the reader even on failure.
 *
 * @param s the schema to read with (may differ from the write schema)
 * @param data the expected datums, in order
 * @throws Exception if reading fails or an assertion does not hold
 */
private void checkRead(Schema s, List<Object> data) throws Exception {
  // Diamond operator for consistency with the identical helper elsewhere.
  AvroColumnReader<Object> reader =
      new AvroColumnReader<>(new AvroColumnReader.Params(FILE).setSchema(s));
  try {
    for (Object datum : data)
      assertEquals(datum, reader.next());
  } finally {
    reader.close(); // always release the file handle
  }
}
/**
 * Reads {@code FILE} with the given schema and asserts that its contents
 * match the deterministically regenerated random data it was written with.
 *
 * @param schema the schema to read with
 * @throws IOException if reading fails
 */
private void checkRead(Schema schema) throws IOException {
  AvroColumnReader<Object> reader =
      new AvroColumnReader<>(new AvroColumnReader.Params(FILE).setSchema(schema));
  try {
    // Same COUNT and SEED reproduce the exact sequence that was written.
    for (Object expected : new RandomData(schema, COUNT, SEED))
      assertEquals(expected, reader.next());
  } finally {
    reader.close(); // close even when an assertion throws, avoiding a leaked handle
  }
}
/**
 * Verifies that reading {@code FILE} with schema {@code s} yields exactly
 * the datums in {@code data}; the reader is closed even if a check fails.
 */
private void checkRead(Schema s, List<Object> data) throws Exception {
  AvroColumnReader.Params readParams = new AvroColumnReader.Params(FILE).setSchema(s);
  AvroColumnReader<Object> reader = new AvroColumnReader<>(readParams);
  try {
    for (Object expected : data) {
      assertEquals(expected, reader.next());
    }
  } finally {
    reader.close();
  }
}