@Test
public void testAllowNullWithNullableAnnotation() {
  // AllowNull should make annotated fields nullable and leave already-nullable
  // unions alone (aside from ensuring null is a branch).
  Schema schema = ReflectData.AllowNull.get().getSchema(AllowNullWithNullable.class);

  Assert.assertEquals("Should produce a nullable double",
      nullableSchema(double.class), schema.getField("aDouble").schema());

  // Union with null first, as produced when null is prepended.
  Schema nullFirstUnion = Schema.createUnion(Arrays.asList(
      Schema.create(Schema.Type.NULL),
      Schema.create(Schema.Type.DOUBLE),
      Schema.create(Schema.Type.LONG)));
  for (String fieldName : new String[] {"doubleOrLong", "doubleOrLongOrNull1"}) {
    Assert.assertEquals("Should add null to a non-null union",
        nullFirstUnion, schema.getField(fieldName).schema());
  }

  // Union with null last, preserved when the declaration already ends in null.
  Schema nullLastUnion = Schema.createUnion(Arrays.asList(
      Schema.create(Schema.Type.DOUBLE),
      Schema.create(Schema.Type.LONG),
      Schema.create(Schema.Type.NULL)));
  for (String fieldName : new String[] {"doubleOrLongOrNull2", "doubleOrLongOrNull3"}) {
    Assert.assertEquals("Should add null to a non-null union",
        nullLastUnion, schema.getField(fieldName).schema());
  }
}
@Override protected Schema createFieldSchema(Field field, Map<String, Schema> names) { Schema schema = super.createFieldSchema(field, names); if (field.getType().isPrimitive()) { // for primitive values, such as int, a null will result in a // NullPointerException at read time return schema; } return makeNullable(schema); } }
// NOTE(review): this is a fragment of a createSchema(Type, Map) override — the
// enclosing method signature and closing braces are outside this view, so the
// overall control flow cannot be confirmed from here.
if (c.isPrimitive()
    || Number.class.isAssignableFrom(c)
    || c == Void.class || c == Boolean.class) // primitive
  return super.createSchema(type, names);
if (c.isArray()) { // array
  // NOTE(review): 'component' is computed but unused in the visible span —
  // presumably used (or left over) in the full method; verify upstream.
  Class component = c.getComponentType();
  return super.createSchema(type, names);
} else { // record
  // NOTE(review): 'fields' is likewise unused within this fragment.
  List<Schema.Field> fields = new ArrayList<Schema.Field>();
  return super.createSchema(type, names);
@Test public void testReadNullsWithPrimitivesAllowNullSchema() { final String name = "allowNullPrimitives"; try { repo.create(NAMESPACE, name, new DatasetDescriptor.Builder() .schema(ReflectData.AllowNull.get().getSchema(ObjectPoJo.class)) .build(), ObjectPoJo.class); // should load the dataset because PrimitivePoJo can be used to write final Dataset<PrimitivePoJo> dataset = repo.load( NAMESPACE, name, PrimitivePoJo.class); TestHelpers.assertThrows("AllowNull primitives cannot read nullable type", IncompatibleSchemaException.class, new Runnable() { @Override public void run() { dataset.newReader(); } }); } catch (RuntimeException e) { throw e; } finally { repo.delete(NAMESPACE, name); } }
/**
 * Returns a {@link GenericData} instance based on the mode type.
 *
 * @return a {@link GenericData} instance based on the mode type.
 */
public GenericData getData() {
  // An explicit factory always takes precedence over the configured mode.
  if (factory != null) {
    return factory.getData();
  }
  switch (this.modeType) {
    case SPECIFIC:
      return SpecificData.get();
    case REFLECT:
      // Reflect mode uses the null-tolerant variant.
      return ReflectData.AllowNull.get();
    default:
      return GenericData.get();
  }
}
/** Returns the null-tolerant {@link ReflectData} singleton used for reflect-based schemas. */
public ReflectData getReflectData() { return ReflectData.AllowNull.get(); }
/** Returns the null-tolerant {@link ReflectData} singleton used for reflect-based schemas. */
public ReflectData getReflectData() { return ReflectData.AllowNull.get(); }
@Override
public Object getField(Object record, String name, int position) {
  // IndexedRecord instances know their own layout; defer to the base class.
  if (record instanceof IndexedRecord) {
    return super.getField(record, name, position);
  }
  // Otherwise fall back to reflective field access on the POJO.
  try {
    return getField(record.getClass(), name).get(record);
  } catch (IllegalAccessException cause) {
    // Wrap checked reflection failures in Avro's runtime exception, preserving the cause.
    throw new AvroRuntimeException(cause);
  }
}
@Test
public void testCreateWithAllowNullSchema() {
  String name = "allowNull";
  try {
    repo.create(NAMESPACE, name, new DatasetDescriptor.Builder()
        .schema(ReflectData.AllowNull.get().getSchema(ObjectPoJo.class))
        .build());
  } finally {
    // The redundant `catch (RuntimeException e) { throw e; }` was removed:
    // the exception propagates identically, and cleanup still runs.
    repo.delete(NAMESPACE, name);
  }
}
@Override
protected Schema createFieldSchema(Field field, Map<String, Schema> names) {
  // Every field schema produced by the superclass is widened to permit null.
  return makeNullable(super.createFieldSchema(field, names));
}
}
// Added @Override for consistency with the sibling overrides of this method
// elsewhere in the codebase; it lets the compiler verify the signature.
@Override
protected Schema createFieldSchema(Field field, Map<String, Schema> names) {
  // Widen whatever schema the superclass derives so the field may hold null.
  Schema schema = super.createFieldSchema(field, names);
  return makeNullable(schema);
}
}
// Added @Override for consistency with the sibling overrides of this method
// elsewhere in the codebase; it lets the compiler verify the signature.
@Override
protected Schema createFieldSchema(Field field, Map<String, Schema> names) {
  // Widen whatever schema the superclass derives so the field may hold null.
  Schema schema = super.createFieldSchema(field, names);
  return makeNullable(schema);
}
}
/** Returns the null-tolerant {@link ReflectData} used by this component. */
@Override public ReflectData getData() { return ReflectData.AllowNull.get(); }
@Override
protected String getSchemaName(Object datum) {
  // Byte arrays map to the Avro "bytes" type rather than an array schema.
  // instanceof is false for null, so this also covers the original null guard.
  if (datum instanceof byte[]) {
    return Schema.Type.BYTES.getName();
  }
  return super.getSchemaName(datum);
}
}
@Override
protected Schema createFieldSchema(Field field, Map<String, Schema> names) {
  // Derive the field's schema, then widen reference types to a nullable union
  // so absent values can be written safely.
  Schema schema = super.createFieldSchema(field, names);
  if (field.getType().isPrimitive()) {
    // for primitive values, such as int, a null will result in a
    // NullPointerException at read time
    return schema;
  }
  return makeNullable(schema);
}
}
@Test public void testNull() throws IOException { File file = new File(DIR.getRoot().getPath(), "testNull.avro"); CheckList<BarRecord> check = new CheckList<>(); try(FileOutputStream fos = new FileOutputStream(file)) { ReflectData reflectData = ReflectData.AllowNull.get(); Schema schema = reflectData.getSchema(BarRecord.class); try(DataFileWriter<BarRecord> writer = new DataFileWriter<>(new ReflectDatumWriter<>(BarRecord.class, reflectData))) { writer.create(schema, fos); // test writing to a file write(writer, new BarRecord("One beer please"), check); // null record here, fails when using the default reflectData instance write(writer, new BarRecord(), check); write(writer, new BarRecord("Two beers please"), check); } } ReflectDatumReader<BarRecord> din = new ReflectDatumReader<>(); try(SeekableFileInput sin = new SeekableFileInput(file)) { try (DataFileReader<BarRecord> reader = new DataFileReader<>(sin, din)) { int count = 0; for (BarRecord datum : reader) { check.assertEquals(datum, count++); } Assert.assertEquals(count, check.size()); } } }
@Test public void testWrappers() { // AllowNull only makes fields nullable, so testing must use a base record Schema wrappers = ReflectData.AllowNull.get().getSchema(Wrappers.class); Assert.assertEquals(nullableSchema(boolean.class), wrappers.getField("aBoolean").schema()); Assert.assertEquals(nullableSchema(byte.class), wrappers.getField("aByte").schema()); Assert.assertEquals(nullableSchema(short.class), wrappers.getField("aShort").schema()); Assert.assertEquals(nullableSchema(int.class), wrappers.getField("anInt").schema()); Assert.assertEquals(nullableSchema(long.class), wrappers.getField("aLong").schema()); Assert.assertEquals(nullableSchema(float.class), wrappers.getField("aFloat").schema()); Assert.assertEquals(nullableSchema(double.class), wrappers.getField("aDouble").schema()); Assert.assertEquals(nullableSchema(Primitives.class), wrappers.getField("anObject").schema()); }
@Test public void testRecordWithNullIO() throws IOException { ReflectData reflectData = ReflectData.AllowNull.get(); Schema schm = reflectData.getSchema(AnotherSampleRecord.class); ReflectDatumWriter<AnotherSampleRecord> writer = new ReflectDatumWriter<>(schm); ByteArrayOutputStream out = new ByteArrayOutputStream(); // keep record.a null and see if that works Encoder e = factory.directBinaryEncoder(out, null); AnotherSampleRecord a = new AnotherSampleRecord(); writer.write(a, e); AnotherSampleRecord b = new AnotherSampleRecord(10); writer.write(b, e); e.flush(); ReflectDatumReader<AnotherSampleRecord> reader = new ReflectDatumReader<>(schm); ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); Decoder d = DecoderFactory.get().binaryDecoder(in, null); AnotherSampleRecord decoded = reader.read(null, d); assertEquals(a, decoded); decoded = reader.read(null, d); assertEquals(b, decoded); }
/**
 * Test serialization of non-string map-key POJOs.
 *
 * Fixes relative to the original: the raw {@code ReflectDatumWriter} type is
 * replaced with a properly parameterized one, and the {@code DataFileWriter}
 * is managed by try-with-resources so it is closed even if an append throws.
 *
 * @param testType   label used in log output and assertion messages
 * @param entityObjs objects to serialize; the first element's class drives the schema
 * @return the serialized Avro container-file bytes
 */
public <T> byte[] testSerialization(String testType, T... entityObjs) throws Exception {
  log("---- Beginning " + testType + " ----");
  T entityObj1 = entityObjs[0];

  ReflectData rdata = ReflectData.AllowNull.get();
  Schema schema = rdata.getSchema(entityObj1.getClass());
  assertNotNull("Unable to get schema for " + testType, schema);
  log(schema.toString(true));

  // getClass() returns Class<?>; the cast to Class<T> is safe because
  // entityObj1 is a T, but the compiler cannot prove it.
  @SuppressWarnings("unchecked")
  ReflectDatumWriter<T> datumWriter =
      new ReflectDatumWriter<>((Class<T>) entityObj1.getClass(), rdata);

  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  // try-with-resources guarantees the writer is closed even if append throws.
  try (DataFileWriter<T> fileWriter = new DataFileWriter<>(datumWriter)) {
    fileWriter.create(schema, baos);
    for (T entityObj : entityObjs) {
      fileWriter.append(entityObj);
    }
  }
  return baos.toByteArray();
}