private GenericAvroRegistry() { // Ensure that we know how to get Schemas for these Salesforce objects. registerSchemaInferrer(IndexedRecord.class, new SerializableFunction<IndexedRecord, Schema>() { /** Default serial version UID. */ private static final long serialVersionUID = 1L; @Override public Schema apply(IndexedRecord t) { return inferSchemaRecord(t); } }); registerSchemaInferrer(Schema.Field.class, new SerializableFunction<Schema.Field, Schema>() { /** Default serial version UID. */ private static final long serialVersionUID = 1L; @Override public Schema apply(Schema.Field t) { return inferSchemaField(t); } }); }
@SuppressWarnings("unchecked") @Override public Object get(int i) { // Lazy initialization of the cached converter objects. if (names == null) { names = new String[getSchema().getFields().size()]; fieldConverter = new AvroConverter[names.length]; for (int j = 0; j < names.length; j++) { Schema.Field f = getSchema().getFields().get(j); names[j] = f.name(); fieldConverter[j] = GenericAvroRegistry.get().convertToString(f); } } return fieldConverter[i].convertToDatum(value.get(getSchema().getField(names[i]).pos())); } }
/**
 * Infers an Avro {@link Schema} for the given record by re-inferring each
 * field's schema individually, preserving field names and declared defaults.
 */
private Schema inferSchemaRecord(IndexedRecord in) {
    FieldAssembler<Schema> assembler = SchemaBuilder.builder().record(in.getSchema().getName()).fields();
    for (Schema.Field sourceField : in.getSchema().getFields()) {
        Schema inferred = inferSchema(sourceField);
        Object defaultValue = sourceField.defaultVal();
        if (defaultValue == null) {
            // No declared default on the source field.
            assembler = assembler.name(sourceField.name()).type(inferred).noDefault();
        } else {
            assembler = assembler.name(sourceField.name()).type(inferred).withDefault(defaultValue);
        }
    }
    return assembler.endRecord();
}
@SuppressWarnings("unchecked") @Override public Object get(int i) { // Lazy initialization of the cached converter objects. if (names == null) { names = new String[getSchema().getFields().size()]; fieldConverter = new AvroConverter[names.length]; for (int j = 0; j < names.length; j++) { Schema.Field f = getSchema().getFields().get(j); names[j] = f.name(); fieldConverter[j] = GenericAvroRegistry.get().convertToString(f); } } return fieldConverter[i].convertToDatum(value.get(getSchema().getField(names[i]).pos())); } }
/**
 * Infers an Avro {@link Schema} for the given record by re-inferring each
 * field's schema individually, preserving field names and declared defaults.
 */
private Schema inferSchemaRecord(IndexedRecord in) {
    FieldAssembler<Schema> assembler = SchemaBuilder.builder().record(in.getSchema().getName()).fields();
    for (Schema.Field sourceField : in.getSchema().getFields()) {
        Schema inferred = inferSchema(sourceField);
        Object defaultValue = sourceField.defaultVal();
        if (defaultValue == null) {
            // No declared default on the source field.
            assembler = assembler.name(sourceField.name()).type(inferred).noDefault();
        } else {
            assembler = assembler.name(sourceField.name()).type(inferred).withDefault(defaultValue);
        }
    }
    return assembler.endRecord();
}
@Override public void write(Object datum) throws IOException { if (datum == null) { return; } // else handle the data. IndexedRecord input = getFactory(datum).convertToAvro(datum); SplunkJSONEvent event = SplunkJSONEventBuilder.createEvent(); for (Schema.Field f : input.getSchema().getFields()) { Schema.Field defaultField = null; if (defaultSchema != null) { defaultField = defaultSchema.getField(f.name()); } Object inputValue = input.get(f.pos()); if (defaultField != null && defaultField.name().equals(SplunkJSONEventField.TIME.getName()) && inputValue != null && inputValue instanceof String) { Object value = GenericAvroRegistry.get().convertToString(defaultField).convertToAvro((String) inputValue); SplunkJSONEventBuilder.setField(event, f.name(), value, true); } else if (inputValue != null) { SplunkJSONEventBuilder.setField(event, f.name(), inputValue, true); } } LOGGER.debug("Added event to bulk queue." + String.valueOf(event)); splunkObjectsForBulk.add(event); LOGGER.debug("Events bulk queue size " + splunkObjectsForBulk.size()); if (splunkObjectsForBulk.size() >= eventsBatchSize) { doSend(); } }
private GenericAvroRegistry() { // Ensure that we know how to get Schemas for these Salesforce objects. registerSchemaInferrer(IndexedRecord.class, new SerializableFunction<IndexedRecord, Schema>() { /** Default serial version UID. */ private static final long serialVersionUID = 1L; @Override public Schema apply(IndexedRecord t) { return inferSchemaRecord(t); } }); registerSchemaInferrer(Schema.Field.class, new SerializableFunction<Schema.Field, Schema>() { /** Default serial version UID. */ private static final long serialVersionUID = 1L; @Override public Schema apply(Schema.Field t) { return inferSchemaField(t); } }); }
@Override public void write(Object datum) throws IOException { if (datum == null) { return; } // else handle the data. IndexedRecord input = getFactory(datum).convertToAvro(datum); SplunkJSONEvent event = SplunkJSONEventBuilder.createEvent(); for (Schema.Field f : input.getSchema().getFields()) { Schema.Field defaultField = null; if (defaultSchema != null) { defaultField = defaultSchema.getField(f.name()); } Object inputValue = input.get(f.pos()); if (defaultField != null && defaultField.name().equals(SplunkJSONEventField.TIME.getName()) && inputValue != null && inputValue instanceof String) { Object value = GenericAvroRegistry.get().convertToString(defaultField).convertToAvro((String) inputValue); SplunkJSONEventBuilder.setField(event, f.name(), value, true); } else if (inputValue != null) { SplunkJSONEventBuilder.setField(event, f.name(), inputValue, true); } } LOGGER.debug("Added event to bulk queue." + String.valueOf(event)); splunkObjectsForBulk.add(event); LOGGER.debug("Events bulk queue size " + splunkObjectsForBulk.size()); if (splunkObjectsForBulk.size() >= eventsBatchSize) { doSend(); } }