/**
 * Deserializes the raw Kafka payload into a {@code GenericRow} by first converting it to
 * Connect data and then translating that into a KSQL row.
 *
 * <p>Any failure is reported to the processing-log {@code recordLogger} (with the raw bytes,
 * base64-encoded downstream) before being rethrown to the caller.
 */
@SuppressWarnings("unchecked")
@Override
public GenericRow deserialize(final String topic, final byte[] bytes) {
  try {
    final SchemaAndValue connectData = converter.toConnectData(topic, bytes);
    return connectToKsqlTranslator.toKsqlRow(connectData.schema(), connectData.value());
  } catch (final Exception e) {
    // Log the failure (including the offending record) and propagate unchanged.
    recordLogger.error(
        ProcessingLogMessageFactory.deserializationErrorMsg(e, Optional.ofNullable(bytes)));
    throw e;
  }
}
/**
 * Deserializes {@code bytes} into a {@code CacheEvent} and wraps it in a schemaless
 * {@link SchemaAndValue}; a null event maps to {@link SchemaAndValue#NULL}.
 *
 * @throws DataException if the underlying deserializer fails
 */
@Override
public SchemaAndValue toConnectData(String topic, byte[] bytes) {
  final CacheEvent event;
  try {
    event = deserializer.deserialize(topic, bytes);
  } catch (SerializationException e) {
    // Re-wrap so callers only ever see Connect's exception hierarchy.
    throw new DataException("Failed to convert to Kafka Connect data due to a serialization error", e);
  }
  // No schema is attached — the event is passed through as-is.
  return event == null ? SchemaAndValue.NULL : new SchemaAndValue(null, event);
}
}
/**
 * Converts a JSON-encoded row into a {@code GenericRow} laid out according to {@code schema}.
 *
 * <p>Field lookup is case-insensitive (via {@code getCaseInsensitiveFieldNameMap}); values are
 * coerced to the declared field types by {@code enforceFieldType}. Missing fields become null
 * columns.
 *
 * @param rowJsonBytes UTF-8 JSON bytes for one row
 * @return the populated row, or null when the JSON decodes to no value
 */
@SuppressWarnings("unchecked")
private GenericRow getGenericRow(final byte[] rowJsonBytes) {
  final SchemaAndValue schemaAndValue = jsonConverter.toConnectData("topic", rowJsonBytes);
  // Fixed: parameterize the cast and the ArrayList below instead of using raw types.
  final Map<String, Object> valueMap = (Map<String, Object>) schemaAndValue.value();
  if (valueMap == null) {
    return null;
  }
  final Map<String, String> caseInsensitiveFieldNameMap =
      getCaseInsensitiveFieldNameMap(valueMap, true);
  // Pre-size to the known column count.
  final List<Object> columns = new ArrayList<>(schema.fields().size());
  for (final Field field : schema.fields()) {
    final Object columnVal = valueMap.get(caseInsensitiveFieldNameMap.get(field.name()));
    columns.add(enforceFieldType(field.schema(), columnVal));
  }
  return new GenericRow(columns);
}
SchemaAndValue tmp = convertToNativeType(element); if (arraySchema == null) { arraySchema = tmp.schema(); objs.add(tmp.value()); return new SchemaAndValue(SchemaBuilder.array(arraySchema), objs); return SchemaAndValue.NULL; } else if (cls.isAssignableFrom(String.class)) { return new SchemaAndValue(SchemaBuilder.string().optional(), value); } else if (cls.isAssignableFrom(Short.class)) { return new SchemaAndValue(SchemaBuilder.int16().optional(), value); } else if (cls.isAssignableFrom(Integer.class)) { return new SchemaAndValue(SchemaBuilder.int32().optional(), value); } else if (cls.isAssignableFrom(Long.class)) { return new SchemaAndValue(SchemaBuilder.int64().optional(), value); } else if (cls.isAssignableFrom(Byte.class)) { return new SchemaAndValue(SchemaBuilder.int8().optional(), value); } else if (cls.isAssignableFrom(Character.class)) { return new SchemaAndValue(SchemaBuilder.int32().optional(), value == null ? null : new Integer(((char) value))); } else if (cls.isAssignableFrom(Boolean.class)) { return new SchemaAndValue(SchemaBuilder.bool().optional(), value); } else if (cls.isAssignableFrom(Float.class)) { return new SchemaAndValue(SchemaBuilder.float32().optional(), value); } else if (cls.isAssignableFrom(BigDecimal.class)) { return new SchemaAndValue(SchemaBuilder.float64().optional(), value == null ? null : ((BigDecimal) value).doubleValue());
return new SchemaAndValue(Schema.STRING_SCHEMA, sb.toString()); return new SchemaAndValue(listSchema, result); result.add(element.value()); parser.canConsume(COMMA_DELIMITER); return new SchemaAndValue(mapSchema, result); if (key == null || key.value() == null) { throw new DataException("Map entry may not have a null key: " + parser.original()); Object entryValue = value != null ? value.value() : null; result.put(key.value(), entryValue); parser.canConsume(COMMA_DELIMITER); keySchema = commonSchemaFor(keySchema, key); return new SchemaAndValue(Schema.INT8_SCHEMA, decimal.byteValueExact()); } catch (ArithmeticException e) { return new SchemaAndValue(Schema.INT16_SCHEMA, decimal.shortValueExact()); } catch (ArithmeticException e) { return new SchemaAndValue(Schema.INT32_SCHEMA, decimal.intValueExact()); } catch (ArithmeticException e) { return new SchemaAndValue(Schema.INT64_SCHEMA, decimal.longValueExact()); } catch (ArithmeticException e) {
/**
 * Returns the declared schema, falling back to the schema carried by the value itself when the
 * value is a {@link Struct} and no explicit schema was recorded. May return null.
 */
@Override
public Schema schema() {
  final Schema declared = schemaAndValue.schema();
  if (declared != null) {
    return declared;
  }
  final Object v = value();
  // A Struct always knows its own schema; use it when none was declared.
  return (v instanceof Struct) ? ((Struct) v).schema() : null;
}
protected void addRecord(List<SourceRecord> records, SchemaAndValue key, SchemaAndValue value) { if (this.config.hasKeyMetadataField && !SchemaAndValue.NULL.equals(key)) { final Struct keyStruct = (Struct) key.value(); keyStruct.put(this.config.keyMetadataField, this.metadata); if (this.config.hasvalueMetadataField && !SchemaAndValue.NULL.equals(value)) { valueStruct = (Struct) value.value(); valueStruct.put(this.config.valueMetadataField, this.metadata); } else {
if (value instanceof Record) { final Record record = (Record) value; final Object ksqlValue = avroData.toConnectData(record.getSchema(), record).value(); genericRowValues.add( SchemaUtil.getOptionalValue(ksqlSchema.field(field.name()).schema(), ksqlValue)); randomAvroMessage.get(key)).value().toString();
.map(opcData -> { SchemaAndValue tmp = CommonUtils.convertToNativeType(opcData.getValue()); Schema valueSchema = CommonUtils.buildSchema(tmp.schema()); TagInfo meta = tagInfoMap.get(opcData.getTag()); Map<String, Object> additionalInfo = new HashMap<>();
/** * Assert that the supplied {@link Struct} is {@link Struct#validate() valid} and its {@link Struct#schema() schema} * matches that of the supplied {@code schema}. * * @param schemaAndValue the value with a schema; may not be null */ public static void schemaMatchesStruct(SchemaAndValue schemaAndValue) { Object value = schemaAndValue.value(); if (value == null) { // The schema should also be null ... assertThat(schemaAndValue.schema()).isNull(); } else { // Both value and schema should exist and be valid ... assertThat(value).isInstanceOf(Struct.class); fieldsInSchema((Struct) value, schemaAndValue.schema()); } }
/**
 * Builds a lazy processing-log message describing a deserialization failure.
 *
 * <p>The returned supplier creates a {@code PROCESSING_LOG_SCHEMA} struct tagged with
 * {@code MessageType.DESERIALIZATION_ERROR}, embedding the exception message and, when present,
 * the offending record base64-encoded.
 *
 * @param exception the failure; may not be null
 * @param record the raw record bytes, if available
 * @return a supplier that materializes the log message on demand
 */
public static Supplier<SchemaAndValue> deserializationErrorMsg(
    final Throwable exception,
    final Optional<byte[]> record) {
  Objects.requireNonNull(exception);
  return () -> {
    final Struct deserializationError = new Struct(DESERIALIZATION_ERROR_SCHEMA)
        .put(DESERIALIZATION_ERROR_FIELD_MESSAGE, exception.getMessage())
        .put(
            DESERIALIZATION_ERROR_FIELD_RECORD,
            record.map(Base64.getEncoder()::encodeToString).orElse(null));
    final Struct struct = new Struct(PROCESSING_LOG_SCHEMA)
        .put(TYPE, MessageType.DESERIALIZATION_ERROR.ordinal())
        .put(DESERIALIZATION_ERROR, deserializationError);
    return new SchemaAndValue(PROCESSING_LOG_SCHEMA, struct);
  };
}
}
@Test public void shouldSetNullRecordToNull() { // When: final SchemaAndValue msg = ProcessingLogMessageFactory.deserializationErrorMsg( error, Optional.empty() ).get(); // Then: final Struct struct = (Struct) msg.value(); final Struct deserializationError = struct.getStruct(DESERIALIZATION_ERROR); assertThat(deserializationError.get(DESERIALIZATION_ERROR_FIELD_RECORD), is(nullValue())); }
TagInfo meta = tagInfoMap.get(opcData.getTag()); SchemaAndValue dataSchema = CommonUtils.convertToNativeType(opcData.getValue()); Schema valueSchema = CommonUtils.buildSchema(dataSchema.schema()); Struct valueStruct = CommonUtils.mapToConnectObject(opcData, meta,
@Before
public void setup() {
  // Stub the collaborators first so the deserializer under test sees canned data.
  when(converter.toConnectData(any(), any())).thenReturn(new SchemaAndValue(schema, value));
  when(dataTranslator.toKsqlRow(any(), any())).thenReturn(genericRow);

  connectDeserializer =
      new KsqlConnectDeserializer(converter, dataTranslator, recordLogger);
}
/** Returns the wrapped value, delegating to the underlying {@code schemaAndValue}. May be null. */
@Override public Object value() { return schemaAndValue.value(); }
/**
 * Reconstructs a {@link SourceRecord} from its persisted {@link Document} form.
 *
 * <p>Key and value were stored with schema and payload in separate sub-documents; each pair is
 * recombined into a {@code {"schema": ..., "payload": ...}} envelope before being handed to the
 * matching converter.
 *
 * @throws IOException if either converter fails to deserialize its envelope
 */
private SourceRecord rehydrateSourceRecord(Document record, SchemaAndValueConverter keyConverter,
    SchemaAndValueConverter valueConverter) throws IOException {
  final String topic = record.getString("topic");
  final Integer kafkaPartition = record.getInteger("kafkaPartition");

  // Rebuild the schema+payload envelopes the converters expect.
  final Document keyAndSchemaDoc = Document.create(
      "schema", record.getDocument("keySchema"), "payload", record.getDocument("key"));
  final Document valueAndSchemaDoc = Document.create(
      "schema", record.getDocument("valueSchema"), "payload", record.getDocument("value"));

  final SchemaAndValue keyWithSchema = keyConverter.deserialize(topic, keyAndSchemaDoc);
  final SchemaAndValue valueWithSchema = valueConverter.deserialize(topic, valueAndSchemaDoc);

  return new SourceRecord(
      toMap(record.getDocument("sourcePartition")),
      toMap(record.getDocument("sourceOffset")),
      topic,
      kafkaPartition,
      keyWithSchema.schema(), keyWithSchema.value(),
      valueWithSchema.schema(), valueWithSchema.value());
}
/**
 * Passes the raw bytes through unmodified, paired with the non-optional BYTES schema.
 *
 * <p>NOTE(review): a null {@code value} is still wrapped with the non-optional
 * {@code Schema.BYTES_SCHEMA}; if tombstone/null payloads are expected here, confirm whether
 * {@code Schema.OPTIONAL_BYTES_SCHEMA} (or {@code SchemaAndValue.NULL}) was intended.
 */
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { return new SchemaAndValue(Schema.BYTES_SCHEMA, value); } }
/**
 * Dispatches on the runtime type of the wrapped value: {@link Struct} payloads go to
 * {@code processStruct}, {@link Map} payloads to {@code processMap}.
 *
 * @throws UnsupportedOperationException for any other value type
 */
protected long process(SchemaAndValue schemaAndValue) {
  final Object payload = schemaAndValue.value();
  if (payload instanceof Struct) {
    return processStruct(schemaAndValue);
  }
  if (payload instanceof Map) {
    return processMap(schemaAndValue);
  }
  throw new UnsupportedOperationException();
}