/** {@inheritDoc} */
@Override
public SchemaAndValue toConnectData(String topic, byte[] bytes) {
    final CacheEvent event;
    try {
        event = deserializer.deserialize(topic, bytes);
    } catch (SerializationException e) {
        // Preserve the underlying cause for callers catching DataException.
        throw new DataException("Failed to convert to Kafka Connect data due to a serialization error", e);
    }
    // A null event maps to the canonical NULL pair; otherwise the event is
    // returned schemaless (null schema), as the upstream deserializer defines its shape.
    return event == null ? SchemaAndValue.NULL : new SchemaAndValue(null, event);
}
}
/**
 * Builds a lazily-evaluated processing-log message describing a deserialization failure.
 *
 * @param exception the failure to report; must not be null
 * @param record the raw record bytes, if available; encoded as Base64 in the message
 * @return a supplier producing the structured error message on demand
 */
public static Supplier<SchemaAndValue> deserializationErrorMsg(
    final Throwable exception,
    final Optional<byte[]> record
) {
    Objects.requireNonNull(exception);
    return () -> {
        // Record bytes are Base64-encoded for the log; absent record maps to null.
        final String encodedRecord = record.map(Base64.getEncoder()::encodeToString).orElse(null);

        final Struct deserializationError = new Struct(DESERIALIZATION_ERROR_SCHEMA);
        deserializationError.put(DESERIALIZATION_ERROR_FIELD_MESSAGE, exception.getMessage());
        deserializationError.put(DESERIALIZATION_ERROR_FIELD_RECORD, encodedRecord);

        final Struct struct = new Struct(PROCESSING_LOG_SCHEMA);
        struct.put(DESERIALIZATION_ERROR, deserializationError);
        struct.put(TYPE, MessageType.DESERIALIZATION_ERROR.ordinal());
        return new SchemaAndValue(PROCESSING_LOG_SCHEMA, struct);
    };
}
}
@Before
public void setup() {
    // Wire the deserializer under test to its mocked collaborators.
    connectDeserializer = new KsqlConnectDeserializer(converter, dataTranslator, recordLogger);

    // Default happy-path stubbing: any payload converts and translates successfully.
    final SchemaAndValue converted = new SchemaAndValue(schema, value);
    when(converter.toConnectData(any(), any())).thenReturn(converted);
    when(dataTranslator.toKsqlRow(any(), any())).thenReturn(genericRow);
}
/**
 * Pass-through conversion: pairs the raw bytes with the BYTES schema unchanged.
 * NOTE(review): BYTES_SCHEMA is non-optional, so a null payload (tombstone) yields a
 * non-optional schema with a null value — confirm callers tolerate this.
 */
@Override
public SchemaAndValue toConnectData(String topic, byte[] value) {
    final Schema bytesSchema = Schema.BYTES_SCHEMA;
    return new SchemaAndValue(bytesSchema, value);
}
}
/** Identity transform for byte payloads: schema and bytes are forwarded untouched. */
@Override
protected SchemaAndValue processBytes(R record, Schema inputSchema, byte[] input) {
    final SchemaAndValue unchanged = new SchemaAndValue(inputSchema, input);
    return unchanged;
}
/**
 * Wraps the raw record bytes in a SchemaAndValue without any transformation.
 * The topic is accepted to satisfy the Converter contract but is not consulted.
 */
@Override
public SchemaAndValue toConnectData(String topic, byte[] value) {
    return new SchemaAndValue(Schema.BYTES_SCHEMA, value);
}
}
/**
 * Returns the raw bytes under the optional BYTES schema, so null payloads
 * (tombstones) are representable.
 */
@Override
public SchemaAndValue toConnectData(String topic, byte[] value) {
    final Schema schema = Schema.OPTIONAL_BYTES_SCHEMA;
    return new SchemaAndValue(schema, value);
}
/** Identity transform for string payloads: schema and value are returned as-is. */
@Override
protected SchemaAndValue processString(R record, Schema inputSchema, String input) {
    final SchemaAndValue passthrough = new SchemaAndValue(inputSchema, input);
    return passthrough;
}
/** Returns a new header carrying the same key but the supplied schema/value pair. */
@Override
public Header with(Schema schema, Object value) {
    final SchemaAndValue replacement = new SchemaAndValue(schema, value);
    return new ConnectHeader(key, replacement);
}
/**
 * Deserializes the payload via the static DESERIALIZER and returns it schemaless.
 * NOTE(review): requireNonNull rejects null payloads, so tombstone records will
 * throw NPE here — confirm that is intended for this converter.
 */
@Override
public SchemaAndValue toConnectData(final String topic, final byte[] value) {
    requireNonNull(value);
    final Object deserialized = DESERIALIZER.deserialize(topic, value);
    // Schema is unknown at this layer; the value carries its own structure.
    return new SchemaAndValue(null, deserialized);
}
}
/**
 * Decodes the byte payload into a String using the transform's configured charset.
 * Optionality of the input schema is carried over to the output STRING schema.
 */
@Override
protected SchemaAndValue processBytes(R record, Schema inputSchema, byte[] input) {
    final Schema outputSchema;
    if (inputSchema.isOptional()) {
        outputSchema = Schema.OPTIONAL_STRING_SCHEMA;
    } else {
        outputSchema = Schema.STRING_SCHEMA;
    }
    // Decode with the explicitly configured charset (never the platform default).
    return new SchemaAndValue(outputSchema, new String(input, this.config.charset));
}
/**
 * Adds a header for the given key. Collapses to the canonical NULL pair only when
 * both schema and value are absent; otherwise wraps them in a new SchemaAndValue.
 */
@Override
public Headers add(String key, Object value, Schema schema) {
    final SchemaAndValue schemaAndValue;
    if (value == null && schema == null) {
        schemaAndValue = SchemaAndValue.NULL;
    } else {
        schemaAndValue = new SchemaAndValue(schema, value);
    }
    return add(key, schemaAndValue);
}
/**
 * Renames each key of the input map by applying the configured regex pattern and
 * replacement; keys that do not match are kept unchanged. Values are forwarded
 * untouched and insertion order is preserved (LinkedHashMap). If two input keys
 * rename to the same output key, the later entry wins.
 *
 * @param record the record being transformed (not consulted here)
 * @param input the schemaless map whose field names are rewritten
 * @return a schemaless SchemaAndValue wrapping the renamed map
 */
@Override
protected SchemaAndValue processMap(R record, Map<String, Object> input) {
    final Map<String, Object> outputMap = new LinkedHashMap<>(input.size());
    // Iterate entries rather than keySet()+get(): one map lookup per field instead of two.
    for (final Map.Entry<String, Object> entry : input.entrySet()) {
        final String inputFieldName = entry.getKey();
        log.trace("process() - Processing field '{}'", inputFieldName);
        final Matcher fieldMatcher = this.config.pattern.matcher(inputFieldName);
        // replaceAll() resets the matcher, so the preceding find() does not skip the
        // first occurrence — behavior matches replacing all matches in the name.
        final String outputFieldName = fieldMatcher.find()
            ? fieldMatcher.replaceAll(this.config.replacement)
            : inputFieldName;
        outputMap.put(outputFieldName, entry.getValue());
    }
    return new SchemaAndValue(null, outputMap);
}
/**
 * Adds a header without schema/value validation; the caller is responsible for
 * having vetted the pair beforehand.
 */
protected Headers addWithoutValidating(String key, Object value, Schema schema) {
    final SchemaAndValue pair = new SchemaAndValue(schema, value);
    final ConnectHeader header = new ConnectHeader(key, pair);
    return add(header);
}
/**
 * Deserializes the payload as a String and pairs it with the optional STRING schema.
 * Serialization failures surface as DataException with the cause attached.
 */
@Override
public SchemaAndValue toConnectData(String topic, byte[] value) {
    final String deserialized;
    try {
        deserialized = deserializer.deserialize(topic, value);
    } catch (SerializationException e) {
        throw new DataException("Failed to deserialize string: ", e);
    }
    // SchemaAndValue construction cannot throw SerializationException, so moving it
    // out of the try preserves behavior exactly.
    return new SchemaAndValue(Schema.OPTIONAL_STRING_SCHEMA, deserialized);
}
/**
 * Deserializes the payload and pairs it with the optional BYTES schema.
 * Serialization failures are rethrown as DataException, preserving the cause.
 */
public SchemaAndValue toConnectData(String topic, byte[] value) {
    try {
        final Object deserialized = this.deserializer.deserialize(topic, value);
        return new SchemaAndValue(Schema.OPTIONAL_BYTES_SCHEMA, deserialized);
    } catch (SerializationException e) {
        throw new DataException("Failed to deserialize byte: ", e);
    }
}
}
/**
 * Deserializes the payload and pairs it with this converter's fixed schema.
 * The error message names the converted type via {@code typeName}.
 */
@Override
public SchemaAndValue toConnectData(String topic, byte[] value) {
    final Object deserialized;
    try {
        deserialized = deserializer.deserialize(topic, value);
    } catch (SerializationException e) {
        throw new DataException("Failed to deserialize " + typeName + ": ", e);
    }
    return new SchemaAndValue(schema, deserialized);
}
/**
 * Parses the bytes into a Message and converts it under this instance's schema.
 * An unparseable/absent message yields the canonical NULL pair.
 */
SchemaAndValue toConnectData(byte[] value) {
    final Message message = getMessage(value);
    return message == null
        ? SchemaAndValue.NULL
        : new SchemaAndValue(this.schema, toConnectData(this.schema, message));
}
/**
 * Parse the string representation of a value into its schema and value.
 * <p>
 * A null input yields the shared null schema-and-value; an empty string is
 * treated as a plain STRING; anything else is handed to the parser.
 *
 * @param value the string form of the value
 * @return the schema and value; never null, but whose schema and value may be null
 * @see #convertToString
 */
public static SchemaAndValue parseString(String value) {
    if (value == null) {
        return NULL_SCHEMA_AND_VALUE;
    }
    if (value.isEmpty()) {
        return new SchemaAndValue(Schema.STRING_SCHEMA, value);
    }
    return parse(new Parser(value), false);
}
/**
 * Computes a new timestamp from the record's value and returns a copy of the
 * record with only the timestamp replaced; all other fields carry over unchanged.
 */
@Override
public R apply(R r) {
    final SchemaAndValue valuePair = new SchemaAndValue(r.valueSchema(), r.value());
    final long timestamp = process(valuePair);
    return r.newRecord(
        r.topic(), r.kafkaPartition(),
        r.keySchema(), r.key(),
        r.valueSchema(), r.value(),
        timestamp);
}
}