@Override public GenericRow toKsqlRow(final Schema connectSchema, final Object connectData) { if (!schema.type().equals(Schema.Type.STRUCT)) { throw new KsqlException("Schema for a KSQL row should be a struct"); } final Struct rowStruct = (Struct) toKsqlValue(schema, connectSchema, connectData, ""); if (rowStruct == null) { return null; } // streams are expensive, so we don't use them from serdes. build the row using forEach final List<Object> fields = new ArrayList<>(schema.fields().size()); schema.fields().forEach(field -> fields.add(rowStruct.get(field))); return new GenericRow(fields); }
/**
 * Translates a Connect schema type into its KSQL SQL type name.
 *
 * @param type the Connect schema type to translate
 * @return the SQL type name registered for {@code type}
 * @throws IllegalArgumentException if no SQL mapping exists for the type
 */
public static String getSchemaTypeAsSqlType(final Schema.Type type) {
  final String mapped = TYPE_MAP.get(type.name());
  if (mapped != null) {
    return mapped;
  }
  throw new IllegalArgumentException("Unknown schema type: " + type);
}
// A primitive (non-struct) record key can only populate a single PK column,
// so exactly one PK field may be configured in that case.
// NOTE(review): this statement is truncated in this chunk — the
// ConnectException format string continues past the visible text.
if (keySchemaType.isPrimitive()) { if (configuredPkFields.size() != 1) { throw new ConnectException(String.format(
// Test-method interior (AssertJ): verifies that both the key and value JSON
// schemas are required (non-optional) STRUCTs before inspecting their field
// arrays. The enclosing test method's signature is outside this chunk.
assertThat(keySchema.getString("type")).isEqualToIgnoringCase(Schema.Type.STRUCT.name()); assertThat(keySchema.getBoolean("optional")).isEqualTo(false); Array keySchemaFields = keySchema.getArray("fields"); assertThat(valueSchema.getString("type")).isEqualToIgnoringCase(Schema.Type.STRUCT.name()); assertThat(valueSchema.getBoolean("optional")).isEqualTo(false); Array valueSchemaFields = valueSchema.getArray("fields");
// Tail of a switch over the cast target type: STRING is the last supported
// case; any other target type is rejected. The switch header is outside
// this chunk.
return castToString(value); default: throw new DataException(targetType.toString() + " is not supported in the Cast transformation.");
// Method interior: a primitive record key binds as a single statement
// parameter. The assert encodes the invariant that key-field metadata for a
// primitive key always contains exactly one name (enforced at config time).
if (schemaPair.keySchema.type().isPrimitive()) { assert fieldsMetadata.keyFieldNames.size() == 1; bindField(index++, schemaPair.keySchema, record.key());
/**
 * Parses the cast spec, mapping each field name to its target schema type.
 * A spec entry without a field name ("TYPE" instead of "field:TYPE") casts the
 * whole value and must be the only entry in the spec.
 *
 * @param mappings the raw "field:type" (or bare "type") spec entries
 * @return map of field name (or {@code WHOLE_VALUE_CAST}) to validated target type
 * @throws ConfigException on a malformed entry, an unknown type name, or a
 *         whole-value cast combined with per-field casts
 */
private static Map<String, Schema.Type> parseFieldTypes(List<String> mappings) {
  final Map<String, Schema.Type> m = new HashMap<>();
  boolean isWholeValueCast = false;
  for (String mapping : mappings) {
    final String[] parts = mapping.split(":");
    if (parts.length > 2) {
      // NOTE(review): the config key here still points at
      // ReplaceField.ConfigName.RENAME — looks like a copy-paste from the
      // ReplaceField transform; confirm whether this should reference the
      // Cast spec config constant instead.
      throw new ConfigException(ReplaceField.ConfigName.RENAME, mappings, "Invalid cast mapping: " + mapping);
    }
    if (parts.length == 1) {
      // Bare type name: cast the entire value.
      Schema.Type targetType = Schema.Type.valueOf(parts[0].trim().toUpperCase(Locale.ROOT));
      m.put(WHOLE_VALUE_CAST, validCastType(targetType, FieldType.OUTPUT));
      isWholeValueCast = true;
    } else {
      Schema.Type type;
      try {
        type = Schema.Type.valueOf(parts[1].trim().toUpperCase(Locale.ROOT));
      } catch (IllegalArgumentException e) {
        throw new ConfigException("Invalid type found in casting spec: " + parts[1].trim(), e);
      }
      m.put(parts[0].trim(), validCastType(type, FieldType.OUTPUT));
    }
  }
  // A whole-value cast is exclusive: it cannot be combined with field casts.
  if (isWholeValueCast && mappings.size() > 1) {
    throw new ConfigException("Cast transformations that specify a type to cast the entire value to "
        + "may only specify a single cast in their spec");
  }
  return m;
}
// Projects a primitive value from the source schema onto the target schema,
// widening numerics when the promotion is legal (e.g. INT32 -> INT64).
// NOTE(review): this method is truncated in this chunk — the body continues
// past the visible `if`.
private static Object projectPrimitive(Schema source, Object record, Schema target) throws SchemaProjectorException { assert source.type().isPrimitive(); assert target.type().isPrimitive(); Object result; if (isPromotable(source.type(), target.type()) && record instanceof Number) {
/**
 * Encodes the partition path segment "fieldName=value" from the configured
 * partition field of the record's Struct value.
 *
 * @param sinkRecord the record whose value supplies the partition key
 * @return the "fieldName=value" path segment
 * @throws PartitionException if the value is not a Struct or the field's
 *         type is not a supported partition key type
 */
@Override
public String encodePartition(SinkRecord sinkRecord) {
  final Object recordValue = sinkRecord.value();
  final Schema valueSchema = sinkRecord.valueSchema();
  if (!(recordValue instanceof Struct)) {
    log.error("Value is not Struct type.");
    throw new PartitionException("Error encoding partition.");
  }
  final Struct struct = (Struct) recordValue;
  final Object partitionKey = struct.get(fieldName);
  final Type type = valueSchema.field(fieldName).schema().type();
  switch (type) {
    case INT8:
    case INT16:
    case INT32:
    case INT64:
      // All integral widths render via Number.toString().
      return fieldName + "=" + ((Number) partitionKey).toString();
    case STRING:
      return fieldName + "=" + (String) partitionKey;
    case BOOLEAN:
      return fieldName + "=" + Boolean.toString((boolean) partitionKey);
    default:
      log.error("Type {} is not supported as a partition key.", type.getName());
      throw new PartitionException("Error encoding partition.");
  }
}
/**
 * Renders a Markdown link (or composite description) for the given schema:
 * known logical types link to their Kafka javadoc, named schemas link to a
 * local anchor, arrays/maps recurse on their element schemas, and plain
 * types link to the Schema.Type javadoc.
 */
static String schema(Schema schema) {
  final String name = schema.name();
  if (!Strings.isNullOrEmpty(name)) {
    if (Time.LOGICAL_NAME.equals(name)) {
      return "[Time](https://kafka.apache.org/0102/javadoc/org/apache/kafka/connect/data/Time.html)";
    }
    if (Date.LOGICAL_NAME.equals(name)) {
      return "[Date](https://kafka.apache.org/0102/javadoc/org/apache/kafka/connect/data/Date.html)";
    }
    if (Timestamp.LOGICAL_NAME.equals(name)) {
      return "[Timestamp](https://kafka.apache.org/0102/javadoc/org/apache/kafka/connect/data/Timestamp.html)";
    }
    if (Decimal.LOGICAL_NAME.equals(name)) {
      return "[Decimal](https://kafka.apache.org/0102/javadoc/org/apache/kafka/connect/data/Decimal.html)";
    }
    // Any other named schema links to a document-local anchor.
    return String.format("[%s](#%s)", name, name);
  }
  if (Schema.Type.ARRAY == schema.type()) {
    return String.format("Array of %s", schema(schema.valueSchema()));
  }
  if (Schema.Type.MAP == schema.type()) {
    return String.format("Map of <%s, %s>", schema(schema.keySchema()), schema(schema.valueSchema()));
  }
  return String.format(
      "[%s](https://kafka.apache.org/0102/javadoc/org/apache/kafka/connect/data/Schema.Type.html#%s)",
      CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.UPPER_CAMEL, schema.type().toString()),
      schema.type()
  );
}
/**
 * Builds the transform's ConfigDef: the output schema type (STRING or BYTES,
 * defaulting to STRING) and the schemas.enable flag (defaulting to false).
 */
public static ConfigDef config() {
  final ConfigDef configDef = new ConfigDef();
  configDef.define(
      ConfigKeyBuilder.of(OUTPUT_SCHEMA_CONFIG, ConfigDef.Type.STRING)
          .documentation(OUTPUT_SCHEMA_DOC)
          .defaultValue(Schema.Type.STRING.toString())
          // Only STRING and BYTES outputs are supported.
          .validator(
              ConfigDef.ValidString.in(
                  Schema.Type.STRING.toString(),
                  Schema.Type.BYTES.toString()
              )
          )
          .importance(ConfigDef.Importance.MEDIUM)
          .build());
  configDef.define(
      ConfigKeyBuilder.of(SCHEMAS_ENABLE_CONFIG, ConfigDef.Type.BOOLEAN)
          .documentation(SCHEMAS_ENABLE_DOC)
          .defaultValue(false)
          .importance(ConfigDef.Importance.MEDIUM)
          .build());
  return configDef;
}
/**
 * Writes a Connect MAP field into the target BSON document. A null field
 * value becomes BsonNull; otherwise each map entry is converted according to
 * the map's value schema (primitive values via a field converter, complex
 * values via recursive document conversion).
 */
private void handleMapField(BsonDocument doc, Struct struct, Field field) {
  logger.trace("handling complex type 'map'");
  BsonDocument mapDoc = new BsonDocument();
  if (struct.get(field) == null) {
    logger.trace("no field in struct -> adding null");
    doc.put(field.name(), BsonNull.VALUE);
    return;
  }
  Map m = (Map) struct.get(field);
  Schema mapValueSchema = field.schema().valueSchema();
  for (Object rawKey : m.keySet()) {
    String key = (String) rawKey;
    if (mapValueSchema.type().isPrimitive()) {
      mapDoc.put(key, getConverter(mapValueSchema).toBson(m.get(key), field.schema()));
    } else {
      mapDoc.put(key, toBsonDoc(mapValueSchema, m.get(key)));
    }
  }
  doc.put(field.name(), mapDoc);
}
/**
 * Converts a Connect MAP field to BSON and stores it under the field's name.
 * Null map -> BsonNull; primitive value schema -> per-entry field converter;
 * complex value schema -> recursive BSON document conversion.
 */
private void handleMapField(BsonDocument doc, Struct struct, Field field) {
  logger.trace("handling complex type 'map'");
  BsonDocument entries = new BsonDocument();
  Object fieldValue = struct.get(field);
  if (fieldValue == null) {
    logger.trace("no field in struct -> adding null");
    doc.put(field.name(), BsonNull.VALUE);
    return;
  }
  Map sourceMap = (Map) struct.get(field);
  boolean primitiveValues = field.schema().valueSchema().type().isPrimitive();
  for (Object k : sourceMap.keySet()) {
    String entryKey = (String) k;
    if (primitiveValues) {
      entries.put(entryKey,
          getConverter(field.schema().valueSchema()).toBson(sourceMap.get(entryKey), field.schema()));
    } else {
      entries.put(entryKey, toBsonDoc(field.schema().valueSchema(), sourceMap.get(entryKey)));
    }
  }
  doc.put(field.name(), entries);
}
/** * Determine the type/format of the timestamp based on the schema */ private String timestampTypeFromSchema(Schema schema) { if (Timestamp.LOGICAL_NAME.equals(schema.name())) { return TYPE_TIMESTAMP; } else if (org.apache.kafka.connect.data.Date.LOGICAL_NAME.equals(schema.name())) { return TYPE_DATE; } else if (Time.LOGICAL_NAME.equals(schema.name())) { return TYPE_TIME; } else if (schema.type().equals(Schema.Type.STRING)) { // If not otherwise specified, string == user-specified string format for timestamps return TYPE_STRING; } else if (schema.type().equals(Schema.Type.INT64)) { // If not otherwise specified, long == unix time return TYPE_UNIX; } throw new ConnectException("Schema " + schema + " does not correspond to a known timestamp type format"); }
public static <K, V> void validateFormat(Map<K, V> offsetData) { // Both keys and values for offsets may be null. For values, this is a useful way to delete offsets or indicate // that there's not usable concept of offsets in your source system. if (offsetData == null) return; for (Map.Entry<K, V> entry : offsetData.entrySet()) { if (!(entry.getKey() instanceof String)) throw new DataException("Offsets may only use String keys"); Object value = entry.getValue(); if (value == null) continue; Schema.Type schemaType = ConnectSchema.schemaType(value.getClass()); if (schemaType == null) throw new DataException("Offsets may only contain primitive types as values, but field " + entry.getKey() + " contains " + value.getClass()); if (!schemaType.isPrimitive()) throw new DataException("Offsets may only contain primitive types as values, but field " + entry.getKey() + " contains " + schemaType); } } }
/**
 * Writes a Connect ARRAY field into the target BSON document. A null field
 * value becomes BsonNull; otherwise each element is converted according to
 * the array's value schema (primitive elements via a field converter,
 * complex elements via recursive document conversion).
 */
private void handleArrayField(BsonDocument doc, Struct struct, Field field) {
  logger.trace("handling complex type 'array'");
  BsonArray bsonElements = new BsonArray();
  if (struct.get(field) == null) {
    logger.trace("no field in struct -> adding null");
    doc.put(field.name(), BsonNull.VALUE);
    return;
  }
  Schema elementSchema = field.schema().valueSchema();
  for (Object element : (List) struct.get(field)) {
    if (elementSchema.type().isPrimitive()) {
      bsonElements.add(getConverter(elementSchema).toBson(element, field.schema()));
    } else {
      bsonElements.add(toBsonDoc(elementSchema, element));
    }
  }
  doc.put(field.name(), bsonElements);
}
/**
 * Converts a Connect ARRAY field to a BsonArray and stores it under the
 * field's name. Null array -> BsonNull; primitive element schema -> per-item
 * field converter; complex element schema -> recursive BSON conversion.
 */
private void handleArrayField(BsonDocument doc, Struct struct, Field field) {
  logger.trace("handling complex type 'array'");
  BsonArray items = new BsonArray();
  Object fieldValue = struct.get(field);
  if (fieldValue == null) {
    logger.trace("no field in struct -> adding null");
    doc.put(field.name(), BsonNull.VALUE);
    return;
  }
  boolean primitiveElements = field.schema().valueSchema().type().isPrimitive();
  for (Object item : (List) struct.get(field)) {
    if (primitiveElements) {
      items.add(getConverter(field.schema().valueSchema()).toBson(item, field.schema()));
    } else {
      items.add(toBsonDoc(field.schema().valueSchema(), item));
    }
  }
  doc.put(field.name(), items);
}
// Enum constructor: caches the lowercase form of the constant's name.
// Locale.ROOT keeps the mapping stable regardless of the default locale
// (e.g. the Turkish dotless-i problem).
Type() { this.name = this.name().toLowerCase(Locale.ROOT); }
/**
 * Looks up the sink field converter for a schema, preferring logical-type
 * converters (keyed by schema name) over plain type converters (keyed by
 * schema type).
 *
 * @throws ConnectException if no converter is registered for the schema
 */
private SinkFieldConverter getConverter(Schema schema) {
  final SinkFieldConverter converter = isSupportedLogicalType(schema)
      ? logicalConverters.get(schema.name())
      : converters.get(schema.type());
  if (converter == null) {
    throw new ConnectException("error no registered converter found for " + schema.type().getName());
  }
  return converter;
}
}