/**
 * Validates the format-related properties.
 *
 * <p>The format type is mandatory and must be a non-empty string. The format
 * property version is optional, but when present it must also be non-empty.
 */
@Override
public void validate(DescriptorProperties properties) {
    // Required, at least 1 character.
    properties.validateString(FORMAT_TYPE, false, 1);
    // Optional, but must not be blank when set.
    properties.validateString(FORMAT_PROPERTY_VERSION, true, 1);
}
}
/**
 * Validates a string property. The boundaries are inclusive.
 *
 * @param key        property key to validate
 * @param isOptional true if the property may be absent
 * @param minLen     minimum length of the string value (inclusive)
 */
public void validateString(String key, boolean isOptional, int minLen) {
    // No maximum length requested: delegate with the largest possible bound.
    validateString(key, isOptional, minLen, Integer.MAX_VALUE);
}
/**
 * Validates a string property.
 *
 * <p>Any length (including empty) is accepted.
 *
 * @param key        property key to validate
 * @param isOptional true if the property may be absent
 */
public void validateString(String key, boolean isOptional) {
    // No length constraints: allow anything from empty to the maximum length.
    validateString(key, isOptional, 0, Integer.MAX_VALUE);
}
/**
 * Validates the general connector properties.
 *
 * <p>Index and document type are mandatory non-empty strings; the key
 * delimiter and key null literal are optional.
 */
private void validateGeneralProperties(DescriptorProperties properties) {
    properties.validateString(CONNECTOR_INDEX, false, 1);
    properties.validateString(CONNECTOR_DOCUMENT_TYPE, false, 1);
    // Optional properties; any string value (including empty) is accepted.
    properties.validateString(CONNECTOR_KEY_DELIMITER, true);
    properties.validateString(CONNECTOR_KEY_NULL_LITERAL, true);
}
/**
 * Validates that the query property is present and non-empty.
 */
@Override
protected void validate(DescriptorProperties properties) {
    properties.validateString(TABLES_QUERY, false, 1);
}
/**
 * Validates the history-table properties: a non-empty history table name,
 * exactly one non-empty primary key, and a non-empty time attribute.
 */
@Override
protected void validate(DescriptorProperties properties) {
    properties.validateString(TABLES_HISTORY_TABLE, false, 1);
    // Require exactly one array entry (min 1, max 1); each entry must be a
    // non-empty string.
    properties.validateArray(
        TABLES_PRIMARY_KEY,
        (key) -> properties.validateString(key, false, 1),
        1,
        1); // currently, composite primary keys are not supported
    properties.validateString(TABLES_TIME_ATTRIBUTE, false, 1);
}
}
/**
 * Validates the indexed Kafka client properties (key/value entries).
 *
 * <p>Each entry's key must be a non-empty string; its value must be present
 * but may be empty.
 */
private void validateKafkaProperties(DescriptorProperties properties) {
    final Map<String, Consumer<String>> entryValidators = new HashMap<>();
    // Keys need at least one character.
    entryValidators.put(CONNECTOR_PROPERTIES_KEY, k -> properties.validateString(k, false, 1));
    // Values may be empty strings.
    entryValidators.put(CONNECTOR_PROPERTIES_VALUE, k -> properties.validateString(k, false, 0));
    properties.validateFixedIndexedProperties(CONNECTOR_PROPERTIES, true, entryValidators);
}
/**
 * Validates the indexed host entries.
 *
 * <p>Every host needs a non-empty hostname, a port in the valid TCP range
 * [0, 65535], and a non-empty protocol.
 */
private void validateHosts(DescriptorProperties properties) {
    final Map<String, Consumer<String>> hostValidators = new HashMap<>();
    hostValidators.put(CONNECTOR_HOSTS_HOSTNAME, k -> properties.validateString(k, false, 1));
    hostValidators.put(CONNECTOR_HOSTS_PORT, k -> properties.validateInt(k, false, 0, 65535));
    hostValidators.put(CONNECTOR_HOSTS_PROTOCOL, k -> properties.validateString(k, false, 1));
    // The hosts list itself is mandatory (isOptional = false).
    properties.validateFixedIndexedProperties(CONNECTOR_HOSTS, false, hostValidators);
}
/**
 * Validates the optional connection properties.
 */
private void validateConnectionProperties(DescriptorProperties properties) {
    // Optional; when present the retry timeout must be at least 1.
    properties.validateInt(CONNECTOR_CONNECTION_MAX_RETRY_TIMEOUT, true, 1);
    // Optional; any string value is accepted.
    properties.validateString(CONNECTOR_CONNECTION_PATH_PREFIX, true);
}
}
/**
 * Validates the common connector properties.
 *
 * <p>The connector type is mandatory and non-empty; the property version is
 * optional but, when present, must be a non-negative integer.
 */
@Override
public void validate(DescriptorProperties properties) {
    properties.validateString(CONNECTOR_TYPE, false, 1);
    properties.validateInt(CONNECTOR_PROPERTY_VERSION, true, 0);
}
}
/**
 * Validates a table schema property.
 *
 * <p>Each indexed schema entry must define a non-empty column name and a
 * valid, non-optional column type.
 *
 * @param key        property key under which the schema is stored
 * @param isOptional true if the schema property may be absent
 */
public void validateTableSchema(String key, boolean isOptional) {
    final Map<String, Consumer<String>> schemaValidators = new HashMap<>();
    // Column names need at least one character.
    schemaValidators.put(TABLE_SCHEMA_NAME, columnKey -> validateString(columnKey, false, 1));
    // Column types must be present and must parse as types.
    schemaValidators.put(TABLE_SCHEMA_TYPE, columnKey -> validateType(columnKey, false, false));
    validateFixedIndexedProperties(key, isOptional, schemaValidators);
}
/**
 * Validates the optional failure-handler strategy.
 *
 * <p>The built-in strategies (fail, ignore, retry) need no extra properties;
 * the custom strategy additionally requires a non-empty handler class name.
 */
private void validateFailureHandler(DescriptorProperties properties) {
    final Map<String, Consumer<String>> strategies = new HashMap<>();
    strategies.put(CONNECTOR_FAILURE_HANDLER_VALUE_FAIL, noValidation());
    strategies.put(CONNECTOR_FAILURE_HANDLER_VALUE_IGNORE, noValidation());
    strategies.put(CONNECTOR_FAILURE_HANDLER_VALUE_RETRY, noValidation());
    strategies.put(
        CONNECTOR_FAILURE_HANDLER_VALUE_CUSTOM,
        unused -> properties.validateString(CONNECTOR_FAILURE_HANDLER_CLASS, false, 1));
    properties.validateEnum(CONNECTOR_FAILURE_HANDLER, true, strategies);
}
/**
 * Validates the optional sink partitioner.
 *
 * <p>The built-in partitioners (fixed, round-robin) need no extra properties;
 * the custom partitioner additionally requires a non-empty class name.
 */
private void validateSinkPartitioner(DescriptorProperties properties) {
    final Map<String, Consumer<String>> partitioners = new HashMap<>();
    partitioners.put(CONNECTOR_SINK_PARTITIONER_VALUE_FIXED, noValidation());
    partitioners.put(CONNECTOR_SINK_PARTITIONER_VALUE_ROUND_ROBIN, noValidation());
    partitioners.put(
        CONNECTOR_SINK_PARTITIONER_VALUE_CUSTOM,
        unused -> properties.validateString(CONNECTOR_SINK_PARTITIONER_CLASS, false, 1));
    properties.validateEnum(CONNECTOR_SINK_PARTITIONER, true, partitioners);
}
/**
 * Validates the optional deployment properties.
 */
@Override
protected void validate(DescriptorProperties properties) {
    // Optional; when present the timeout must be non-negative.
    properties.validateLong(DEPLOYMENT_RESPONSE_TIMEOUT, true, 0);
    // Optional; any string (including empty) is accepted.
    properties.validateString(DEPLOYMENT_GATEWAY_ADDRESS, true, 0);
    // Optional; when present the port must be in the valid TCP range.
    properties.validateInt(DEPLOYMENT_GATEWAY_PORT, true, 0, 65535);
}
/**
 * Creates a {@link FunctionEntry} from the given properties.
 *
 * <p>Requires a non-empty function name; the returned entry carries all
 * remaining properties with the name key removed.
 */
private static FunctionEntry create(DescriptorProperties properties) {
    properties.validateString(FUNCTIONS_NAME, false, 1);
    final String functionName = properties.getString(FUNCTIONS_NAME);
    // Strip the name key so the entry's properties describe only the function itself.
    return new FunctionEntry(
        functionName,
        properties.withoutKeys(Collections.singletonList(FUNCTIONS_NAME)));
}
/**
 * Validates the Avro format properties.
 *
 * <p>Exactly one of a specific record class or an Avro schema string must be
 * defined; defining both, or neither, is rejected.
 */
@Override
public void validate(DescriptorProperties properties) {
    super.validate(properties);
    final boolean recordClassDefined = properties.containsKey(FORMAT_RECORD_CLASS);
    final boolean avroSchemaDefined = properties.containsKey(FORMAT_AVRO_SCHEMA);
    // Reject an ambiguous definition up front.
    if (recordClassDefined && avroSchemaDefined) {
        throw new ValidationException("A definition of both a schema and Avro schema is not allowed.");
    }
    if (recordClassDefined) {
        properties.validateString(FORMAT_RECORD_CLASS, false, 1);
    } else if (avroSchemaDefined) {
        properties.validateString(FORMAT_AVRO_SCHEMA, false, 1);
    } else {
        throw new ValidationException("A definition of an Avro specific record class or Avro schema is required.");
    }
}
}
/**
 * Validates the Kafka connector properties.
 *
 * <p>Checks the connector type value, the mandatory topic, the startup mode,
 * the client properties, and the sink partitioner.
 */
@Override
public void validate(DescriptorProperties properties) {
    super.validate(properties);
    properties.validateValue(CONNECTOR_TYPE, CONNECTOR_TYPE_VALUE_KAFKA, false);
    // Topic is mandatory and must have at least one character; no upper bound
    // (the three-argument overload defaults the maximum to Integer.MAX_VALUE).
    properties.validateString(CONNECTOR_TOPIC, false, 1);
    validateStartupMode(properties);
    validateKafkaProperties(properties);
    validateSinkPartitioner(properties);
}
private static TableEntry create(DescriptorProperties properties) { properties.validateString(TABLES_NAME, false, 1); properties.validateEnumValues( TABLES_TYPE,
/**
 * Validates the JSON format properties.
 *
 * <p>Exactly one schema source must be chosen: deriving from the table
 * schema, an explicit type schema, or a JSON schema string. Combining the
 * derive flag with an explicit schema, defining both explicit forms, or
 * defining none at all is rejected.
 */
@Override
public void validate(DescriptorProperties properties) {
    super.validate(properties);
    properties.validateBoolean(FORMAT_DERIVE_SCHEMA, true);
    final boolean derived = properties.getOptionalBoolean(FORMAT_DERIVE_SCHEMA).orElse(false);
    final boolean schemaDefined = properties.containsKey(FORMAT_SCHEMA);
    final boolean jsonSchemaDefined = properties.containsKey(FORMAT_JSON_SCHEMA);
    if (derived && (schemaDefined || jsonSchemaDefined)) {
        throw new ValidationException(
            "Format cannot define a schema and derive from the table's schema at the same time.");
    } else if (!derived && schemaDefined && jsonSchemaDefined) {
        throw new ValidationException("A definition of both a schema and JSON schema is not allowed.");
    } else if (!derived && !schemaDefined && !jsonSchemaDefined) {
        throw new ValidationException("A definition of a schema or JSON schema is required.");
    } else if (schemaDefined) {
        properties.validateType(FORMAT_SCHEMA, false, true);
    } else if (jsonSchemaDefined) {
        properties.validateString(FORMAT_JSON_SCHEMA, false, 1);
    }
    properties.validateBoolean(FORMAT_FAIL_ON_MISSING_FIELD, true);
}
}
/**
 * Validates a string property.
 *
 * <p>Any length (including empty) is accepted.
 *
 * @param key        property key to validate
 * @param isOptional true if the property may be absent
 */
public void validateString(String key, boolean isOptional) {
    // No length constraints: allow anything from empty to the maximum length.
    validateString(key, isOptional, 0, Integer.MAX_VALUE);
}