/**
 * Converts the raw property map into validated {@link DescriptorProperties}.
 *
 * <p>Runs the stream-table, schema, and Elasticsearch validators in sequence;
 * any violation is reported by the corresponding validator.
 *
 * @param properties raw key-value properties describing the table
 * @return normalized and validated descriptor properties
 */
private DescriptorProperties getValidatedProperties(Map<String, String> properties) {
	final DescriptorProperties validated = new DescriptorProperties(true);
	validated.putProperties(properties);

	// NOTE(review): the boolean flags select the supported update modes /
	// schema features — presumably (append, retract, upsert) and
	// (streaming, source timestamps, source watermarks); confirm against the
	// validator signatures before changing them.
	new StreamTableDescriptorValidator(true, false, true).validate(validated);
	new SchemaValidator(true, false, false).validate(validated);
	new ElasticsearchValidator().validate(validated);

	return validated;
}
@Override public String toString() { return DescriptorProperties.toString(toProperties()); } }
/**
 * Validates all Elasticsearch connector properties.
 *
 * <p>Checks the connector type marker first, then each property group
 * (version, hosts, general options, failure handling, bulk flushing,
 * connection options) in turn.
 *
 * @param properties the descriptor properties to check
 */
@Override
public void validate(DescriptorProperties properties) {
	super.validate(properties);
	// connector.type must equal the Elasticsearch identifier (not optional)
	properties.validateValue(CONNECTOR_TYPE, CONNECTOR_TYPE_VALUE_ELASTICSEARCH, false);
	validateVersion(properties);
	validateHosts(properties);
	validateGeneralProperties(properties);
	validateFailureHandler(properties);
	validateBulkFlush(properties);
	validateConnectionProperties(properties);
}
/**
 * Converts this format descriptor into a normalized property map.
 *
 * <p>Emits the format type and property version, then appends the
 * format-specific properties supplied by the subclass.
 *
 * @return an immutable view of the collected properties
 */
@Override
public final Map<String, String> toProperties() {
	final DescriptorProperties props = new DescriptorProperties();
	props.putString(FormatDescriptorValidator.FORMAT_TYPE, type);
	props.putInt(FormatDescriptorValidator.FORMAT_PROPERTY_VERSION, version);
	props.putProperties(toFormatProperties());
	return props.asMap();
}
/**
 * Converts this connector descriptor into a normalized property map.
 *
 * <p>Emits the connector type and property version, then appends the
 * connector-specific properties supplied by the subclass.
 *
 * @return an immutable view of the collected properties
 */
@Override
public final Map<String, String> toProperties() {
	final DescriptorProperties props = new DescriptorProperties();
	props.putString(CONNECTOR_TYPE, type);
	// NOTE(review): the version is stored via putLong here while the format
	// descriptor uses putInt — presumably intentional, but worth confirming
	// the two property versions are read back consistently.
	props.putLong(CONNECTOR_PROPERTY_VERSION, version);
	props.putProperties(toConnectorProperties());
	return props.asMap();
}
/**
 * Replaces a single property of the descriptor with a new value and runs the
 * validator against the modified property set.
 *
 * @param descriptor descriptor whose properties serve as the baseline
 * @param property   key to overwrite
 * @param newValue   value to set for the key
 */
protected void addPropertyAndVerify(Descriptor descriptor, String property, String newValue) {
	final DescriptorProperties baseline = new DescriptorProperties();
	baseline.putProperties(descriptor.toProperties());
	// drop the old entry first, then insert the replacement value
	final DescriptorProperties mutated = baseline.withoutKeys(Collections.singletonList(property));
	mutated.putString(property, newValue);
	validator().validate(mutated);
}
private DescriptorProperties getValidatedProperties(Map<String, String> properties) { final DescriptorProperties descriptorProperties = new DescriptorProperties(true); descriptorProperties.putProperties(properties); // allow Kafka timestamps to be used, watermarks can not be received from source new SchemaValidator(true, supportsKafkaTimestamps(), false).validate(descriptorProperties); new KafkaValidator().validate(descriptorProperties); return descriptorProperties; }
private static DescriptorProperties getValidatedProperties(Map<String, String> propertiesMap) { final DescriptorProperties descriptorProperties = new DescriptorProperties(); descriptorProperties.putProperties(propertiesMap); // validate new AvroValidator().validate(descriptorProperties); return descriptorProperties; } }
private static DescriptorProperties getValidatedProperties(Map<String, String> propertiesMap) { final DescriptorProperties descriptorProperties = new DescriptorProperties(); descriptorProperties.putProperties(propertiesMap); // validate new JsonValidator().validate(descriptorProperties); return descriptorProperties; } }
/**
 * Validates all Kafka connector properties.
 *
 * <p>Checks the connector type marker and the topic name, then the startup
 * mode, the Kafka client properties, and the sink partitioner configuration.
 *
 * @param properties the descriptor properties to check
 */
@Override
public void validate(DescriptorProperties properties) {
	super.validate(properties);
	// connector.type must equal the Kafka identifier (not optional)
	properties.validateValue(CONNECTOR_TYPE, CONNECTOR_TYPE_VALUE_KAFKA, false);
	// topic is required and must be a non-empty string
	properties.validateString(CONNECTOR_TOPIC, false, 1, Integer.MAX_VALUE);
	validateStartupMode(properties);
	validateKafkaProperties(properties);
	validateSinkPartitioner(properties);
}
/**
 * Configures a failure handling strategy in case a request to Elasticsearch fails.
 *
 * <p>This strategy ignores failures and drops the request.
 *
 * @return this descriptor for fluent chaining
 */
public Elasticsearch failureHandlerIgnore() {
	internalProperties.putString(
		CONNECTOR_FAILURE_HANDLER,
		ElasticsearchValidator.CONNECTOR_FAILURE_HANDLER_VALUE_IGNORE);
	return this;
}
@Override protected void validate(DescriptorProperties properties) { properties.validateString(TABLES_HISTORY_TABLE, false, 1); properties.validateArray( TABLES_PRIMARY_KEY, (key) -> properties.validateString(key, false, 1), 1, 1); // currently, composite primary keys are not supported properties.validateString(TABLES_TIME_ATTRIBUTE, false, 1); } }
/**
 * Validates an integer property without range restrictions.
 *
 * @param key        property key to check
 * @param isOptional whether a missing value is acceptable
 */
public void validateInt(String key, boolean isOptional) {
	// delegate with the widest possible inclusive bounds
	validateInt(key, isOptional, Integer.MIN_VALUE, Integer.MAX_VALUE);
}
/**
 * Validates a long property without range restrictions.
 *
 * @param key        property key to check
 * @param isOptional whether a missing value is acceptable
 */
public void validateLong(String key, boolean isOptional) {
	// delegate with the widest possible inclusive bounds
	validateLong(key, isOptional, Long.MIN_VALUE, Long.MAX_VALUE);
}
/**
 * Configures how to buffer elements before sending them in bulk to the cluster for efficiency.
 *
 * <p>Sets the bulk flush interval (in milliseconds).
 *
 * @param interval bulk flush interval (in milliseconds).
 * @return this descriptor for fluent chaining
 */
public Elasticsearch bulkFlushInterval(long interval) {
	internalProperties.putLong(CONNECTOR_BULK_FLUSH_INTERVAL, interval);
	return this;
}
/**
 * Validates an integer property. The boundaries are inclusive.
 *
 * @param key        property key to check
 * @param isOptional whether a missing value is acceptable
 * @param min        smallest allowed value (inclusive)
 * @param max        largest allowed value (inclusive)
 */
public void validateInt(String key, boolean isOptional, int min, int max) {
	// parse with Integer::valueOf and compare against the inclusive bounds
	validateComparable(key, isOptional, min, max, "integer", Integer::valueOf);
}
/**
 * Validates a short property without range restrictions.
 *
 * @param key        property key to check
 * @param isOptional whether a missing value is acceptable
 */
public void validateShort(String key, boolean isOptional) {
	// delegate with the widest possible inclusive bounds
	validateShort(key, isOptional, Short.MIN_VALUE, Short.MAX_VALUE);
}
protected void removePropertyAndVerify(Descriptor descriptor, String property) { final DescriptorProperties properties = new DescriptorProperties(); properties.putProperties(descriptor.toProperties()); final DescriptorProperties copy = properties.withoutKeys(Collections.singletonList(property)); validator().validate(copy); } }
/**
 * Validates a float property. The boundaries are inclusive.
 *
 * @param key        property key to check
 * @param isOptional whether a missing value is acceptable
 * @param min        smallest allowed value (inclusive)
 * @param max        largest allowed value (inclusive)
 */
public void validateFloat(String key, boolean isOptional, float min, float max) {
	// parse with Float::valueOf and compare against the inclusive bounds
	validateComparable(key, isOptional, min, max, "float", Float::valueOf);
}
protected void verifyProperties(Descriptor descriptor, Map<String, String> expected) { // test produced properties assertEquals(expected, descriptor.toProperties()); // test validation logic final DescriptorProperties properties = new DescriptorProperties(); properties.putProperties(expected); validator().validate(properties); }