/**
 * Test-only constructor, hence the default (package-private) scope.
 *
 * @param url complete URL of a confluent schema registry, e.g. "http://HOST:PORT"
 */
ConfluentAvroSerializer(String url) {
    this.url = url;
    theClient = new CachedSchemaRegistryClient(url, 10000);
}
/**
 * Constructor with the signature Storm's SerializationFactory looks up for kryo
 * registration. See Storm's SerializationFactory class for details.
 *
 * @param k        unused, but must be present so SerializationFactory can find this
 *                 constructor
 * @param topoConf global storm configuration; must define "avro.schemaregistry.confluent"
 *                 with the confluent schema registry location, in the form
 *                 "http://HOST:PORT"
 */
public ConfluentAvroSerializer(Kryo k, Map<String, Object> topoConf) {
    this.url = (String) topoConf.get("avro.schemaregistry.confluent");
    this.theClient = new CachedSchemaRegistryClient(url, 10000);
}
/**
 * Builds a schema coder backed by a freshly created registry client.
 *
 * @return a ConfluentSchemaRegistryCoder over a new CachedSchemaRegistryClient
 */
@Override
public SchemaCoder get() {
    CachedSchemaRegistryClient registryClient =
            new CachedSchemaRegistryClient(url, identityMapCapacity);
    return new ConfluentSchemaRegistryCoder(registryClient);
}
}
@JsonCreator public SchemaRegistryBasedAvroBytesDecoder( @JsonProperty("url") String url, @JsonProperty("capacity") Integer capacity ) { int identityMapCapacity = capacity == null ? Integer.MAX_VALUE : capacity; this.registry = new CachedSchemaRegistryClient(url, identityMapCapacity); }
/**
 * Builds the registry client from the supplied properties and delegates to the
 * main constructor.
 *
 * <p>Reads KAFKA_SCHEMA_REGISTRY_URL for the registry location and
 * CONFLUENT_MAX_SCHEMAS_PER_SUBJECT (default Integer.MAX_VALUE) for the client's
 * schema capacity.
 *
 * @param props configuration; must contain the schema registry URL property
 */
// NOTE: throws NumberFormatException if CONFLUENT_MAX_SCHEMAS_PER_SUBJECT is set
// to a non-integer value.
public ConfluentKafkaSchemaRegistry(Properties props) {
    this(props,
        new CachedSchemaRegistryClient(
            props.getProperty(KAFKA_SCHEMA_REGISTRY_URL),
            Integer.parseInt(props.getProperty(CONFLUENT_MAX_SCHEMAS_PER_SUBJECT,
                String.valueOf(Integer.MAX_VALUE)))));
}
/**
 * Creates a schema registry client for the URL taken from the Secor configuration,
 * then runs the init hook.
 *
 * <p>The previous version built a local {@code Properties} object with
 * "schema.registry.url" that was never passed anywhere; that dead code is removed.
 *
 * @param config Secor configuration providing the schema registry URL
 * @throws RuntimeException wrapping any failure during client creation or init
 */
public SecorSchemaRegistryClient(SecorConfig config) {
    try {
        schemaRegistryClient = new CachedSchemaRegistryClient(config.getSchemaRegistryUrl(), 30);
        // NOTE(review): init(config) is an overridable call from a constructor — confirm
        // subclasses do not rely on fields initialized after this point.
        init(config);
    } catch (Exception e) {
        // Typo fixed: "initalizing" -> "initializing".
        LOG.error("Error initializing schema registry", e);
        throw new RuntimeException(e);
    }
}
public AvroProducer(final KsqlConfig ksqlConfig) { if (ksqlConfig.getString(KsqlConfig.SCHEMA_REGISTRY_URL_PROPERTY) == null) { throw new KsqlException("Schema registry url is not set."); } this.ksqlConfig = ksqlConfig; this.schemaRegistryClient = new CachedSchemaRegistryClient( ksqlConfig.getString(KsqlConfig.SCHEMA_REGISTRY_URL_PROPERTY), 100 ); }
/**
 * Returns the schema registry client, creating it on first use.
 *
 * <p>NOTE(review): lazy initialization here is not synchronized — confirm this is
 * only called from a single thread.
 *
 * @param url registry URL, consulted only when the client has not been created yet
 * @return the cached SchemaRegistryClient instance
 */
public SchemaRegistryClient client(final String url) {
    if (schemaRegistryClient != null) {
        return schemaRegistryClient;
    }
    schemaRegistryClient = new CachedSchemaRegistryClient(url, 100);
    return schemaRegistryClient;
}
/**
 * Package-private constructor intended for test cases ONLY.
 *
 * @param url the complete URL reference of a confluent schema registry,
 *            e.g. "http://HOST:PORT"
 */
ConfluentAvroSerializer(String url) {
    this.url = url;
    // Capacity 10000 matches the production constructor of this class.
    this.theClient = new CachedSchemaRegistryClient(this.url, 10000);
}
/**
 * Constructor with the signature Storm's SerializationFactory locates for kryo
 * registration. See Storm's SerializationFactory class for details.
 *
 * <p>The configuration map is now typed {@code Map<String, Object>} instead of the
 * raw {@code Map}; the erasure is unchanged, so reflective constructor lookup by
 * {@code (Kryo, Map)} still matches.
 *
 * @param k         unused, but must be present for SerializationFactory to find
 *                  this constructor
 * @param stormConf global storm configuration; must define
 *                  "avro.schemaregistry.confluent" with the registry location in
 *                  the form "http://HOST:PORT"
 */
public ConfluentAvroSerializer(Kryo k, Map<String, Object> stormConf) {
    url = (String) stormConf.get("avro.schemaregistry.confluent");
    this.theClient = new CachedSchemaRegistryClient(this.url, 10000);
}
@JsonCreator public SchemaRegistryBasedAvroBytesDecoder( @JsonProperty("url") String url, @JsonProperty("capacity") Integer capacity ) { int identityMapCapacity = capacity == null ? Integer.MAX_VALUE : capacity; this.registry = new CachedSchemaRegistryClient(url, identityMapCapacity); }
/**
 * Configures the registry client from the supplied configuration map.
 *
 * <p>The precondition now carries a message naming the missing key, instead of
 * throwing a bare IllegalStateException.
 *
 * @param configs configuration; must contain SCHEMA_REGISTRY_URL_CONFIG
 * @throws IllegalStateException if SCHEMA_REGISTRY_URL_CONFIG is absent
 */
@Override
public void configure(Map<String, Object> configs) {
    Preconditions.checkState(
        configs.containsKey(SCHEMA_REGISTRY_URL_CONFIG),
        "Missing required configuration \"%s\"",
        SCHEMA_REGISTRY_URL_CONFIG);
    String schemaRegistryUrl = (String) configs.get(SCHEMA_REGISTRY_URL_CONFIG);
    schemaRegistryClient = new CachedSchemaRegistryClient(schemaRegistryUrl, 100);
}
}
/**
 * Convenience constructor: creates the CachedSchemaRegistryClient from properties
 * and delegates to the two-argument constructor.
 *
 * <p>KAFKA_SCHEMA_REGISTRY_URL supplies the registry location;
 * CONFLUENT_MAX_SCHEMAS_PER_SUBJECT (defaulting to Integer.MAX_VALUE) supplies the
 * client capacity. A non-numeric capacity value causes NumberFormatException.
 *
 * @param props configuration properties
 */
public ConfluentKafkaSchemaRegistry(Properties props) {
    this(props,
        new CachedSchemaRegistryClient(
            props.getProperty(KAFKA_SCHEMA_REGISTRY_URL),
            Integer.parseInt(props.getProperty(CONFLUENT_MAX_SCHEMAS_PER_SUBJECT,
                String.valueOf(Integer.MAX_VALUE)))));
}
/**
 * Initializes this decoder for the given topic, creating the schema registry
 * client from the supplied properties when one is not already set.
 *
 * @param props     configuration; must be non-null and contain SCHEMA_REGISTRY_URL;
 *                  may contain MAX_SCHEMAS_PER_SUBJECT and IS_NEW_PRODUCER
 * @param topicName topic whose messages this decoder handles
 * @throws IllegalArgumentException if props is null or the registry URL is absent
 * @throws NumberFormatException    if MAX_SCHEMAS_PER_SUBJECT is not a valid int
 */
@Override
public void init(Properties props, String topicName) {
    super.init(props, topicName);
    decoderFactory = DecoderFactory.get();
    if (props == null) {
        // Previously reported "Missing schema registry url!" here, which conflated a
        // null configuration with a missing URL; use a distinct message.
        throw new IllegalArgumentException("Missing configuration properties!");
    }
    String baseUrl = props.getProperty(SCHEMA_REGISTRY_URL);
    if (baseUrl == null) {
        throw new IllegalArgumentException("Missing schema registry url!");
    }
    String maxSchemaObject = props.getProperty(
        MAX_SCHEMAS_PER_SUBJECT, DEFAULT_MAX_SCHEMAS_PER_SUBJECT);
    if (schemaRegistry == null) {
        schemaRegistry = new CachedSchemaRegistryClient(baseUrl, Integer.parseInt(maxSchemaObject));
    }
    this.isNew = Boolean.parseBoolean(props.getProperty(IS_NEW_PRODUCER, "true"));
    this.topic = topicName;
}
/**
 * Create an AvroConfluent codec.
 *
 * @param messageClass       the class to encode and decode
 * @param schemaRegistryUrls a comma separated list of Confluent Schema Registry URLs
 * @throws StreamRuntimeException if registering the schema with the registry fails
 */
public AvroConfluentCodec(Class<T> messageClass, String schemaRegistryUrls) {
    this.messageClass = messageClass;
    schema = ReflectData.get().getSchema(messageClass);
    schemaName = messageClass.getName();
    // A comma means several registry URLs: use the list-based client constructor;
    // otherwise hand the single URL straight through.
    client = schemaRegistryUrls.contains(",")
        ? new CachedSchemaRegistryClient(
            Arrays.asList(schemaRegistryUrls.split(",")), DEFAULT_IDENTITY_MAP_CAPACITY)
        : new CachedSchemaRegistryClient(schemaRegistryUrls, DEFAULT_IDENTITY_MAP_CAPACITY);
    try {
        this.schemaId = client.register(messageClass.getName(), schema);
    } catch (RestClientException | IOException e) {
        throw new StreamRuntimeException(e);
    }
    this.serializer = new KafkaAvroSerializer(client);
    this.encoder = new RawMessageEncoder<>(ReflectData.get(), schema);
}
/**
 * Configures the schema retriever from connector properties: builds the typed
 * config, the registry client, and the Avro data converter.
 *
 * @param properties raw connector configuration
 */
@Override
public void configure(Map<String, String> properties) {
    SchemaRegistrySchemaRetrieverConfig config = new SchemaRegistrySchemaRetrieverConfig(properties);
    // NOTE(review): identity-map capacity of 0 — presumably acceptable because this
    // client appears to be used only for lookups, not registrations; confirm against
    // CachedSchemaRegistryClient's capacity semantics.
    schemaRegistryClient = new CachedSchemaRegistryClient(config.getString(config.LOCATION_CONFIG), 0);
    avroData = new AvroData(config.getInt(config.AVRO_DATA_CACHE_SIZE_CONFIG));
}