/**
 * Builds the Kafka datasource metadata that feeds the no-data-alert aggregation:
 * the aggregation topic read with the configured scheme class, decoded by a
 * JSON-string codec whose stream name is pinned to {@code NODATA_ALERT_AGGR_STREAM}.
 */
private Kafka2TupleMetadata buildAggregationDatasource() {
    // Selector properties: a fixed, user-provided stream name emitted verbatim ("%s").
    Properties selectorProps = new Properties();
    selectorProps.put("userProvidedStreamName", NODATA_ALERT_AGGR_STREAM);
    selectorProps.put("streamNameFormat", "%s");

    Tuple2StreamMetadata streamCodec = new Tuple2StreamMetadata();
    streamCodec.setStreamNameSelectorCls(JSON_STRING_STREAM_NAME_SELECTOR_CLS);
    streamCodec.setTimestampColumn(STREAM_TIMESTAMP_COLUMN_NAME);
    streamCodec.setTimestampFormat(STREAM_TIMESTAMP_FORMAT);
    streamCodec.setStreamNameSelectorProp(selectorProps);

    Kafka2TupleMetadata ds = new Kafka2TupleMetadata();
    ds.setName(NODATA_ALERT_AGGR_DATASOURCE_NAME);
    ds.setType(DATASOURCE_TYPE);
    ds.setSchemeCls(DATASOURCE_SCHEME_CLS);
    ds.setTopic(NODATA_ALERT_AGGR_TOPIC_NAME);
    ds.setCodec(streamCodec);
    return ds;
}
/**
 * Builds the Kafka datasource metadata for the no-data-alert aggregation OUTPUT:
 * identical in shape to the input datasource, but bound to the output topic and
 * the output stream name {@code NODATA_ALERT_AGGR_OUTPUT_STREAM}.
 */
private Kafka2TupleMetadata buildAggregationOutputDatasource() {
    // Selector properties: fixed output stream name, formatted verbatim ("%s").
    Properties selectorProps = new Properties();
    selectorProps.put("userProvidedStreamName", NODATA_ALERT_AGGR_OUTPUT_STREAM);
    selectorProps.put("streamNameFormat", "%s");

    Tuple2StreamMetadata streamCodec = new Tuple2StreamMetadata();
    streamCodec.setStreamNameSelectorCls(JSON_STRING_STREAM_NAME_SELECTOR_CLS);
    streamCodec.setTimestampColumn(STREAM_TIMESTAMP_COLUMN_NAME);
    streamCodec.setTimestampFormat(STREAM_TIMESTAMP_FORMAT);
    streamCodec.setStreamNameSelectorProp(selectorProps);

    Kafka2TupleMetadata ds = new Kafka2TupleMetadata();
    ds.setName(NODATA_ALERT_AGGR_OUTPUT_DATASOURCE_NAME);
    ds.setType(DATASOURCE_TYPE);
    ds.setSchemeCls(DATASOURCE_SCHEME_CLS);
    ds.setTopic(NODATA_ALERT_AGGR_OUTPUT_TOPIC_NAME);
    ds.setCodec(streamCodec);
    return ds;
}
/**
 * Validates the wrapped stream source/definition and fills in defaults:
 * a KAFKA source type when none is set, an upper-cased
 * {@code <streamId>_CUSTOMIZED} datasource name shared by both the definition
 * and the source, and a JSON-string codec keyed by that name on the
 * "timestamp" column.
 *
 * @throws NullPointerException if streamSource or streamDefinition is null
 */
public void validateAndEnsureDefault() {
    Preconditions.checkNotNull(streamSource);
    Preconditions.checkNotNull(streamDefinition);
    if (streamSource.getType() == null) {
        streamSource.setType("KAFKA");
    }
    // The shared datasource name ties the stream definition to its source.
    // NOTE(review): toUpperCase() is locale-sensitive; consider toUpperCase(Locale.ROOT).
    String dataSourceName = (getStreamDefinition().getStreamId() + "_CUSTOMIZED").toUpperCase();
    getStreamDefinition().setDataSource(dataSourceName);
    getStreamSource().setName(dataSourceName);

    Tuple2StreamMetadata codec = new Tuple2StreamMetadata();
    codec.setTimestampColumn("timestamp");
    codec.setStreamNameSelectorCls(JsonStringStreamNameSelector.class.getName());
    Properties streamNameSelectorProp = new Properties();
    streamNameSelectorProp.put("userProvidedStreamName", streamSource.getName());
    codec.setStreamNameSelectorProp(streamNameSelectorProp);
    // Removed two dead branches from the original: re-checking
    // getStreamNameSelectorCls() for blankness right after setting it to a class
    // name (always non-blank), and setting a blank timestamp format back to null
    // (a no-op on a freshly constructed codec).
    this.streamSource.setCodec(codec);
}
}
/**
 * Creates a single KAFKA datasource for the given topic and returns it in a
 * map keyed by the datasource name.
 *
 * @param topicName      Kafka topic the datasource reads from
 * @param dataSourceName name used both on the metadata and as the map key
 */
private Map<String, Kafka2TupleMetadata> createDatasource(final String topicName, final String dataSourceName) {
    final Kafka2TupleMetadata metadata = new Kafka2TupleMetadata();
    metadata.setName(dataSourceName);
    metadata.setType("KAFKA");
    metadata.setProperties(new HashMap<String, String>());
    metadata.setTopic(topicName);
    metadata.setSchemeCls("PlainStringScheme");
    metadata.setCodec(new Tuple2StreamMetadata());

    final Map<String, Kafka2TupleMetadata> result = new HashMap<String, Kafka2TupleMetadata>();
    result.put(metadata.getName(), metadata);
    return result;
}
// Populate the datasource fixture: fixed name, KAFKA type, and an empty
// mutable properties map (enclosing method not visible in this chunk).
ds.setName("ds-name"); ds.setType("KAFKA"); ds.setProperties(new HashMap<String, String>());
/** Builds the {@code Kafka2TupleMetadata} fixture used by these tests. */
private static Kafka2TupleMetadata createKafka2TupleMetadata() {
    final Kafka2TupleMetadata metadata = new Kafka2TupleMetadata();
    metadata.setName(TEST_DATASOURCE_1);
    metadata.setSchemeCls("SchemeClass");
    metadata.setTopic("tupleTopic");
    metadata.setType("KAFKA");
    metadata.setCodec(new Tuple2StreamMetadata());
    return metadata;
}
// Build the KAFKA datasource for this stream: named after the stream id,
// reading the configured topic, decoded with the JSON scheme.
// (Enclosing method not visible in this chunk.)
Kafka2TupleMetadata datasource = new Kafka2TupleMetadata(); datasource.setType("KAFKA"); datasource.setName(streamDesc.getStreamId()); datasource.setTopic(kafkaCfg.getTopicId()); datasource.setSchemeCls(JsonScheme.class.getCanonicalName());
// Register one datasource per plain-string topic (loop body continues past
// this chunk; closing brace not visible).
// NOTE(review): setName/setTopic/setSchemeCls are each called twice — the
// second setSchemeCls("...JsonScheme") silently overwrites
// "...PlainStringScheme" set just above. Looks like a copy/paste leftover or
// a merge artifact; confirm which scheme is intended for these topics.
for (String topic : plainStringTopics) { Kafka2TupleMetadata kafka2TupleMetadata = new Kafka2TupleMetadata(); kafka2TupleMetadata.setName(topic); kafka2TupleMetadata.setTopic(topic); kafka2TupleMetadata.setSchemeCls("org.apache.eagle.alert.engine.scheme.PlainStringScheme"); kafka2TupleMetadata.setName(topic); kafka2TupleMetadata.setTopic(topic); kafka2TupleMetadata.setSchemeCls("org.apache.eagle.alert.engine.scheme.JsonScheme");
/**
 * Verifies the equals/hashCode contract of {@code Kafka2TupleMetadata}:
 * two distinct instances with identical fields compare equal with matching
 * hash codes, and diverge once a field differs.
 */
@Test
public void testKafka2TupleMetadata() {
    Kafka2TupleMetadata first = new Kafka2TupleMetadata();
    first.setName("setName");
    first.setCodec(new Tuple2StreamMetadata());
    first.setType("setType");
    first.setTopic("setTopic");
    first.setSchemeCls("org.apache.eagle.alert.engine.scheme.PlainStringScheme");

    Kafka2TupleMetadata second = new Kafka2TupleMetadata();
    second.setName("setName");
    second.setCodec(new Tuple2StreamMetadata());
    second.setType("setType");
    second.setTopic("setTopic");
    second.setSchemeCls("org.apache.eagle.alert.engine.scheme.PlainStringScheme");

    // Distinct instances, equal by value (idiomatic assertNotSame/assertEquals
    // instead of the original assertFalse(a == b)/assertTrue(a.equals(b))).
    Assert.assertNotSame(first, second);
    Assert.assertEquals(second, first);
    Assert.assertEquals(second.hashCode(), first.hashCode());

    // A differing field breaks equality (and, for these values, the hash codes).
    second.setType("setType1");
    Assert.assertFalse(second.equals(first));
    Assert.assertFalse(second.hashCode() == first.hashCode());
}
}
// Minimal datasource fixture: name, topic, and an empty codec
// (enclosing method not visible in this chunk).
ds.setName(DS_NAME); ds.setTopic(TEST_TOPIC); ds.setCodec(new Tuple2StreamMetadata());