/**
 * Builds the Kafka datasource metadata backing the no-data alert aggregation input stream.
 *
 * <p>The codec routes JSON messages to the fixed aggregation stream name and reads
 * event time from the configured timestamp column.
 *
 * @return a fully populated {@link Kafka2TupleMetadata} for the aggregation topic
 */
private Kafka2TupleMetadata buildAggregationDatasource() {
    // Codec: JSON stream-name selection with the shared timestamp column/format.
    Tuple2StreamMetadata codec = new Tuple2StreamMetadata();
    codec.setStreamNameSelectorCls(JSON_STRING_STREAM_NAME_SELECTOR_CLS);
    codec.setTimestampColumn(STREAM_TIMESTAMP_COLUMN_NAME);
    codec.setTimestampFormat(STREAM_TIMESTAMP_FORMAT);

    Properties selectorProps = new Properties();
    selectorProps.put("userProvidedStreamName", NODATA_ALERT_AGGR_STREAM);
    selectorProps.put("streamNameFormat", "%s");
    codec.setStreamNameSelectorProp(selectorProps);

    Kafka2TupleMetadata aggrDatasource = new Kafka2TupleMetadata();
    aggrDatasource.setName(NODATA_ALERT_AGGR_DATASOURCE_NAME);
    aggrDatasource.setType(DATASOURCE_TYPE);
    aggrDatasource.setSchemeCls(DATASOURCE_SCHEME_CLS);
    aggrDatasource.setTopic(NODATA_ALERT_AGGR_TOPIC_NAME);
    aggrDatasource.setCodec(codec);
    return aggrDatasource;
}
/**
 * Builds a single-entry datasource map for a plain-string Kafka topic.
 *
 * @param topicName      Kafka topic the datasource reads from
 * @param dataSourceName logical datasource name; also used as the map key
 * @return a map containing exactly one entry, keyed by {@code dataSourceName}
 */
private Map<String, Kafka2TupleMetadata> createDatasource(final String topicName, final String dataSourceName) {
    Kafka2TupleMetadata metadata = new Kafka2TupleMetadata();
    metadata.setName(dataSourceName);
    metadata.setType("KAFKA");
    metadata.setProperties(new HashMap<String, String>());
    metadata.setTopic(topicName);
    metadata.setSchemeCls("PlainStringScheme");
    metadata.setCodec(new Tuple2StreamMetadata());

    Map<String, Kafka2TupleMetadata> result = new HashMap<String, Kafka2TupleMetadata>();
    result.put(metadata.getName(), metadata);
    return result;
}
/**
 * Collects the Kafka topic of every tuple-metadata entry in the spout spec.
 *
 * @param spoutSpec spec whose datasource metadata map is inspected
 * @return list of topic names, one per metadata entry (duplicates preserved)
 */
private List<String> getTopics(SpoutSpec spoutSpec) {
    List<String> topics = new ArrayList<String>();
    for (Kafka2TupleMetadata metadata : spoutSpec.getKafka2TupleMetadataMap().values()) {
        topics.add(metadata.getTopic());
    }
    return topics;
}
// Flag datasources that no stream definition references, then validate datasource basics.
// NOTE(review): the brace nesting in this excerpt looks suspicious — the unsupported-type
// and null-codec validations are nested inside the "unused datasource" branch, so a *used*
// datasource with a bad type or null codec would never be validated. The braces are
// unbalanced here (method body continues past this view) — confirm the intended nesting
// against the full method before changing anything.
for (Kafka2TupleMetadata ds : context.getDataSourceMetadata().values()) {
    // 'sds' presumably holds the stream definitions collected earlier in the method — verify.
    if (!sds.stream().anyMatch(t -> t.getDataSource().equals(ds.getName()))) {
        state.appendUnusedDatasource(ds.getName());
        // Only KAFKA-typed datasources are supported by this validation.
        if (!"KAFKA".equalsIgnoreCase(ds.getType())) {
            state.appendDataSourceValidation(ds.getName(), String.format(" unsupported data source type %s !", ds.getType()));
            // A datasource without a codec cannot be decoded into tuples; skip further checks.
            if (ds.getCodec() == null) {
                state.appendDataSourceValidation(ds.getName(), String.format("codec of datasource must *not* be null!"));
                continue;
/**
 * Verifies the equals/hashCode contract of {@link Kafka2TupleMetadata}: two instances
 * populated with identical field values are equal (though distinct references) and share
 * a hash code, and diverge on both once a single field differs.
 */
@Test
public void testKafka2TupleMetadata() {
    Kafka2TupleMetadata expected = new Kafka2TupleMetadata();
    expected.setName("setName");
    expected.setCodec(new Tuple2StreamMetadata());
    expected.setType("setType");
    expected.setTopic("setTopic");
    expected.setSchemeCls("org.apache.eagle.alert.engine.scheme.PlainStringScheme");

    Kafka2TupleMetadata actual = new Kafka2TupleMetadata();
    actual.setName("setName");
    actual.setCodec(new Tuple2StreamMetadata());
    actual.setType("setType");
    actual.setTopic("setTopic");
    actual.setSchemeCls("org.apache.eagle.alert.engine.scheme.PlainStringScheme");

    // Distinct instances, but value-equal with matching hash codes.
    Assert.assertNotSame(expected, actual);
    Assert.assertEquals(expected, actual);
    Assert.assertEquals(expected.hashCode(), actual.hashCode());

    // Mutating one field must break both equality and hash equality.
    actual.setType("setType1");
    Assert.assertFalse(actual.equals(expected));
    Assert.assertFalse(actual.hashCode() == expected.hashCode());
}
}
// Build and register the Kafka datasource for this stream.
// NOTE(review): 'tuple2Stream', 'streamDesc', 'kafkaCfg' and 'streamSourceConfig' are
// declared earlier in the enclosing method (not visible in this excerpt).
Kafka2TupleMetadata datasource = new Kafka2TupleMetadata();
datasource.setType("KAFKA");
datasource.setName(streamDesc.getStreamId());
datasource.setTopic(kafkaCfg.getTopicId());
datasource.setSchemeCls(JsonScheme.class.getCanonicalName());
datasource.setProperties(new HashMap<>());
// Propagate broker ZooKeeper coordinates so consumers can locate the Kafka cluster.
Map<String, String> properties = datasource.getProperties();
properties.put(AlertConstants.KAFKA_BROKER_ZK_BASE_PATH, streamSourceConfig.getBrokerZkPath());
properties.put(AlertConstants.KAFKA_BROKER_ZK_QUORUM, streamSourceConfig.getBrokerZkQuorum());
// Codec: route JSON tuples by stream name; event time comes from the "timestamp" field.
tuple2Stream.setTimestampColumn("timestamp");
tuple2Stream.setStreamNameSelectorCls(JsonStringStreamNameSelector.class.getCanonicalName());
datasource.setCodec(tuple2Stream);
alertMetadataService.addDataSource(datasource);
/**
 * Validates the wrapped stream definition/source and fills in defaults: ensures a
 * source type (KAFKA), derives a canonical datasource name from the stream id, and
 * attaches a JSON codec keyed on the "timestamp" column.
 *
 * @throws NullPointerException if the stream source or stream definition is unset
 */
public void validateAndEnsureDefault() {
    Preconditions.checkNotNull(streamSource);
    Preconditions.checkNotNull(streamDefinition);
    if (streamSource.getType() == null) {
        streamSource.setType("KAFKA");
    }
    // Derive a canonical datasource name from the stream id and apply it to both sides.
    String dataSourceName = (getStreamDefinition().getStreamId() + "_CUSTOMIZED").toUpperCase();
    getStreamDefinition().setDataSource(dataSourceName);
    getStreamSource().setName(dataSourceName);

    Tuple2StreamMetadata codec = new Tuple2StreamMetadata();
    codec.setTimestampColumn("timestamp");
    // Always use the JSON stream-name selector. (The blank-check the previous version
    // performed immediately after this unconditional assignment was dead code — a class
    // name is never blank — and has been removed.)
    codec.setStreamNameSelectorCls(JsonStringStreamNameSelector.class.getName());
    Properties streamNameSelectorProp = new Properties();
    streamNameSelectorProp.put("userProvidedStreamName", streamSource.getName());
    codec.setStreamNameSelectorProp(streamNameSelectorProp);
    // Normalize a blank timestamp format to null so downstream code sees "unset".
    if (StringUtils.isBlank(codec.getTimestampFormat())) {
        codec.setTimestampFormat(null);
    }
    this.streamSource.setCodec(codec);
}
}
// Minimal test fixture: a named Kafka datasource with an empty codec, registered with
// the context under test. DS_NAME / TEST_TOPIC are constants declared outside this view.
Kafka2TupleMetadata ds = new Kafka2TupleMetadata();
ds.setName(DS_NAME);
ds.setTopic(TEST_TOPIC);
ds.setCodec(new Tuple2StreamMetadata());
context.addDataSource(ds);
// Build topic -> metadata entries, one datasource per topic.
Map<String, Kafka2TupleMetadata> kafka2TupleMetadataMap = new HashMap<>();
for (String topic : plainStringTopics) {
    Kafka2TupleMetadata kafka2TupleMetadata = new Kafka2TupleMetadata();
    kafka2TupleMetadata.setName(topic);
    kafka2TupleMetadata.setTopic(topic);
    kafka2TupleMetadata.setSchemeCls("org.apache.eagle.alert.engine.scheme.PlainStringScheme");
    kafka2TupleMetadataMap.put(topic, kafka2TupleMetadata);
    // NOTE(review): 'kafka2TupleMetadata' is redeclared below in the same scope, which does
    // not compile. This excerpt most likely lost a closing brace plus the header of a second
    // loop (e.g. over a jsonStringTopics collection) between the two declarations — confirm
    // against the full file before editing.
    Kafka2TupleMetadata kafka2TupleMetadata = new Kafka2TupleMetadata();
    kafka2TupleMetadata.setName(topic);
    kafka2TupleMetadata.setTopic(topic);
    kafka2TupleMetadata.setSchemeCls("org.apache.eagle.alert.engine.scheme.JsonScheme");
    kafka2TupleMetadataMap.put(topic, kafka2TupleMetadata);
// Collect, per Kafka topic, the scheme class and datasource properties from the new metadata.
// NOTE(review): 'newSchemaName' is declared earlier in the enclosing method (not visible
// here); the loop body continues past this excerpt.
Map<String, Map<String, String>> dataSourceProperties = new HashMap<>();
for (Kafka2TupleMetadata ds : newMeta.getKafka2TupleMetadataMap().values()) {
    newSchemaName.put(ds.getTopic(), ds.getSchemeCls());
    dataSourceProperties.put(ds.getTopic(), ds.getProperties());
/**
 * Registers (or replaces) a datasource in this service, keyed by its name.
 *
 * @param dataSource the datasource metadata to register; its name is used as the key
 */
public void addDataSource(Kafka2TupleMetadata dataSource) {
    String key = dataSource.getName();
    this.datasources.put(key, dataSource);
}
// Resolve each datasource referenced by this usage into topic-keyed maps:
// dss: topic -> datasource metadata, tss: topic -> its tuple codec.
// NOTE(review): 'ds' is used without a null check — a dangling dataSourceId would NPE
// here; confirm upstream guarantees every id exists in datasourcesMap.
for (String dataSourceId : usage.getDataSources()) {
    Kafka2TupleMetadata ds = datasourcesMap.get(dataSourceId);
    dss.put(ds.getTopic(), ds);
    tss.put(ds.getTopic(), ds.getCodec());
    // Map the policy's partition stream back to its Kafka topic via the stream schema.
    String stream = policyStreamPartition.getStreamId();
    StreamDefinition schema = streamSchemaMap.get(stream);
    String topic = datasourcesMap.get(schema.getDataSource()).getTopic();
private void validateStreams() { Collection<Kafka2TupleMetadata> datasources = context.getDataSourceMetadata().values(); Collection<PolicyDefinition> definitions = context.getPolicies().values(); for (StreamDefinition sd : context.getStreamSchemas().values()) { if (!datasources.stream().anyMatch(d -> d.getName().equals(sd.getDataSource()))) { state.appendStreamValidation(sd.getStreamId(), String.format("stream %s reference unknown data source %s !", sd.getStreamId(), sd.getDataSource())); } if (!definitions.stream().anyMatch(p -> p.getInputStreams().contains(sd.getStreamId()))) { state.appendUnusedStreams(sd.getStreamId()); } // more on columns if (sd.getColumns() == null || sd.getColumns().size() == 0) { state.appendStreamValidation(sd.getStreamId(), String.format("stream %s have empty columns!", sd.getStreamId())); } } }
/**
 * Builds the Kafka datasource metadata backing the no-data alert aggregation OUTPUT stream.
 *
 * <p>Mirrors the aggregation input datasource but targets the output topic/stream constants.
 *
 * @return a fully populated {@link Kafka2TupleMetadata} for the aggregation output topic
 */
private Kafka2TupleMetadata buildAggregationOutputDatasource() {
    // Codec: JSON stream-name selection with the shared timestamp column/format.
    Tuple2StreamMetadata codec = new Tuple2StreamMetadata();
    codec.setStreamNameSelectorCls(JSON_STRING_STREAM_NAME_SELECTOR_CLS);
    codec.setTimestampColumn(STREAM_TIMESTAMP_COLUMN_NAME);
    codec.setTimestampFormat(STREAM_TIMESTAMP_FORMAT);

    Properties selectorProps = new Properties();
    selectorProps.put("userProvidedStreamName", NODATA_ALERT_AGGR_OUTPUT_STREAM);
    selectorProps.put("streamNameFormat", "%s");
    codec.setStreamNameSelectorProp(selectorProps);

    Kafka2TupleMetadata outputDatasource = new Kafka2TupleMetadata();
    outputDatasource.setName(NODATA_ALERT_AGGR_OUTPUT_DATASOURCE_NAME);
    outputDatasource.setType(DATASOURCE_TYPE);
    outputDatasource.setSchemeCls(DATASOURCE_SCHEME_CLS);
    outputDatasource.setTopic(NODATA_ALERT_AGGR_OUTPUT_TOPIC_NAME);
    outputDatasource.setCodec(codec);
    return outputDatasource;
}
// Test fixture: one plain-string Kafka datasource plus a repartition metadata entry,
// both keyed by the datasource name. 'dataSources' is declared outside this view.
Kafka2TupleMetadata ds = new Kafka2TupleMetadata();
ds.setName("ds-name");
ds.setType("KAFKA");
ds.setProperties(new HashMap<String, String>());
ds.setTopic("name-of-topic1");
ds.setSchemeCls("PlainStringScheme");
ds.setCodec(new Tuple2StreamMetadata());
Map<String, Kafka2TupleMetadata> dsMap = new HashMap<String, Kafka2TupleMetadata>();
dsMap.put(ds.getName(), ds);
// Repartition entry routes stream "s1" from this datasource.
StreamRepartitionMetadata m1 = new StreamRepartitionMetadata(ds.getName(), "s1");
dataSources.put(ds.getName(), Arrays.asList(m1));
/**
 * Extracts the identifying key of a supported metadata entity (topology name, policy
 * name, datasource name, stream id, stream group, ...).
 *
 * @param t   the metadata entity; must be one of the supported types below
 * @param <T> entity type
 * @param <K> key type the caller expects (unchecked cast — caller must know the type)
 * @return the entity's identifying key
 * @throws IllegalArgumentException if {@code t} is not a supported metadata type
 */
@SuppressWarnings("unchecked")
private static <T, K> K getKey(T t) {
    if (t instanceof Topology) {
        return (K) ((Topology) t).getName();
    } else if (t instanceof PolicyAssignment) {
        return (K) ((PolicyAssignment) t).getPolicyName();
    } else if (t instanceof Kafka2TupleMetadata) {
        return (K) ((Kafka2TupleMetadata) t).getName();
    } else if (t instanceof PolicyDefinition) {
        return (K) ((PolicyDefinition) t).getName();
    } else if (t instanceof Publishment) {
        return (K) ((Publishment) t).getName();
    } else if (t instanceof StreamDefinition) {
        return (K) ((StreamDefinition) t).getStreamId();
    } else if (t instanceof MonitoredStream) {
        return (K) ((MonitoredStream) t).getStreamGroup();
    }
    // IllegalArgumentException (a RuntimeException subclass) keeps existing catch sites
    // working while naming the actual failure: an unsupported argument type.
    throw new IllegalArgumentException("unexpected key class " + t.getClass());
}
/**
 * Builds the Kafka2TupleMetadata fixture shared by these tests: a KAFKA datasource
 * named {@code TEST_DATASOURCE_1} reading topic "tupleTopic" with an empty codec.
 */
private static Kafka2TupleMetadata createKafka2TupleMetadata() {
    Kafka2TupleMetadata metadata = new Kafka2TupleMetadata();
    metadata.setName(TEST_DATASOURCE_1);
    metadata.setType("KAFKA");
    metadata.setTopic("tupleTopic");
    metadata.setSchemeCls("SchemeClass");
    metadata.setCodec(new Tuple2StreamMetadata());
    return metadata;
}
@Path("/streams/create") @POST public OpResult createStream(StreamDefinitionWrapper stream) { Preconditions.checkNotNull(stream.getStreamDefinition(),"Stream definition is null"); Preconditions.checkNotNull(stream.getStreamSource(),"Stream source is null"); stream.validateAndEnsureDefault(); OpResult createStreamResult = dao.createStream(stream.getStreamDefinition()); OpResult createDataSourceResult = dao.addDataSource(stream.getStreamSource()); // TODO: Check kafka topic exist or not. if (createStreamResult.code == OpResult.SUCCESS && createDataSourceResult.code == OpResult.SUCCESS) { return OpResult.success("Successfully create stream " + stream.getStreamDefinition().getStreamId() + ", and datasource " + stream.getStreamSource().getName()); } else { return OpResult.fail("Error: " + StringUtils.join(new String[]{createDataSourceResult.message, createDataSourceResult.message},",")); } }