// Storm lifecycle callback, invoked once when this spout task is deployed to a worker.
// Wires up the collaborators handed in by Storm, caches an empty SpoutSpec until real
// metadata arrives, subscribes to SPOUT metadata-change notifications, and registers
// the KafkaSpout metric with a 60-second reporting interval.
@SuppressWarnings("rawtypes") // Storm's ISpout.open declares a raw Map, so this override must match
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("open method invoked");
    }
    // Stash Storm-provided collaborators on fields for use by later lifecycle calls.
    this.conf = conf;
    this.context = context;
    this.collector = collector;
    this.taskIndex = context.getThisTaskIndex();
    // initialize an empty SpoutSpec — placeholder until onReload delivers real metadata
    cachedSpoutSpec = new SpoutSpec(topologyId, new HashMap<>(), new HashMap<>(), new HashMap<>());
    // Subscribe this spout to SPOUT-type metadata changes.
    // NOTE(review): init uses the `config` field, not the `conf` parameter stored above —
    // confirm this distinction between Storm config and service config is intentional.
    changeNotifyService.registerListener(this);
    changeNotifyService.init(config, MetadataType.SPOUT);
    // register KafkaSpout metric (reported every 60 seconds)
    kafkaSpoutMetric = new KafkaSpoutMetric();
    context.registerMetric("kafkaSpout", kafkaSpoutMetric, 60);
    this.serializer = Serializers.newPartitionedEventSerializer(this);
}
// Build the spout specification for this topology and cache it by topology name.
// NOTE(review): fragment only — assumes streamsMap/tss/dss were populated for `topo`
// earlier in the enclosing method; verify against the full source.
SpoutSpec spoutSpec = new SpoutSpec(topo.getName(), streamsMap, tss, dss);
topoSpoutSpecsMap.put(topo.getName(), spoutSpec);
// Register the single repartition metadata entry for this data source, then drive a
// reload with an entirely empty SpoutSpec (no streams, no tuple metadata, no sources).
// NOTE(review): `cachedMetadata` is constructed here but never passed to onReload within
// this fragment — confirm it is consumed later in the enclosing test, or it is dead.
streamMetadatas.put(dataSourceName, Arrays.asList(m1));
SpoutSpec cachedMetadata = new SpoutSpec(topoId, streamMetadatas, null, dsMap);
spout.onReload(new SpoutSpec(topoId, new HashMap<String, List<StreamRepartitionMetadata>>(), new HashMap<>(), new HashMap<String, Kafka2TupleMetadata>()), null);
} catch (Exception ex) {
// Reload the spout with a spec containing two metadata entries for the data source,
// then assert the reload side effect was observed (presumably `verified` is flipped
// by a listener/callback set up earlier in the test — fragment only, confirm).
dataSources.put(dataSourceName, Arrays.asList(m1, m2));
SpoutSpec newMetadata = new SpoutSpec(topoId, dataSources, null, dsMap);
spout.onReload(newMetadata, null);
Assert.assertTrue(verified.get());
// Map the data source's name to its single metadata entry and build the replacement
// SpoutSpec (tuple-to-stream metadata deliberately null in this fragment — confirm
// the consumer tolerates a null third argument).
dataSources.put(ds.getName(), Arrays.asList(m1));
SpoutSpec newMetadata = new SpoutSpec(topoId, dataSources, null, dsMap);
// Assemble a fully-populated SpoutSpec and serialize it to JSON via the project's
// MetadataSerDeser helper. NOTE(review): System.out.println looks like test/debug
// output — fine in a test, but consider a logger if this is production code.
SpoutSpec newSpec = new SpoutSpec(topologyName, streamRepartitionMetadataMap, tuple2StreamMetadataMap, kafka2TupleMetadataMap);
String json = MetadataSerDeser.serialize(newSpec);
System.out.println(json);