// Shuts this spout down by delegating to the parent implementation.
@Override
public void close() {
    super.close();
}
}
// Exposes the parent component configuration unchanged.
@Override
public Map<String, Object> getComponentConfiguration() {
    return super.getComponentConfiguration();
}
// Acknowledges a tuple by forwarding the message id to the parent spout.
@Override
public void ack(Object msgId) {
    super.ack(msgId);
}
}
// Pauses emission: notify the parent, then clear the local active flag.
@Override
public void deactivate() {
    super.deactivate();
    active = false;
}
// Resumes emission: notify the parent, then raise the local active flag.
@Override
public void activate() {
    super.activate();
    active = true;
}
/**
 * Handles a failed tuple: logs the failure, lets the parent record it, and,
 * when acking is enabled, moves the in-flight payload onto the retry list
 * so it can be re-emitted later.
 */
@Override
public void fail(Object msgId) {
    LOG.trace("Fail received for msg id {} on spout {}", msgId, spoutId);
    super.fail(msgId);
    if (!ackEnabled) {
        return;
    }
    // Pull the tuple's values out of the in-flight map and queue them for retry.
    HdfsUtils.Pair<MessageId, List<Object>> retryEntry =
            HdfsUtils.Pair.of(msgId, inflight.remove(msgId));
    retryList.add(retryEntry);
}
// Stops emission: parent bookkeeping first, then drop the active flag.
@Override
public void deactivate() {
    super.deactivate();
    active = false;
}
}
// Starts emission: parent bookkeeping first, then set the active flag.
@Override
public void activate() {
    super.activate();
    active = true;
}
/**
 * Processes a tuple failure. The parent is always notified; when acking is
 * enabled the in-flight payload for this id is re-queued on the retry list.
 */
@Override
public void fail(Object msgId) {
    LOG.trace("Fail received for msg id {} on spout {}", msgId, spoutId);
    super.fail(msgId);
    if (ackEnabled) {
        // Removing from inflight and queueing the pair schedules a re-emit.
        List<Object> values = inflight.remove(msgId);
        HdfsUtils.Pair<MessageId, List<Object>> pair = HdfsUtils.Pair.of(msgId, values);
        retryList.add(pair);
    }
}
@Override public void close() { //TODO super.close(); }
/**
 * Builds the component configuration reported for this spout: the parent
 * configuration plus the consumed topics, the consumer group id, and every
 * Kafka client property whose value is a primitive or wrapper type, all
 * stored under a {@code "config."} key prefix.
 */
@Override
public Map<String, Object> getComponentConfiguration() {
    Map<String, Object> configuration = super.getComponentConfiguration();
    if (configuration == null) {
        configuration = new HashMap<>();
    }
    final String configKeyPrefix = "config.";
    configuration.put(configKeyPrefix + "topics", getTopicsString());
    configuration.put(configKeyPrefix + "groupid", kafkaSpoutConfig.getConsumerGroupId());
    for (Entry<String, Object> prop : kafkaSpoutConfig.getKafkaProps().entrySet()) {
        Object value = prop.getValue();
        if (value == null || !isPrimitiveOrWrapper(value.getClass())) {
            // Skip values that are not simple primitives/wrappers.
            LOG.debug("Dropping Kafka prop '{}' from component configuration", prop.getKey());
            continue;
        }
        configuration.put(configKeyPrefix + prop.getKey(), value);
    }
    return configuration;
}
// Forwards the acknowledgement straight to the parent spout.
@Override
public void ack(Object msgId) {
    super.ack(msgId);
}
}
// Marks the spout inactive after running the parent's deactivation hook.
@Override
public void deactivate() {
    super.deactivate();
    active = false;
}
// Marks the spout active after running the parent's activation hook.
@Override
public void activate() {
    super.activate();
    active = true;
}
/**
 * Handles a failed tuple: when the message id corresponds to a Kafka
 * {@code ConsumerRecord}, decrements the in-flight counter; always bumps the
 * processed counter and forwards the failure to the parent spout.
 *
 * <p>Fixes over the original: {@code instanceof} replaces the redundant
 * {@code msgId != null && ConsumerRecord.class.isInstance(msgId)} check
 * ({@code isInstance} already returns {@code false} for {@code null}), the
 * unused local {@code record} is removed, and the log typo "throwed" is
 * corrected to "threw".
 */
@Override
public void fail(Object msgId) {
    try {
        if (msgId instanceof ConsumerRecord) {
            // getMessageId resolves the record for this id; the call is retained
            // in case it has side effects — TODO(review): confirm it is a pure lookup.
            getMessageId(msgId);
            this.flowedMsgCount--;
        }
        processedCount++;
        super.fail(msgId);
    } catch (Exception e) {
        LOG.error("DataSplittingSpout:Fail ack threw exception!", e);
    }
}
/**
 * Shuts the spout down: closes the parent and stops the receiver.
 *
 * <p>Fix: the receiver shutdown now runs in a {@code finally} block, so the
 * receiver thread is stopped even if {@code super.close()} throws (in the
 * original a throwing parent close would leak the receiver).
 */
@Override
public void close() {
    try {
        super.close();
    } finally {
        // Always release the receiver, regardless of parent-close outcome.
        spoutReceiver.shutdown();
    }
}
/**
 * Reports the parent configuration augmented with the consumed topics, the
 * consumer group id, and the bootstrap-servers / security-protocol Kafka
 * connection properties, all keyed under a {@code "config."} prefix.
 */
@Override
public Map<String, Object> getComponentConfiguration() {
    final String prefix = "config.";
    Map<String, Object> conf = super.getComponentConfiguration();
    if (conf == null) {
        conf = new HashMap<>();
    }
    conf.put(prefix + "topics", getTopicsString());
    conf.put(prefix + "groupid", kafkaSpoutConfig.getConsumerGroupId());
    conf.put(prefix + "bootstrap.servers", kafkaSpoutConfig.getKafkaProps().get("bootstrap.servers"));
    conf.put(prefix + "security.protocol", kafkaSpoutConfig.getKafkaProps().get("security.protocol"));
    return conf;
}
// Counts the acknowledgement locally before delegating to the parent spout.
@Override
public void ack(Object msgId) {
    ackCount++;
    super.ack(msgId);
}
// Runs the parent deactivation hook, then records that emission is paused.
@Override
public void deactivate() {
    super.deactivate();
    active = false;
}
// Runs the parent activation hook, then records that emission has resumed.
@Override
public void activate() {
    super.activate();
    active = true;
}