public static ChannelSelector create(List<Channel> channels,
    Map<String, String> config) {
  ChannelSelector selector = getSelectorForType(config.get(
      BasicConfigurationConstants.CONFIG_TYPE));
  selector.setChannels(channels);
  Context context = new Context();
  context.putAll(config);
  Configurables.configure(selector, context);
  return selector;
}
Configurables.configure(group, groupConf);
sinkRunnerMap.put(comp.getComponentName(),
    new SinkRunner(group.getProcessor()));
// ...
sinkMap.add(entry.getValue());
pr.setSinks(sinkMap);
Configurables.configure(pr, new Context());
sinkRunnerMap.put(entry.getKey(), new SinkRunner(pr));
} catch (Exception e) {
public static ChannelSelector create(List<Channel> channels,
    ChannelSelectorConfiguration conf) {
  String type = ChannelSelectorType.REPLICATING.toString();
  if (conf != null) {
    type = conf.getType();
  }
  ChannelSelector selector = getSelectorForType(type);
  selector.setChannels(channels);
  Configurables.configure(selector, conf);
  return selector;
}
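// A minimal usage sketch (not taken from the Flume sources): wiring two
// hypothetical MemoryChannels through the map-based factory overload above.
// The "replicating" type value follows Flume's ChannelSelectorType naming;
// the channels and config map here are illustrative assumptions.
Channel c1 = new MemoryChannel();
Channel c2 = new MemoryChannel();
Map<String, String> selectorConfig = new HashMap<>();
selectorConfig.put(BasicConfigurationConstants.CONFIG_TYPE, "replicating");
ChannelSelector selector =
    ChannelSelectorFactory.create(Arrays.asList(c1, c2), selectorConfig);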
Configurables.configure(processor, context);
return processor;
@SuppressWarnings("unchecked") public static SinkProcessor getProcessor(ComponentConfiguration conf, List<Sink> sinks) { String typeStr = conf.getType(); SinkProcessor processor; SinkProcessorType type = SinkProcessorType.DEFAULT; try { type = SinkProcessorType.valueOf(typeStr.toUpperCase(Locale.ENGLISH)); } catch (Exception ex) { logger.warn("Sink type {} does not exist, using default", typeStr); } Class<? extends SinkProcessor> processorClass = null; try { processorClass = (Class<? extends SinkProcessor>) Class.forName(type .getSinkProcessorClassName()); } catch (Exception ex) { throw new FlumeException("Unable to load sink processor type: " + typeStr + ", class: " + type.getSinkProcessorClassName(), ex); } try { processor = processorClass.newInstance(); } catch (Exception e) { throw new FlumeException("Unable to create processor, type: " + typeStr + ", class: " + type.getSinkProcessorClassName(), e); } processor.setSinks(sinks); Configurables.configure(processor, conf); return processor; }
/**
 * Tests that sub-properties (kafka.producer.*) apply correctly across
 * multiple invocations of configure() (fix for FLUME-2857).
 */
@Test
public void testDefaultSettingsOnReConfigure() {
  String sampleProducerProp = "compression.type";
  String sampleProducerVal = "snappy";
  Context context = prepareDefaultContext();
  context.put(KafkaSinkConstants.KAFKA_PRODUCER_PREFIX + sampleProducerProp,
      sampleProducerVal);
  KafkaSink kafkaSink = new KafkaSink();
  Configurables.configure(kafkaSink, context);
  Assert.assertEquals(sampleProducerVal,
      kafkaSink.getKafkaProps().getProperty(sampleProducerProp));
  context = prepareDefaultContext();
  Configurables.configure(kafkaSink, context);
  Assert.assertNull(kafkaSink.getKafkaProps().getProperty(sampleProducerProp));
}
private Sink.Status prepareAndSend(Context context, String msg)
    throws EventDeliveryException {
  Sink kafkaSink = new KafkaSink();
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();
  Transaction tx = memoryChannel.getTransaction();
  tx.begin();
  Event event = EventBuilder.withBody(msg.getBytes());
  memoryChannel.put(event);
  tx.commit();
  tx.close();
  return kafkaSink.process();
}
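// prepareDefaultContext() is referenced by the tests in this excerpt but not
// shown. A minimal sketch of what such a helper might look like, assuming a
// local test broker; the key names mirror KafkaSink's "kafka."-prefixed
// configuration, but the values and the batch-size setting are assumptions.
private Context prepareDefaultContext() {
  Context context = new Context();
  context.put("kafka.bootstrap.servers", "localhost:9092"); // assumed test broker
  context.put("kafka.topic", DEFAULT_TOPIC);                // default topic used by the tests
  context.put("flumeBatchSize", "1");                       // assumed batch size
  return context;
}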
@Test
public void testTopicAndKeyFromHeader() {
  Sink kafkaSink = new KafkaSink();
  Context context = prepareDefaultContext();
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();
  String msg = "test-topic-and-key-from-header";
  Map<String, String> headers = new HashMap<String, String>();
  headers.put("topic", TestConstants.CUSTOM_TOPIC);
  headers.put("key", TestConstants.CUSTOM_KEY);
  Transaction tx = memoryChannel.getTransaction();
  tx.begin();
  Event event = EventBuilder.withBody(msg.getBytes(), headers);
  memoryChannel.put(event);
  tx.commit();
  tx.close();
  try {
    Sink.Status status = kafkaSink.process();
    if (status == Sink.Status.BACKOFF) {
      fail("Error Occurred");
    }
  } catch (EventDeliveryException ex) {
    // ignore
  }
  checkMessageArrived(msg, TestConstants.CUSTOM_TOPIC);
}
@Test
public void testEmptyChannel() throws EventDeliveryException {
  Sink kafkaSink = new KafkaSink();
  Context context = prepareDefaultContext();
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();
  Sink.Status status = kafkaSink.process();
  if (status != Sink.Status.BACKOFF) {
    fail("Error Occurred");
  }
  ConsumerRecords recs = pollConsumerRecords(DEFAULT_TOPIC, 2);
  assertNotNull(recs);
  assertEquals(recs.count(), 0);
}
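// pollConsumerRecords(...) is another helper the tests rely on but this
// excerpt does not define. One plausible sketch, assuming a shared
// KafkaConsumer<String, String> field named "consumer"; reading the second
// parameter as a poll timeout in seconds is also an assumption.
private ConsumerRecords<String, String> pollConsumerRecords(String topic,
    int maxSeconds) {
  consumer.subscribe(Collections.singletonList(topic));
  ConsumerRecords<String, String> records = ConsumerRecords.empty();
  long deadline = System.currentTimeMillis() + maxSeconds * 1000L;
  while (records.isEmpty() && System.currentTimeMillis() < deadline) {
    records = consumer.poll(Duration.ofMillis(500));
  }
  return records;
}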
@Test
public void testKafkaProperties() {
  KafkaSink kafkaSink = new KafkaSink();
  Context context = new Context();
  context.put(KAFKA_PREFIX + TOPIC_CONFIG, "");
  context.put(KAFKA_PRODUCER_PREFIX + ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
      "override.default.serializer");
  context.put("kafka.producer.fake.property", "kafka.property.value");
  context.put("kafka.bootstrap.servers", "localhost:9092,localhost:9092");
  context.put("brokerList", "real-broker-list");
  Configurables.configure(kafkaSink, context);
  Properties kafkaProps = kafkaSink.getKafkaProps();
  // check that we have defaults set
  assertEquals(kafkaProps.getProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG),
      DEFAULT_KEY_SERIALIZER);
  // check that kafka properties override the default and get the correct name
  assertEquals(kafkaProps.getProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG),
      "override.default.serializer");
  // check that any kafka.producer.* property gets in
  assertEquals(kafkaProps.getProperty("fake.property"), "kafka.property.value");
  // check that the documented property overrides defaults
  assertEquals(kafkaProps.getProperty("bootstrap.servers"),
      "localhost:9092,localhost:9092");
}
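// The test above exercises KafkaSink's precedence rules for producer
// properties. A simplified sketch of the underlying idea (not KafkaSink's
// actual code): keys under the "kafka.producer." prefix are stripped of the
// prefix and copied into the Properties handed to the Kafka producer,
// overriding any defaults set earlier.
private static Properties toProducerProps(Map<String, String> flumeConfig) {
  final String prefix = "kafka.producer.";
  Properties props = new Properties();
  // assumed default; KafkaSink ships its own default serializers
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
      "org.apache.kafka.common.serialization.StringSerializer");
  for (Map.Entry<String, String> e : flumeConfig.entrySet()) {
    if (e.getKey().startsWith(prefix)) {
      props.put(e.getKey().substring(prefix.length()), e.getValue());
    }
  }
  return props;
}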
@Test
public void testReplaceSubStringOfTopicWithHeaders() {
  String topic = TestConstants.HEADER_1_VALUE + "-topic";
  Sink kafkaSink = new KafkaSink();
  Context context = prepareDefaultContext();
  context.put(TOPIC_CONFIG, TestConstants.HEADER_TOPIC);
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();
  String msg = "test-replace-substring-of-topic-with-headers";
  Map<String, String> headers = new HashMap<>();
  headers.put(TestConstants.HEADER_1_KEY, TestConstants.HEADER_1_VALUE);
  Transaction tx = memoryChannel.getTransaction();
  tx.begin();
  Event event = EventBuilder.withBody(msg.getBytes(), headers);
  memoryChannel.put(event);
  tx.commit();
  tx.close();
  try {
    Sink.Status status = kafkaSink.process();
    if (status == Sink.Status.BACKOFF) {
      fail("Error Occurred");
    }
  } catch (EventDeliveryException ex) {
    // ignore
  }
  checkMessageArrived(msg, topic);
}
@OnScheduled
public void onScheduled(final ProcessContext context) {
  try {
    channel = new NifiSinkSessionChannel(SUCCESS, FAILURE);
    channel.start();
    sink = SINK_FACTORY.create(context.getProperty(SOURCE_NAME).getValue(),
        context.getProperty(SINK_TYPE).getValue());
    sink.setChannel(channel);
    String flumeConfig = context.getProperty(FLUME_CONFIG).getValue();
    String agentName = context.getProperty(AGENT_NAME).getValue();
    String sinkName = context.getProperty(SOURCE_NAME).getValue();
    Configurables.configure(sink,
        getFlumeSinkContext(flumeConfig, agentName, sinkName));
    sink.start();
  } catch (Throwable th) {
    getLogger().error("Error creating sink", th);
    throw Throwables.propagate(th);
  }
}
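// getFlumeSinkContext(...) is not shown in this excerpt. A plausible sketch,
// assuming it parses the Flume properties text and keeps only the keys scoped
// to this agent's sink. The "agentName.sinks.sinkName." prefix convention
// comes from Flume's configuration format; the parsing details here are
// assumptions, not the processor's actual code.
private Context getFlumeSinkContext(String flumeConfig, String agentName,
    String sinkName) {
  Properties props = new Properties();
  try (StringReader reader = new StringReader(flumeConfig)) {
    props.load(reader);
  } catch (IOException e) {
    throw new RuntimeException("Unable to parse Flume config", e);
  }
  String prefix = agentName + ".sinks." + sinkName + ".";
  Map<String, String> parameters = new HashMap<>();
  for (String key : props.stringPropertyNames()) {
    if (key.startsWith(prefix)) {
      parameters.put(key.substring(prefix.length()), props.getProperty(key));
    }
  }
  return new Context(parameters);
}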
Context context = prepareDefaultContext();
context.put(KafkaSinkConstants.TOPIC_OVERRIDE_HEADER, customTopicHeader);
Configurables.configure(kafkaSink, context);
Channel memoryChannel = new MemoryChannel();
Configurables.configure(memoryChannel, context);
kafkaSink.setChannel(memoryChannel);
kafkaSink.start();
@OnScheduled
public void onScheduled(final ProcessContext context) {
  try {
    source = SOURCE_FACTORY.create(
        context.getProperty(SOURCE_NAME).getValue(),
        context.getProperty(SOURCE_TYPE).getValue());
    String flumeConfig = context.getProperty(FLUME_CONFIG).getValue();
    String agentName = context.getProperty(AGENT_NAME).getValue();
    String sourceName = context.getProperty(SOURCE_NAME).getValue();
    Configurables.configure(source,
        getFlumeSourceContext(flumeConfig, agentName, sourceName));
    if (source instanceof PollableSource) {
      source.setChannelProcessor(new ChannelProcessor(
          new NifiChannelSelector(pollableSourceChannel)));
      source.start();
    }
  } catch (Throwable th) {
    getLogger().error("Error creating source", th);
    throw Throwables.propagate(th);
  }
}
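// A hedged sketch of the matching teardown. @OnStopped is a real NiFi
// lifecycle annotation (org.apache.nifi.annotation.lifecycle.OnStopped), but
// this method body is an assumption, not the processor's actual code.
@OnStopped
public void stopped() {
  if (source != null) {
    source.stop();
  }
}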
@Test
public void testOldProperties() {
  KafkaSink kafkaSink = new KafkaSink();
  Context context = new Context();
  context.put("topic", "test-topic");
  context.put(OLD_BATCH_SIZE, "300");
  context.put(BROKER_LIST_FLUME_KEY, "localhost:9092,localhost:9092");
  context.put(REQUIRED_ACKS_FLUME_KEY, "all");
  Configurables.configure(kafkaSink, context);
  Properties kafkaProps = kafkaSink.getKafkaProps();
  assertEquals(kafkaSink.getTopic(), "test-topic");
  assertEquals(kafkaSink.getBatchSize(), 300);
  assertEquals(kafkaProps.getProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG),
      "localhost:9092,localhost:9092");
  assertEquals(kafkaProps.getProperty(ProducerConfig.ACKS_CONFIG), "all");
}
@Test
public void testDefaultTopic() {
  Sink kafkaSink = new KafkaSink();
  Context context = prepareDefaultContext();
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();
  String msg = "default-topic-test";
  Transaction tx = memoryChannel.getTransaction();
  tx.begin();
  Event event = EventBuilder.withBody(msg.getBytes());
  memoryChannel.put(event);
  tx.commit();
  tx.close();
  try {
    Sink.Status status = kafkaSink.process();
    if (status == Sink.Status.BACKOFF) {
      fail("Error Occurred");
    }
  } catch (EventDeliveryException ex) {
    // ignore
  }
  checkMessageArrived(msg, DEFAULT_TOPIC);
}
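// checkMessageArrived(...) is used throughout the tests above but not defined
// in this excerpt. One plausible sketch, reusing the pollConsumerRecords(...)
// helper sketched earlier; treating the record value as a String is an
// assumption about the test consumer's deserializer.
private void checkMessageArrived(String msg, String topic) {
  ConsumerRecords<String, String> recs = pollConsumerRecords(topic, 10);
  assertNotNull(recs);
  boolean found = false;
  for (ConsumerRecord<String, String> rec : recs) {
    if (msg.equals(rec.value())) {
      found = true;
      break;
    }
  }
  assertTrue("Message not found in topic " + topic, found);
}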