public GafferMapFunction() {
    serialisationType = (DeserializationSchema) new SimpleStringSchema();
}

public void setConsumeAs(final Class<T> consumeAs) {
    if (null == consumeAs || String.class == consumeAs) {
        serialisationType = (DeserializationSchema) new SimpleStringSchema();
    } else if (byte[].class == consumeAs) {
        serialisationType = (DeserializationSchema) new ByteArraySchema();
    } else {
        throw new IllegalArgumentException("This Flink handler cannot consume records as "
                + consumeAs + ". You must use either byte[] or String.");
    }
}
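// ByteArraySchema above is a Gaffer-side class rather than part of the Flink distribution.
// A minimal pass-through implementation along the following lines would satisfy the cast;
// this is a sketch assuming the pre-1.x DeserializationSchema interface that the
// FlinkKafkaConsumer08x snippets below also use.
import org.apache.flink.api.common.typeinfo.PrimitiveArrayTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.util.serialization.DeserializationSchema;

public class ByteArraySchema implements DeserializationSchema<byte[]> {
    @Override
    public byte[] deserialize(final byte[] message) {
        // Hand the raw Kafka record bytes through unchanged; downstream operators parse them.
        return message;
    }

    @Override
    public boolean isEndOfStream(final byte[] nextElement) {
        // Kafka topics are unbounded, so there is no end-of-stream marker.
        return false;
    }

    @Override
    public TypeInformation<byte[]> getProducedType() {
        return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
    }
}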
/**
 * Create Kafka Source.
 */
private static FlinkKafkaConsumer082<String> kafkaSource(BenchmarkConfig config) {
    return new FlinkKafkaConsumer082<>(
            config.kafkaTopic,
            new SimpleStringSchema(),
            config.getParameters().getProperties());
}
/**
 * Set up the Kafka source.
 */
private static FlinkKafkaConsumer08<String> kafkaSource(BenchmarkConfig config) {
    return new FlinkKafkaConsumer08<>(
            config.kafkaTopic,
            new SimpleStringSchema(),
            config.getParameters().getProperties());
}
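// The BenchmarkConfig variants above read their connection settings from ParameterTool
// properties. Spelled out by hand for the 0.8.x consumer, the required entries look like
// this (a sketch with placeholder host names and topic):
Properties props = new Properties();
props.setProperty("bootstrap.servers", "localhost:9092"); // Kafka broker list
props.setProperty("zookeeper.connect", "localhost:2181"); // the 0.8 consumer also needs ZooKeeper
props.setProperty("group.id", "flink-benchmark");         // consumer group for offset tracking
FlinkKafkaConsumer08<String> consumer =
        new FlinkKafkaConsumer08<>("benchmark-topic", new SimpleStringSchema(), props);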
private SourceFunction<String> getKafkaConsumer(KafkaTransportProtocol protocol) {
    if (protocol.getTopicDefinition() instanceof SimpleTopicDefinition) {
        return new FlinkKafkaConsumer010<>(
                protocol.getTopicDefinition().getActualTopicName(),
                new SimpleStringSchema(),
                getProperties(protocol));
    } else {
        String patternTopic = replaceWildcardWithPatternFormat(
                protocol.getTopicDefinition().getActualTopicName());
        return new FlinkKafkaConsumer010<>(
                Pattern.compile(patternTopic),
                new SimpleStringSchema(),
                getProperties(protocol));
    }
}
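// replaceWildcardWithPatternFormat is a private helper of the surrounding class and is
// not shown in the snippet. A plausible sketch (assumed, not the verified implementation)
// escapes literal dots and expands the '*' wildcard into its regex form so that Kafka's
// pattern subscription matches the intended topics:
private String replaceWildcardWithPatternFormat(String topicName) {
    return topicName.replace(".", "\\.").replace("*", ".*");
}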
/**
 * Choose either Kafka or the local data generator as the source.
 */
private static DataStream<String> sourceStream(BenchmarkConfig config, StreamExecutionEnvironment env) {
    RichParallelSourceFunction<String> source;
    String sourceName;
    if (config.useLocalEventGenerator) {
        source = new HighKeyCardinalityGeneratorSource(config);
        sourceName = "EventGenerator";
    } else {
        source = new FlinkKafkaConsumer082<>(
                config.kafkaTopic,
                new SimpleStringSchema(),
                config.getParameters().getProperties());
        sourceName = "Kafka";
    }
    return env.addSource(source, sourceName);
}
public static void main(String[] args) {
    Pipeline p = initializePipeline(args);
    KafkaOptions options = getOptions(p);

    PCollection<String> words = p.apply(Create.of("These", "are", "some", "words"));

    FlinkKafkaProducer08<String> kafkaSink = new FlinkKafkaProducer08<>(
            options.getKafkaTopic(), new SimpleStringSchema(), getKafkaProps(options));

    words.apply(Write.to(UnboundedFlinkSink.of(kafkaSink)));
    p.run();
}
public static void main(String[] args) throws Exception {
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setGlobalJobParameters(parameterTool);

    DataStream<String> rawMessageStream = env.addSource(new FlinkKafkaConsumer082<>(
            parameterTool.getRequired("kafka.topic"),
            new SimpleStringSchema(),
            parameterTool.getProperties()));

    rawMessageStream.print();

    env.execute();
}
public static void main(String[] args) {
    Pipeline p = initializePipeline(args);
    KafkaOptions options = getOptions(p);

    FlinkKafkaConsumer08<String> kafkaConsumer = new FlinkKafkaConsumer08<>(
            options.getKafkaTopic(), new SimpleStringSchema(), getKafkaProps(options));

    p.apply(Read.from(UnboundedFlinkSource.of(kafkaConsumer))).setCoder(StringUtf8Coder.of())
            .apply(ParDo.of(new PrintFn<>()));
    p.run();
}
public static void main(String[] args) throws Exception {
    // create execution environment
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // parse user parameters
    ParameterTool parameterTool = ParameterTool.fromArgs(args);

    DataStream<String> messageStream = env.addSource(new FlinkKafkaConsumer082<>(
            parameterTool.getRequired("topic"),
            new SimpleStringSchema(),
            parameterTool.getProperties()));

    // print() writes the contents of the stream to the TaskManager's standard out stream.
    // The rebalance() call causes a repartitioning of the data so that all machines
    // see the messages (for example, in cases where "num kafka partitions" < "num flink operators").
    messageStream.rebalance().map(new MapFunction<String, String>() {
        private static final long serialVersionUID = -6867736771747690202L;

        @Override
        public String map(String value) throws Exception {
            return "Kafka and Flink says: " + value;
        }
    }).print();

    env.execute();
}
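// Since the job above reads everything via ParameterTool.fromArgs, it would be launched
// with flags along these lines (illustrative values):
//
//   --topic test --bootstrap.servers localhost:9092 \
//   --zookeeper.connect localhost:2181 --group.id myGroup
//
// parameterTool.getProperties() forwards every flag as a Kafka property, which is how the
// 0.8 consumer receives bootstrap.servers, zookeeper.connect, and group.id.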
// This snippet is truncated at both ends; the source construction is assumed to follow
// the same addSource(consumer) pattern as the jobs above.
DataStream<String> kafkaSourceStream = env
        .addSource(new FlinkKafkaConsumer010<>(topic, // consumer version assumed
                new SimpleStringSchema(), properties))
        .name("KafkaSource");

DataStream<Object[]> tweeterStream = kafkaSourceStream