/**
 * Adds a type information hint about the return type of this operator. This method
 * can be used in cases where Flink cannot determine automatically what the produced
 * type of a function is. That can be the case if the function uses generic type variables
 * in the return type that cannot be inferred from the input type.
 *
 * <p>Classes can be used as type hints for non-generic types (classes without generic parameters),
 * but not for generic types like for example Tuples. For those generic types, please
 * use the {@link #returns(TypeHint)} method.
 *
 * @param typeClass The class of the returned data type.
 * @return This operator with the type information corresponding to the given type class.
 */
public SingleOutputStreamOperator<T> returns(Class<T> typeClass) {
    requireNonNull(typeClass, "type class must not be null.");

    try {
        return returns(TypeInformation.of(typeClass));
    } catch (InvalidTypesException e) {
        throw new InvalidTypesException("Cannot infer the type information from the class alone. " +
                "This is most likely because the class represents a generic type. In that case, " +
                "please use the 'returns(TypeHint)' method instead.");
    }
}
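For context, a minimal, hedged usage sketch of this overload (the `MyPojo` class and the surrounding pipeline are hypothetical, not part of the snippet above): a lambda's return type is erased at runtime, so Flink cannot infer the produced type, and a Class hint restores it for non-generic types.

// Hypothetical sketch: MyPojo and the pipeline are invented for illustration.
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class ReturnsClassExample {
    // A plain (non-generic) POJO, so a Class hint is sufficient.
    public static class MyPojo {
        public String value;
        public MyPojo() {}
        public MyPojo(String value) { this.value = value; }
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<MyPojo> result = env.fromElements("a", "b")
                // the lambda's return type is erased, so Flink cannot infer it
                .map(MyPojo::new)
                // supply the produced type explicitly; for generic types such as
                // Tuple2<...> this would fail, and returns(TypeHint) is needed instead
                .returns(MyPojo.class);
        result.print();
        env.execute("returns(Class) sketch");
    }
}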
/**
 * Adds a type information hint about the return type of this operator. This method
 * can be used in cases where Flink cannot determine automatically what the produced
 * type of a function is. That can be the case if the function uses generic type variables
 * in the return type that cannot be inferred from the input type.
 *
 * <p>Use this method as follows:
 * <pre>{@code
 * DataStream<Tuple2<String, Double>> result =
 *     stream.flatMap(new FunctionWithNonInferrableReturnType())
 *           .returns(new TypeHint<Tuple2<String, Double>>(){});
 * }</pre>
 *
 * @param typeHint The type hint for the returned data type.
 * @return This operator with the type information corresponding to the given type hint.
 */
public SingleOutputStreamOperator<T> returns(TypeHint<T> typeHint) {
    requireNonNull(typeHint, "TypeHint must not be null");

    try {
        return returns(TypeInformation.of(typeHint));
    } catch (InvalidTypesException e) {
        throw new InvalidTypesException("Cannot infer the type information from the type hint. " +
                "Make sure that the TypeHint does not use any generic type variables.");
    }
}
private static KeyedStream<Event, Integer> applyTestStatefulOperator(
        String name,
        JoinFunction<Event, ComplexPayload, ComplexPayload> stateFunc,
        KeyedStream<Event, Integer> source,
        List<TypeSerializer<ComplexPayload>> stateSer,
        List<Class<ComplexPayload>> stateClass) {
    return source
            .map(createArtificialKeyedStateMapper(e -> e, stateFunc, stateSer, stateClass))
            .name(name)
            .uid(name)
            .returns(Event.class)
            .keyBy(Event::getKey);
}
.connect(timedOutStream)
        .map(new CoMapTimeout<>())
        .returns(outTypeInfo);
        Collections.singletonList(ComplexPayload.class) // KryoSerializer via type extraction
).returns(Event.class).name(KEYED_STATE_OPER_NAME + "_Kryo_and_Custom_Stateful").uid("0002");

        Collections.singletonList(ComplexPayloadAvro.class) // AvroSerializer via type extraction
).returns(Event.class).name(KEYED_STATE_OPER_NAME + "_Avro").uid("0003");

        .returns(Event.class)
        .name(OPERATOR_STATE_OPER_NAME).uid("0004");
source.map(new TestMap<Long, Long>()).returns(Long.class).print();
source.flatMap(new TestFlatMap<Long, Long>()).returns(new TypeHint<Long>(){}).print();
source.connect(source).map(new TestCoMap<Long, Long, Integer>())
        .returns(BasicTypeInfo.INT_TYPE_INFO).print();
source.connect(source).flatMap(new TestCoFlatMap<Long, Long, Integer>())
        .returns(BasicTypeInfo.INT_TYPE_INFO).print();
source.connect(source).keyBy(new TestKeySelector<>(), new TestKeySelector<>(), Types.STRING);
source.coGroup(source).where(new TestKeySelector<>(), Types.STRING)
        .equalTo(new TestKeySelector<>(), Types.STRING);
source.keyBy((in) -> in)
        .intervalJoin(source.keyBy((in) -> in))
        .between(Time.milliseconds(10L), Time.milliseconds(10L))
        .process(new TestProcessJoinFunction<Long, Long, String>())
        .returns(Types.STRING);

source.map(new TestMap<Long, Long>()).returns(Long.class).getType());

    map.returns(String.class);
    fail();
} catch (Exception ignored) {}
private static List<DataStream<TaggedElement>> tagInputStreams(List<DataStream<?>> inputs) {
    TypeInformation<TaggedElement> typeInfo = createUnionTypeInfo(inputs);
    List<DataStream<TaggedElement>> taggedInputs = new ArrayList<>();

    int dataStreamIndex = 0;
    for (DataStream<?> input : inputs) {
        final DataStream<TaggedElement> transformed = input
                .map(new TaggingMap<>(dataStreamIndex))
                .returns(typeInfo);
        dataStreamIndex++;
        taggedInputs.add(transformed);
    }
    return taggedInputs;
}
/**
 * @param outStreamId The <code>streamId</code> to return as a data stream.
 * @param <T> The tuple type; it should match the stream definition. During the execution
 *            phase, the type information is built automatically from the stream definition.
 * @return The output stream as a Tuple
 * @see SiddhiTypeFactory
 */
public <T extends Tuple> DataStream<T> returns(String outStreamId) {
    TypeInformation<T> typeInformation =
            SiddhiTypeFactory.getTupleTypeInformation(siddhiContext.getAllEnrichedExecutionPlan(), outStreamId);
    return returns(Collections.singletonList(outStreamId))
            .map(value -> typeInformation.getTypeClass().cast(value.f1))
            .returns(typeInformation);
}
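For illustration, a hedged sketch of how this overload is typically invoked in the flink-siddhi style (the stream names, field names, and CQL query are assumptions, not taken from this file):

// Hypothetical sketch: "inputStream", "outputStream", the fields, and the query
// are invented. returns(String) resolves the Tuple type information from the
// Siddhi stream definition of "outputStream".
DataStream<Tuple2<String, Double>> output = SiddhiCEP
        .define("inputStream", input, "symbol", "price")  // assumed entry point
        .cql("from inputStream select symbol, price insert into outputStream")
        .returns("outputStream");                         // the overload shown above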
@Override
public DataStream<Row> getDataStream(StreamExecutionEnvironment execEnv) {
    return execEnv
            .addSource(this.taxiRideSource)
            .map(new TaxiRideToRow())
            .returns(getReturnType());
}
@Override
public DataStream<Row> getDataStream(StreamExecutionEnvironment execEnv) {
    return execEnv
            .addSource(this.taxiFareSource)
            .map(new TaxiFareToRow())
            .returns(getReturnType());
}
public DataStream<String> constructTestPipeline(DataStream<String> source) {
    return source
            .map(s -> new HashMap<>(ImmutableMap.of(s.split(",")[0], s.split(",")[1])))
            .returns(new TypeHint<HashMap<String, String>>() {})
            .keyBy(map -> map.keySet().iterator().next())
            .timeWindow(Time.milliseconds(1))
            .reduce((l, r) -> l.values().iterator().next().compareTo(r.values().iterator().next()) > 0 ? r : l)
            .uid(REDUCER_UID)
            // convert output type to be compatible with BravoTestPipeline#runTestPipeline
            .map(Map::toString);
}
public DataStream<String> constructTestPipeline(DataStream<String> source) {
    return source
            .map(s -> {
                String[] split = s.split(",");
                return Tuple2.of(Integer.parseInt(split[0]), Integer.parseInt(split[1]));
            })
            .returns(new TypeHint<Tuple2<Integer, Integer>>() {})
            .keyBy(0)
            .map(new MapCounter())
            .uid("hello");
}
public DataStream<String> pipelineWithStringState(DataStream<String> source) {
    return source
            .map(Integer::parseInt)
            .returns(Integer.class)
            .keyBy(i -> i)
            .map(new MapWithStringState())
            .uid("hello")
            .map(Tuple2::toString);
}
public DataStream<String> pipelineWithIntState(DataStream<String> source) {
    return source
            .map(Integer::parseInt)
            .returns(Integer.class)
            .keyBy(i -> i)
            .map(new MapWithIntState())
            .uid("hello")
            .map(Tuple2::toString);
}