/** Builds a serializer for {@code type} by first extracting its Flink type information. */
@Override
protected <T> TypeSerializer<T> createSerializer(Class<T> type) {
    final TypeInformation<T> extractedInfo = TypeExtractor.getForClass(type);
    return extractedInfo.createSerializer(new ExecutionConfig());
}
}
/**
 * Creates a new named {@code OutputTag} with the given id.
 *
 * @param id The id of the created {@code OutputTag}.
 */
public OutputTag(String id) {
    // Null must be checked before emptiness: isEmpty() on null would NPE.
    Preconditions.checkNotNull(id, "OutputTag id cannot be null.");
    Preconditions.checkArgument(!id.isEmpty(), "OutputTag id must not be empty.");
    this.id = id;

    try {
        // Reflectively resolve the OutputTag's generic parameter from the (anonymous) subclass.
        this.typeInfo = TypeExtractor.createTypeInfo(this, OutputTag.class, getClass(), 0);
    } catch (InvalidTypesException e) {
        throw new InvalidTypesException("Could not determine TypeInformation for the OutputTag type. " +
            "The most common reason is forgetting to make the OutputTag an anonymous inner class. " +
            "It is also not possible to use generic type variables with OutputTags, such as 'Tuple2<A, B>'.", e);
    }
}
// NOTE(review): this snippet is truncated/garbled — the 'if (data.isEmpty())' block is never
// closed, a dangling '+ "StreamExecutionEnvironment#fromElements(...)", e);' concatenation has
// lost its surrounding throw/catch, and 'first' is not defined in the visible code. Presumably
// extracted from StreamExecutionEnvironment#fromElements; restore the full method from the
// original source before editing.
Preconditions.checkNotNull(data, "Collection must not be null"); if (data.isEmpty()) { throw new IllegalArgumentException("Collection must not be empty"); typeInfo = TypeExtractor.getForObject(first); + "StreamExecutionEnvironment#fromElements(Collection, TypeInformation)", e); return fromCollection(data, typeInfo);
/**
 * Infers the produced {@code Tuple2<KEYOUT, VALUEOUT>} type from the generic parameters of
 * the wrapped Hadoop {@link Reducer}.
 *
 * @return tuple type information for the reducer's output key/value pair
 */
@SuppressWarnings("unchecked")
@Override
public TypeInformation<Tuple2<KEYOUT, VALUEOUT>> getProducedType() {
    // Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>: indices 2 and 3 are the output types.
    Class<KEYOUT> outKeyClass =
        (Class<KEYOUT>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 2);
    Class<VALUEOUT> outValClass =
        (Class<VALUEOUT>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 3);

    final TypeInformation<KEYOUT> keyTypeInfo = TypeExtractor.getForClass(outKeyClass);
    // Fixed local-variable typo: was 'valueTypleInfo'.
    final TypeInformation<VALUEOUT> valueTypeInfo = TypeExtractor.getForClass(outValClass);
    return new TupleTypeInfo<>(keyTypeInfo, valueTypeInfo);
}
/**
 * Extracts the type information of the element type {@code T} handled by a {@code Partitioner}.
 *
 * @param partitioner  the partitioner to inspect
 * @param functionName the name used in error messages
 * @param allowMissing whether a missing-type placeholder may be returned instead of failing
 * @return the extracted type information for {@code T}
 */
@PublicEvolving
public static <T> TypeInformation<T> getPartitionerTypes(
        Partitioner<T> partitioner, String functionName, boolean allowMissing) {
    // Partitioner declares one type variable (output index 0) and has no input position (-1);
    // for lambdas the output type is taken from method argument 0.
    final int[] lambdaOutputTypeArgumentIndices = new int[] {0};
    return getUnaryOperatorReturnType(
        partitioner,
        Partitioner.class,
        -1,
        0,
        lambdaOutputTypeArgumentIndices,
        null,
        functionName,
        allowMissing);
}
/** * Adds a column defined by family, qualifier, and type to the table schema. * * @param family the family name * @param qualifier the qualifier name * @param clazz the data type of the qualifier */ void addColumn(String family, String qualifier, Class<?> clazz) { Preconditions.checkNotNull(family, "family name"); Preconditions.checkNotNull(qualifier, "qualifier name"); Preconditions.checkNotNull(clazz, "class type"); Map<String, TypeInformation<?>> qualifierMap = this.familyMap.get(family); if (!HBaseRowInputFormat.isSupportedType(clazz)) { // throw exception throw new IllegalArgumentException("Unsupported class type found " + clazz + ". " + "Better to use byte[].class and deserialize using user defined scalar functions"); } if (qualifierMap == null) { qualifierMap = new LinkedHashMap<>(); } qualifierMap.put(qualifier, TypeExtractor.getForClass(clazz)); familyMap.put(family, qualifierMap); }
/**
 * Configures the reader to read the CSV data and parse it to the given type. The type must be a
 * subclass of {@link Tuple}. The type information for the fields is obtained from the type
 * class. The type consequently needs to specify all generic field types of the tuple.
 *
 * @param targetType The class of the target type, needs to be a subclass of Tuple.
 * @return The DataSet representing the parsed CSV data.
 * @throws IllegalArgumentException if {@code targetType} is not a {@link Tuple} subclass
 */
public <T extends Tuple> DataSource<T> tupleType(Class<T> targetType) {
    Preconditions.checkNotNull(targetType, "The target type class must not be null.");
    if (!Tuple.class.isAssignableFrom(targetType)) {
        throw new IllegalArgumentException(
            "The target type must be a subclass of " + Tuple.class.getName());
    }

    @SuppressWarnings("unchecked")
    TupleTypeInfo<T> typeInfo = (TupleTypeInfo<T>) TypeExtractor.createTypeInfo(targetType);
    CsvInputFormat<T> inputFormat = new TupleCsvInputFormat<T>(
        path, this.lineDelimiter, this.fieldDelimiter, typeInfo, this.includedMask);

    // Removed dead code: a Class<?>[] of per-field type classes was built here but never used.
    configureInputFormat(inputFormat);
    return new DataSource<T>(executionContext, inputFormat, typeInfo, Utils.getCallLocationName());
}
/**
 * Create a new {@code StateDescriptor} with the given name and the given type information.
 *
 * <p>If this constructor fails (because it is not possible to describe the type via a class),
 * consider using the {@link #StateDescriptor(String, TypeInformation, Object)} constructor.
 *
 * @param name The name of the {@code StateDescriptor}.
 * @param type The class of the type of values in the state.
 * @param defaultValue The default value that will be set when requesting state without setting
 *     a value before.
 */
protected StateDescriptor(String name, Class<T> type, @Nullable T defaultValue) {
    this.name = checkNotNull(name, "name must not be null");
    checkNotNull(type, "type class must not be null");

    try {
        this.typeInfo = TypeExtractor.createTypeInfo(type);
    } catch (Exception e) {
        // Fixed copy-pasted example: the message previously referenced
        // 'new PravegaDeserializationSchema<>(...)', which belongs to a different API.
        throw new RuntimeException(
            "Could not create the type information for '" + type.getName() + "'. " +
            "The most common reason is failure to infer the generic type information, due to Java's type erasure. " +
            "In that case, please pass a 'TypeHint' instead of a class to describe the type. " +
            "For example, to describe 'Tuple2<String, String>' as a generic type, use " +
            "'TypeInformation.of(new TypeHint<Tuple2<String, String>>(){})'.", e);
    }
    this.defaultValue = defaultValue;
}
/**
 * Specifies a {@link KeySelector} for elements from the first input.
 *
 * @param keySelector The KeySelector to be used for extracting the first input's key for partitioning.
 * @return a {@code Where} clause keyed by the selector's extracted key type
 */
public <KEY> Where<KEY> where(KeySelector<T1, KEY> keySelector) {
    Preconditions.checkNotNull(keySelector);
    // Derive the key type from the selector and the first input's element type.
    final TypeInformation<KEY> extractedKeyType =
        TypeExtractor.getKeySelectorTypes(keySelector, input1.getType());
    return where(keySelector, extractedKeyType);
}
// NOTE(review): this test snippet is truncated/garbled — the anonymous MapFunction is cut off
// mid-body, and 'map', 'window', and 'flatten' are asserted against without their definitions
// being visible (assertions on 'map' even appear inside the map function itself). Presumably a
// type-info test from the DataStream API; restore the full method from the original source
// before editing.
@Test public void testTypeInfo() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream<Long> src1 = env.generateSequence(0, 0); assertEquals(TypeExtractor.getForClass(Long.class), src1.getType()); DataStream<Tuple2<Integer, String>> map = src1.map(new MapFunction<Long, Tuple2<Integer, String>>() { @Override public Tuple2<Integer, String> map(Long value) throws Exception { assertEquals(TypeExtractor.getForObject(new Tuple2<>(0, "")), map.getType()); assertEquals(TypeExtractor.getForClass(String.class), window.getType()); assertEquals(TypeExtractor.getForClass(CustomPOJO.class), flatten.getType());
/**
 * Creates the parameterized test; infers the result type and its serializer from a sample
 * record.
 */
public TypeSerializerFormatTest(int numberOfTuples, long blockSize, int parallelism) {
    super(numberOfTuples, blockSize, parallelism);

    // Use record 0 as a representative sample for type extraction.
    resultType = TypeExtractor.getForObject(getRecord(0));
    serializer = resultType.createSerializer(new ExecutionConfig());
}
// NOTE(review): truncated snippet — the TypeExtractor.getUnaryOperatorReturnType(...) call is
// cut off after its second argument and the enclosing method signature is not visible.
// Presumably from a Gelly graph 'translate' utility; restore the full method from the original
// source before editing.
Preconditions.checkNotNull(edges); Preconditions.checkNotNull(translator); TypeInformation<NEW> newType = TypeExtractor.getUnaryOperatorReturnType( translator, TranslateFunction.class,
/** Builds a serializer for {@code Event} records via Flink's type extraction. */
public static TypeSerializer<Event> createTypeSerializer() {
    @SuppressWarnings("unchecked")
    final TypeInformation<Event> eventTypeInfo =
        (TypeInformation<Event>) TypeExtractor.createTypeInfo(Event.class);
    return eventTypeInfo.createSerializer(new ExecutionConfig());
}
}
private <K> void testKeyRejection(KeySelector<Tuple2<Integer[], String>, K> keySelector, TypeInformation<K> expectedKeyType) { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream<Tuple2<Integer[], String>> input = env.fromElements( new Tuple2<>(new Integer[] {1, 2}, "barfoo") ); Assert.assertEquals(expectedKeyType, TypeExtractor.getKeySelectorTypes(keySelector, input.getType())); // adjust the rule expectedException.expect(InvalidProgramException.class); expectedException.expectMessage(new StringStartsWith("Type " + expectedKeyType + " cannot be used as key.")); input.keyBy(keySelector); }
/**
 * Builds a serializer for the given subclass; when the result is a {@code PojoSerializer},
 * aligns its base-class field order with this serializer's.
 */
private TypeSerializer<?> createSubclassSerializer(Class<?> subclass) {
    final TypeSerializer<?> subSerializer =
        TypeExtractor.createTypeInfo(subclass).createSerializer(executionConfig);

    if (subSerializer instanceof PojoSerializer) {
        // Keep the inherited fields in the same order as in this (superclass) serializer.
        ((PojoSerializer<?>) subSerializer).copyBaseFieldOrder(this);
    }
    return subSerializer;
}
/** All three extraction entry points must agree on the type info for {@code IntLike}. */
@Test
public void testSimpleType() {
    assertEquals(INT_TYPE_INFO, TypeExtractor.createTypeInfo(IntLike.class));
    assertEquals(INT_TYPE_INFO, TypeExtractor.getForClass(IntLike.class));
    assertEquals(INT_TYPE_INFO, TypeExtractor.getForObject(new IntLike()));
}
/**
 * Creates a python data stream from the given iterator.
 *
 * <p>Note that this operation will result in a non-parallel data stream source, i.e.,
 * a data stream source with a parallelism of one.</p>
 *
 * @param iter The iterator of elements to create the data stream from
 * @return The data stream representing the elements in the iterator
 * @see StreamExecutionEnvironment#fromCollection(java.util.Iterator, org.apache.flink.api.common.typeinfo.TypeInformation)
 */
public PythonDataStream from_collection(Iterator<Object> iter) throws Exception {
    // Elements coming from Python are untyped on the Java side, hence Object type info.
    final DataStream<Object> rawStream =
        env.addSource(new PythonIteratorFunction(iter), TypeExtractor.getForClass(Object.class));
    return new PythonDataStream<>(rawStream.map(new AdapterMap<>()));
}
/** Extraction from the class and from an instance must agree for {@code AllPublic}. */
@Test
public void testPojoAllPublic() {
    final TypeInformation<?> typeFromClass = TypeExtractor.createTypeInfo(AllPublic.class);
    checkAllPublicAsserts(typeFromClass);

    final TypeInformation<?> typeFromInstance = TypeExtractor.getForObject(new AllPublic());
    checkAllPublicAsserts(typeFromInstance);
}
/**
 * Infers the produced type {@code T} from generic parameter 3 of the wrapped
 * {@code NeighborsFunctionWithVertexValue}.
 */
@Override
public TypeInformation<T> getProducedType() {
    return TypeExtractor.createTypeInfo(
        NeighborsFunctionWithVertexValue.class, function.getClass(), 3, null, null);
}
}
/** Map-return-type inference through a parameterized POJO field must resolve to Byte. */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testGenericPojoTypeInference5() {
    final MyMapper5<Byte> mapper = new MyMapper5<>();
    final TypeInformation<?> returnType =
        TypeExtractor.getMapReturnTypes(
            mapper,
            TypeInformation.of(new TypeHint<PojoWithParameterizedFields2<Byte>>(){}));
    Assert.assertEquals(BasicTypeInfo.BYTE_TYPE_INFO, returnType);
}