/**
 * Maps each {@link TypeInformation} entry to its type class.
 *
 * @param fieldTypes the type information entries to inspect
 * @return the type class of each entry, in the same order
 */
private static Class<?>[] extractTypeClasses(TypeInformation[] fieldTypes) {
	final Class<?>[] typeClasses = new Class<?>[fieldTypes.length];
	int index = 0;
	for (TypeInformation fieldType : fieldTypes) {
		typeClasses[index++] = fieldType.getTypeClass();
	}
	return typeClasses;
}
/**
 * Creates type information for a value array with the given element type.
 *
 * @param valueType type information of the array elements; may be {@code null},
 *                  in which case the element class is also {@code null}
 */
public ValueArrayTypeInfo(TypeInformation<T> valueType) {
	this.valueType = valueType;
	if (valueType == null) {
		this.type = null;
	} else {
		this.type = valueType.getTypeClass();
	}
}
/**
 * Converts an Avro list into an object array whose component type matches the
 * given element type information; each element is converted individually.
 *
 * @param elementSchema Avro schema of the list elements
 * @param elementInfo   Flink type information of the list elements
 * @param object        the incoming value, expected to be a {@link List}
 * @return a typed array holding the converted elements
 */
private Object[] convertToObjectArray(Schema elementSchema, TypeInformation<?> elementInfo, Object object) {
	final List<?> elements = (List<?>) object;
	// allocate an array whose component type matches the element type information
	final Object[] result = (Object[]) Array.newInstance(elementInfo.getTypeClass(), elements.size());
	int index = 0;
	for (Object element : elements) {
		result[index++] = convertAvroType(elementSchema, elementInfo, element);
	}
	return result;
}
/** Returns the raw class behind the wrapped result's type information. */
@Override public Type getRawType() { return result.getType().getTypeClass(); }
/**
 * Creates an accessor for the element at the given position of an array.
 *
 * @param pos      the array index to access; must be non-negative
 * @param typeInfo type information of the array; must not be {@code null}
 * @throws CompositeType.InvalidFieldReferenceException if {@code pos} is negative
 */
public ArrayFieldAccessor(int pos, TypeInformation typeInfo) {
	if (pos < 0) {
		// Integer.toString(pos) produces the same text as ((Integer) pos).toString()
		// without the needless boxing of the primitive.
		throw new CompositeType.InvalidFieldReferenceException("The " + Integer.toString(pos) + ". field selected on"
			+ " an array, which is an invalid index.");
	}
	checkNotNull(typeInfo, "typeInfo must not be null.");
	this.pos = pos;
	// the accessed field's type is the component type of the array
	this.fieldType = BasicTypeInfo.getInfoFor(typeInfo.getTypeClass().getComponentType());
}
/**
 * Adds another aggregation on the given tuple field to this operator.
 *
 * @param function the aggregation to apply
 * @param field    the zero-based tuple field position
 * @return this operator, for fluent chaining
 * @throws IllegalArgumentException if the field position is out of range
 */
public AggregateOperator<IN> and(Aggregations function, int field) {
	Preconditions.checkNotNull(function);

	TupleTypeInfoBase<?> tupleType = (TupleTypeInfoBase<?>) getType();
	if (field < 0 || field >= tupleType.getArity()) {
		throw new IllegalArgumentException("Aggregation field position is out of range.");
	}

	AggregationFunctionFactory factory = function.getFactory();
	AggregationFunction<?> aggregationFunction =
		factory.createAggregationFunction(tupleType.getTypeAt(field).getTypeClass());

	this.aggregationFunctions.add(aggregationFunction);
	this.fields.add(field);
	return this;
}
/**
 * Creates a serializer for arrays of the component type, delegating element
 * serialization to the component type's own serializer.
 */
@SuppressWarnings("unchecked")
@Override
@PublicEvolving
public TypeSerializer<T> createSerializer(ExecutionConfig executionConfig) {
	TypeSerializer<C> componentSerializer = componentInfo.createSerializer(executionConfig);
	return (TypeSerializer<T>) new GenericArraySerializer<C>(componentInfo.getTypeClass(), componentSerializer);
}
/**
 * Adds another aggregation on the given tuple field to this operator.
 *
 * @param function the aggregation to apply
 * @param field    the zero-based tuple field position
 * @return this operator, for fluent chaining
 * @throws IllegalArgumentException if the field position is out of range
 */
public ScalaAggregateOperator<IN> and(Aggregations function, int field) {
	Preconditions.checkNotNull(function);

	TupleTypeInfoBase<?> tupleInfo = (TupleTypeInfoBase<?>) getType();
	boolean inRange = field >= 0 && field < tupleInfo.getArity();
	if (!inRange) {
		throw new IllegalArgumentException("Aggregation field position is out of range.");
	}

	AggregationFunctionFactory factory = function.getFactory();
	AggregationFunction<?> aggregator = factory.createAggregationFunction(tupleInfo.getTypeAt(field).getTypeClass());
	this.aggregationFunctions.add(aggregator);
	this.fields.add(field);
	return this;
}
/**
 * Converts a JSON array node into an object array whose component type matches
 * the given element type information, converting each element in turn.
 *
 * @param node        the JSON array node to convert
 * @param elementType Flink type information of the array elements
 * @return a typed array holding the converted elements
 */
private Object convertObjectArray(JsonNode node, TypeInformation<?> elementType) {
	final int length = node.size();
	final Object[] converted = (Object[]) Array.newInstance(elementType.getTypeClass(), length);
	for (int idx = 0; idx < length; idx++) {
		converted[idx] = convert(node.get(idx), elementType);
	}
	return converted;
}
/**
 * Creates a sum aggregator over the field at the given tuple position.
 *
 * @param pos      position of the field to sum
 * @param typeInfo type information of the records
 * @param config   execution config used to create the serializer
 */
public SumAggregator(int pos, TypeInformation<T> typeInfo, ExecutionConfig config) {
	fieldAccessor = FieldAccessorFactory.getAccessor(typeInfo, pos, config);
	adder = SumFunction.getForClass(fieldAccessor.getFieldType().getTypeClass());
	// tuples are copied field-wise, so no serializer is needed for them
	isTuple = typeInfo instanceof TupleTypeInfo;
	serializer = isTuple ? null : typeInfo.createSerializer(config);
}
/**
 * Creates a sum aggregator over the field with the given name/expression.
 *
 * @param field    name of the field to sum
 * @param typeInfo type information of the records
 * @param config   execution config used to create the serializer
 */
public SumAggregator(String field, TypeInformation<T> typeInfo, ExecutionConfig config) {
	fieldAccessor = FieldAccessorFactory.getAccessor(typeInfo, field, config);
	adder = SumFunction.getForClass(fieldAccessor.getFieldType().getTypeClass());
	if (!(typeInfo instanceof TupleTypeInfo)) {
		isTuple = false;
		this.serializer = typeInfo.createSerializer(config);
	} else {
		// tuples are copied field-wise, so no serializer is needed for them
		isTuple = true;
		serializer = null;
	}
}
/**
 * Builds a tuple summary aggregator with one column aggregator per tuple field,
 * each chosen from the field's type class.
 *
 * @param inType tuple type information describing the fields
 * @return an aggregator covering every field of the tuple
 */
@SuppressWarnings("unchecked")
public static <R extends Tuple> TupleSummaryAggregator<R> create(TupleTypeInfoBase<?> inType) {
	final int arity = inType.getArity();
	Aggregator[] aggregators = new Aggregator[arity];
	for (int i = 0; i < arity; i++) {
		Class clazz = inType.getTypeAt(i).getTypeClass();
		aggregators[i] = SummaryAggregatorFactory.create(clazz);
	}
	return new TupleSummaryAggregator<>(aggregators);
}
/**
 * Creates a Cassandra POJO sink for the input stream and wraps it in a
 * {@link CassandraSink}, named "Cassandra Sink" in the job graph.
 *
 * @return the created sink
 * @throws Exception if sink construction fails
 */
@Override
public CassandraSink<IN> createSink() throws Exception {
	final CassandraPojoSink<IN> pojoSink = new CassandraPojoSink<>(
		typeInfo.getTypeClass(),
		builder,
		mapperOptions,
		keyspace,
		configBuilder.build(),
		failureHandler);
	return new CassandraSink<>(input.addSink(pojoSink).name("Cassandra Sink"));
}
/**
 * Creates a data source backed by the given collection.
 *
 * @param data             the elements of the data source
 * @param type             type information of the elements
 * @param callLocationName name of the call site, for error reporting
 * @return a data source producing the collection's elements
 */
private <X> DataSource<X> fromCollection(Collection<X> data, TypeInformation<X> type, String callLocationName) {
	// validate the collection against the declared element class before wrapping it
	CollectionInputFormat.checkCollection(data, type.getTypeClass());
	CollectionInputFormat<X> inputFormat = new CollectionInputFormat<>(data, type.createSerializer(config));
	return new DataSource<>(this, inputFormat, type, callLocationName);
}
/** Verifies that writable type info reports the original class for all three extension styles. */
@Test
public void testCreateWritableInfo() {
	TypeInformation<DirectWritable> directInfo =
		TypeExtractor.createHadoopWritableTypeInfo(DirectWritable.class);
	assertEquals(DirectWritable.class, directInfo.getTypeClass());

	TypeInformation<ViaInterfaceExtension> interfaceInfo =
		TypeExtractor.createHadoopWritableTypeInfo(ViaInterfaceExtension.class);
	assertEquals(ViaInterfaceExtension.class, interfaceInfo.getTypeClass());

	TypeInformation<ViaAbstractClassExtension> abstractInfo =
		TypeExtractor.createHadoopWritableTypeInfo(ViaAbstractClassExtension.class);
	assertEquals(ViaAbstractClassExtension.class, abstractInfo.getTypeClass());
}
/** Verifies that a tuple-type CSV source reports the user class as its type class. */
@Test
public void testReturnType() throws Exception {
	CsvReader reader = getCsvReader();
	DataSource<Item> items = reader.tupleType(Item.class);
	// assertSame checks reference identity like the former assertTrue(a == b),
	// but reports both classes in the failure message.
	Assert.assertSame(Item.class, items.getType().getTypeClass());
}
/** Verifies that the type extractor preserves writable type info through a map function. */
@Test
public void testExtractFromFunction() {
	RichMapFunction<DirectWritable, DirectWritable> mapFunction =
		new RichMapFunction<DirectWritable, DirectWritable>() {
			@Override
			public DirectWritable map(DirectWritable value) throws Exception {
				return null;
			}
		};

	TypeInformation<DirectWritable> outType =
		TypeExtractor.getMapReturnTypes(mapFunction, new WritableTypeInfo<>(DirectWritable.class));

	assertTrue(outType instanceof WritableTypeInfo);
	assertEquals(DirectWritable.class, outType.getTypeClass());
}
/** Verifies that reading value types yields a Tuple8 tuple type. */
@Test
public void testWithValueType() throws Exception {
	CsvReader reader = getCsvReader();
	DataSource<Tuple8<StringValue, BooleanValue, ByteValue, ShortValue, IntValue, LongValue, FloatValue, DoubleValue>> items =
		reader.types(StringValue.class, BooleanValue.class, ByteValue.class, ShortValue.class,
			IntValue.class, LongValue.class, FloatValue.class, DoubleValue.class);
	TypeInformation<?> info = items.getType();
	// assertTrue is the idiomatic form of assertEquals(true, ...)
	Assert.assertTrue(info.isTupleType());
	Assert.assertEquals(Tuple8.class, info.getTypeClass());
}
@Test(expected = TableException.class) public void testGenericRowWithAlias() throws Exception { ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config()); // use null value the enforce GenericType DataSet<Row> dataSet = env.fromElements(Row.of((Integer) null)); assertTrue(dataSet.getType() instanceof GenericTypeInfo); assertTrue(dataSet.getType().getTypeClass().equals(Row.class)); // Must fail. Cannot import DataSet<Row> with GenericTypeInfo. tableEnv.fromDataSet(dataSet, "nullField"); }
@Test(expected = TableException.class) public void testGenericRow() throws Exception { ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(env, config()); // use null value the enforce GenericType DataSet<Row> dataSet = env.fromElements(Row.of(1, 2L, "Hello", null)); assertTrue(dataSet.getType() instanceof GenericTypeInfo); assertTrue(dataSet.getType().getTypeClass().equals(Row.class)); // Must fail. Cannot import DataSet<Row> with GenericTypeInfo. tableEnv.fromDataSet(dataSet); }