TupleTypeInfo

How to use TupleTypeInfo in org.apache.flink.api.java.typeutils

Best Java code snippets using org.apache.flink.api.java.typeutils.TupleTypeInfo (Showing top 20 results out of 369)

origin: apache/flink

@Override
protected TupleTypeInfo<?>[] getTestData() {
  return new TupleTypeInfo<?>[] {
    new TupleTypeInfo<>(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO),
    new TupleTypeInfo<>(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.BOOLEAN_TYPE_INFO)
  };
}
origin: apache/flink

private TypeInformation<?>[] extractFieldTypes(int[] fields, TypeInformation<?> inType) {
  TupleTypeInfo<?> inTupleType = (TupleTypeInfo<?>) inType;
  TypeInformation<?>[] fieldTypes = new TypeInformation[fields.length];
  for (int i = 0; i < fields.length; i++) {
    fieldTypes[i] = inTupleType.getTypeAt(fields[i]);
  }
  return fieldTypes;
}
origin: apache/flink

/**
 * Projects a {@link Tuple} {@link DataStream} to the previously selected fields.
 *
 * @return The projected DataStream.
 * @see Tuple
 * @see DataStream
 */
public <T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20> SingleOutputStreamOperator<Tuple21<T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>> projectTuple21() {
  TypeInformation<?>[] fTypes = extractFieldTypes(fieldIndexes, dataStream.getType());
  TupleTypeInfo<Tuple21<T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>> tType = new TupleTypeInfo<Tuple21<T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>>(fTypes);
  return dataStream.transform("Projection", tType, new StreamProject<IN, Tuple21<T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>>(fieldIndexes, tType.createSerializer(dataStream.getExecutionConfig())));
}
origin: apache/flink

@SuppressWarnings("unchecked")
@Override
@PublicEvolving
public TupleSerializer<T> createSerializer(ExecutionConfig executionConfig) {
  if (getTypeClass() == Tuple0.class) {
    return (TupleSerializer<T>) Tuple0Serializer.INSTANCE;
  }
  TypeSerializer<?>[] fieldSerializers = new TypeSerializer<?>[getArity()];
  for (int i = 0; i < types.length; i++) {
    fieldSerializers[i] = types[i].createSerializer(executionConfig);
  }
  
  Class<T> tupleClass = getTypeClass();
  
  return new TupleSerializer<T>(tupleClass, fieldSerializers);
}
origin: apache/flink

SimpleTupleFieldAccessor(int pos, TypeInformation<T> typeInfo) {
  checkNotNull(typeInfo, "typeInfo must not be null.");
  int arity = ((TupleTypeInfo) typeInfo).getArity();
  if (pos < 0 || pos >= arity) {
    throw new CompositeType.InvalidFieldReferenceException(
      "Tried to select " + ((Integer) pos).toString() + ". field on \"" +
      typeInfo.toString() + "\", which is an invalid index.");
  }
  this.pos = pos;
  this.fieldType = ((TupleTypeInfo) typeInfo).getTypeAt(pos);
}
origin: apache/flink

@Test
public void testMyTupleHierarchyWithInference() {
  TypeInformation<Tuple1<MyTuple3<Tuple1<Float>>>> inTypeInfo = new TupleTypeInfo<>(new MyTupleTypeInfo<>(
    new TupleTypeInfo<Tuple1<Float>>(FLOAT_TYPE_INFO), BOOLEAN_TYPE_INFO));
  MapFunction<Tuple1<MyTuple3<Tuple1<Float>>>, Tuple1<MyTuple3<Tuple2<Float, String>>>> f = new MyTuple3Mapper<>();
  TypeInformation ti = TypeExtractor.getMapReturnTypes(f, inTypeInfo);
  assertTrue(ti instanceof TupleTypeInfo);
  TupleTypeInfo<?> tti = (TupleTypeInfo) ti;
  assertTrue(tti.getTypeAt(0) instanceof MyTupleTypeInfo);
  MyTupleTypeInfo mtti = (MyTupleTypeInfo) tti.getTypeAt(0);
  assertEquals(new TupleTypeInfo<>(FLOAT_TYPE_INFO, STRING_TYPE_INFO), mtti.getField0());
  assertEquals(BOOLEAN_TYPE_INFO, mtti.getField1());
}
origin: apache/flink

protected ProjectCross(DataSet<I1> input1, DataSet<I2> input2, int[] fields, boolean[] isFromFirst,
    TupleTypeInfo<OUT> returnType, CrossHint hint) {
  super(input1, input2,
      new ProjectCrossFunction<I1, I2, OUT>(fields, isFromFirst, returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
      returnType, hint, "unknown");
  crossProjection = null;
}
origin: apache/flink

RecursiveTupleFieldAccessor(int pos, FieldAccessor<R, F> innerAccessor, TypeInformation<T> typeInfo) {
  checkNotNull(typeInfo, "typeInfo must not be null.");
  checkNotNull(innerAccessor, "innerAccessor must not be null.");
  int arity = ((TupleTypeInfo) typeInfo).getArity();
  if (pos < 0 || pos >= arity) {
    throw new CompositeType.InvalidFieldReferenceException(
      "Tried to select " + ((Integer) pos).toString() + ". field on \"" +
        typeInfo.toString() + "\", which is an invalid index.");
  }
  this.pos = pos;
  this.innerAccessor = innerAccessor;
  this.fieldType = innerAccessor.fieldType;
}
origin: apache/flink

@Test
public void testFunctionDependingOnInputWithFunctionHierarchy2() {
  IdentityMapper5<String> function = new IdentityMapper5<String>();
  @SuppressWarnings({ "rawtypes", "unchecked" })
  TypeInformation<?> ti = TypeExtractor.getMapReturnTypes(function, new TupleTypeInfo(BasicTypeInfo.STRING_TYPE_INFO,
      BasicTypeInfo.STRING_TYPE_INFO));
  Assert.assertTrue(ti.isTupleType());
  TupleTypeInfo<?> tti = (TupleTypeInfo<?>) ti;
  Assert.assertEquals(BasicTypeInfo.STRING_TYPE_INFO, tti.getTypeAt(0));
  Assert.assertEquals(BasicTypeInfo.STRING_TYPE_INFO, tti.getTypeAt(1));
}
origin: apache/flink

protected ProjectCross(DataSet<I1> input1, DataSet<I2> input2, int[] fields, boolean[] isFromFirst,
    TupleTypeInfo<OUT> returnType, CrossProjection<I1, I2> crossProjection, CrossHint hint) {
  super(input1, input2,
    new ProjectCrossFunction<I1, I2, OUT>(fields, isFromFirst, returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
    returnType, hint, "unknown");
  this.crossProjection = crossProjection;
}
origin: apache/flink

  @Override
  public TupleTypeInfo<Tuple2<Boolean, Row>> getOutputType() {
    return new TupleTypeInfo<>(Types.BOOLEAN, getRecordType());
  }
}
origin: apache/flink

/**
 * Projects a {@link Tuple} {@link DataStream} to the previously selected fields.
 *
 * @return The projected DataStream.
 * @see Tuple
 * @see DataStream
 */
public <T0, T1, T2, T3, T4, T5, T6, T7> SingleOutputStreamOperator<Tuple8<T0, T1, T2, T3, T4, T5, T6, T7>> projectTuple8() {
  TypeInformation<?>[] fTypes = extractFieldTypes(fieldIndexes, dataStream.getType());
  TupleTypeInfo<Tuple8<T0, T1, T2, T3, T4, T5, T6, T7>> tType = new TupleTypeInfo<Tuple8<T0, T1, T2, T3, T4, T5, T6, T7>>(fTypes);
  return dataStream.transform("Projection", tType, new StreamProject<IN, Tuple8<T0, T1, T2, T3, T4, T5, T6, T7>>(fieldIndexes, tType.createSerializer(dataStream.getExecutionConfig())));
}
origin: apache/flink

@Test
public void testMyOptionGenericType() {
  TypeInformation<MyOption<Tuple2<Boolean, String>>> inTypeInfo = new MyOptionTypeInfo<>(
    new TupleTypeInfo<Tuple2<Boolean, String>>(BOOLEAN_TYPE_INFO, STRING_TYPE_INFO));
  MapFunction<MyOption<Tuple2<Boolean, String>>, MyOption<Tuple2<Boolean, Boolean>>> f = new MyOptionMapper<>();
  TypeInformation<?> ti = TypeExtractor.getMapReturnTypes(f, inTypeInfo);
  assertTrue(ti instanceof MyOptionTypeInfo);
  MyOptionTypeInfo oti = (MyOptionTypeInfo) ti;
  assertTrue(oti.getInnerType() instanceof TupleTypeInfo);
  TupleTypeInfo tti = (TupleTypeInfo) oti.getInnerType();
  assertEquals(BOOLEAN_TYPE_INFO, tti.getTypeAt(0));
  assertEquals(BOOLEAN_TYPE_INFO, tti.getTypeAt(1));
}
origin: apache/flink

public static TypeInformation<?>[] extractFieldTypes(int[] fields, TypeInformation<?> inType) {
  TupleTypeInfo<?> inTupleType = (TupleTypeInfo<?>) inType;
  TypeInformation<?>[] fieldTypes = new TypeInformation[fields.length];
  for (int i = 0; i < fields.length; i++) {
    fieldTypes[i] = inTupleType.getTypeAt(fields[i]);
  }
  return fieldTypes;
}
origin: apache/flink

protected ProjectJoin(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2, JoinHint hint, int[] fields, boolean[] isFromFirst, TupleTypeInfo<OUT> returnType) {
  super(input1, input2, keys1, keys2,
      new ProjectFlatJoinFunction<I1, I2, OUT>(fields, isFromFirst, returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
      returnType, hint, Utils.getCallLocationName(4)); // We need to use the 4th element in the stack because the call comes through .types().
  joinProj = null;
}
origin: apache/flink

  @Override
  public TypeInformation<Tuple2<K, V>> getProducedType() {
    return new TupleTypeInfo<>(TypeExtractor.createTypeInfo(keyClass), TypeExtractor.createTypeInfo(valueClass));
  }
}
origin: apache/flink

/**
 * Projects a {@link Tuple} {@link DataStream} to the previously selected fields.
 *
 * @return The projected DataStream.
 * @see Tuple
 * @see DataStream
 */
public <T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22> SingleOutputStreamOperator<Tuple23<T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>> projectTuple23() {
  TypeInformation<?>[] fTypes = extractFieldTypes(fieldIndexes, dataStream.getType());
  TupleTypeInfo<Tuple23<T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>> tType = new TupleTypeInfo<Tuple23<T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>>(fTypes);
  return dataStream.transform("Projection", tType, new StreamProject<IN, Tuple23<T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>>(fieldIndexes, tType.createSerializer(dataStream.getExecutionConfig())));
}
origin: apache/flink

@Test
public void testMyTuple() {
  TypeInformation<Tuple1<MyTuple<Double, String>>> inTypeInfo = new TupleTypeInfo<>(
    new MyTupleTypeInfo(DOUBLE_TYPE_INFO, STRING_TYPE_INFO));
  MapFunction<Tuple1<MyTuple<Double, String>>, Tuple1<MyTuple<Boolean, Double>>> f = new MyTupleMapperL2<>();
  TypeInformation<?> ti = TypeExtractor.getMapReturnTypes(f, inTypeInfo);
  assertTrue(ti instanceof TupleTypeInfo);
  TupleTypeInfo<?> tti = (TupleTypeInfo<?>) ti;
  assertTrue(tti.getTypeAt(0) instanceof MyTupleTypeInfo);
  MyTupleTypeInfo mtti = (MyTupleTypeInfo) tti.getTypeAt(0);
  assertEquals(BOOLEAN_TYPE_INFO, mtti.getField0());
  assertEquals(DOUBLE_TYPE_INFO, mtti.getField1());
}
origin: apache/flink

@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testInputInference2() {
  EdgeMapper2<Boolean> em = new EdgeMapper2<Boolean>();
  TypeInformation<?> ti = TypeExtractor.getMapReturnTypes((MapFunction) em, Types.BOOLEAN);
  Assert.assertTrue(ti.isTupleType());
  Assert.assertEquals(3, ti.getArity());
  TupleTypeInfo<?> tti = (TupleTypeInfo<?>) ti;
  Assert.assertEquals(BasicTypeInfo.LONG_TYPE_INFO, tti.getTypeAt(0));
  Assert.assertEquals(BasicTypeInfo.LONG_TYPE_INFO, tti.getTypeAt(1));
  Assert.assertEquals(BasicTypeInfo.BOOLEAN_TYPE_INFO, tti.getTypeAt(2));
}

origin: apache/flink

protected ProjectJoin(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2, JoinHint hint, int[] fields, boolean[] isFromFirst, TupleTypeInfo<OUT> returnType, JoinProjection<I1, I2> joinProj) {
  super(input1, input2, keys1, keys2,
      new ProjectFlatJoinFunction<I1, I2, OUT>(fields, isFromFirst, returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
      returnType, hint, Utils.getCallLocationName(4));
  this.joinProj = joinProj;
}
org.apache.flink.api.java.typeutils.TupleTypeInfo

Javadoc

A TypeInformation for the tuple types of the Java API.
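Based on the snippets above, here is a minimal, self-contained sketch of constructing a TupleTypeInfo by hand and inspecting it; the class and variable names are illustrative and not part of Flink:

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

// Illustrative example class, not part of Flink.
public class TupleTypeInfoExample {
  public static void main(String[] args) {
    // Describe a Tuple2<Integer, String> to Flink's type system.
    TupleTypeInfo<Tuple2<Integer, String>> info = new TupleTypeInfo<>(
        BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

    // Inspect the composite type, as the snippets above do after casting.
    int arity = info.getArity();                        // 2
    TypeInformation<?> firstField = info.getTypeAt(0);  // INT_TYPE_INFO
    Class<Tuple2<Integer, String>> tupleClass = info.getTypeClass();

    System.out.println(arity + ", " + firstField + ", " + tupleClass.getSimpleName());
  }
}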

Most used methods

  • <init>
  • getTypeAt
  • createSerializer
  • getArity
  • getBasicAndBasicValueTupleTypeInfo
  • getBasicTupleTypeInfo
  • getFieldIndex
  • getTypeClass
  • canEqual
  • createComparator
  • equals
  • getFlatFields
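A minimal sketch exercising a few of the methods listed above (getBasicTupleTypeInfo, getFieldIndex, createSerializer); the example class name and the use of a fresh ExecutionConfig are assumptions for illustration, not taken from the snippets:

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

// Illustrative example class, not part of Flink.
public class TupleSerializerExample {
  public static void main(String[] args) {
    // Build the type info from plain Java classes rather than TypeInformation constants.
    TupleTypeInfo<Tuple2<Long, String>> info =
        TupleTypeInfo.getBasicTupleTypeInfo(Long.class, String.class);

    // Tuple fields are addressable by their generated names "f0", "f1", ...
    int index = info.getFieldIndex("f1"); // 1

    // Serializers are created against an ExecutionConfig, as in the snippets above.
    TypeSerializer<Tuple2<Long, String>> serializer =
        info.createSerializer(new ExecutionConfig());

    System.out.println(index + ", " + serializer.getClass().getSimpleName());
  }
}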
