/**
 * Shallow tuple copy.
 *
 * @return A new Tuple with the same fields as this.
 */
@Override
@SuppressWarnings("unchecked")
public Tuple2<T0, T1> copy() {
    return new Tuple2<>(this.f0, this.f1);
}
/**
 * Adds a data set as a broadcast set to the apply function.
 *
 * @param name The name under which the broadcast data is available in the apply function.
 * @param data The data set to be broadcast.
 */
public void addBroadcastSetForApplyFunction(String name, DataSet<?> data) {
    this.bcVarsApply.add(new Tuple2<>(name, data));
}
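// A minimal sketch (assumed, not part of this file) of how a broadcast set registered above
// is typically read back inside the apply function, assuming the function's base class exposes
// a getBroadcastSet(name) accessor backed by its runtime context, as Gelly's iteration
// functions do. The set name "updates" and the Long types are illustrative only.
@Override
public void apply(Long newValue, Long currentValue) {
    Collection<Long> broadcastData = getBroadcastSet("updates");
    // ... combine the broadcast data with newValue/currentValue to compute the result ...
}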
@Override
public Tuple2<Integer, Integer> join(Tuple2<Integer, Integer> first, Tuple1<Integer> second) throws Exception {
    return new Tuple2<>(first.f0, first.f1 + 1);
}
});
public Tuple2<String, String> map(Tuple2<Tuple1<String>, Integer> value) throws Exception {
    Tuple2<String, String> t = new Tuple2<>();
    // Only f0 is populated; f1 is left unset (null).
    t.f0 = value.f0.f0;
    return t;
}
}
@Override
public Tuple2<Integer, Long> getKey(Tuple4<Integer, Long, CustomType, Long[]> value) throws Exception {
    return new Tuple2<>(value.f0, value.f1);
}
}, Order.ASCENDING);
@Override
public Tuple2<Long, Long> map(Long value) {
    return new Tuple2<>(value, value);
}
}).name("Assign Vertex Ids");
@Override
public void mapPartition(Iterable<T> values, Collector<Tuple2<Long, T>> out) throws Exception {
    for (T value : values) {
        out.collect(new Tuple2<>(start++, value));
    }
}
}).withBroadcastSet(elementCount, "counts");
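// A minimal sketch (assumed, not part of this file) of how the "start" offset used above
// could be initialized in open() from the broadcast "counts" set of
// (subtask index, element count) pairs; field and variable names are illustrative.
@Override
public void open(Configuration parameters) throws Exception {
    List<Tuple2<Integer, Long>> counts = getRuntimeContext().getBroadcastVariable("counts");
    int taskId = getRuntimeContext().getIndexOfThisSubtask();
    start = 0L;
    for (Tuple2<Integer, Long> count : counts) {
        // Elements of all lower-indexed subtasks precede this subtask in the global numbering.
        if (count.f0 < taskId) {
            start += count.f1;
        }
    }
}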
@Override
public List<Tuple2<Integer, BitSet>> snapshotState(long checkpointId, long timestamp) throws Exception {
    LOG.info("Snapshot of counter " + numElements + " at checkpoint " + checkpointId);
    return Collections.singletonList(new Tuple2<>(numElements, duplicateChecker));
}
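// A matching restoreState(...) sketch as the ListCheckpointed contract expects (assumed,
// not part of this file): it re-populates the element counter and the duplicate checker
// from the snapshotted state.
@Override
public void restoreState(List<Tuple2<Integer, BitSet>> state) throws Exception {
    for (Tuple2<Integer, BitSet> s : state) {
        numElements = s.f0;
        duplicateChecker = s.f1;
    }
}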
@Test(expected = IllegalArgumentException.class)
public void testRangePartitionByComplexKeyWithTooManyOrders() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    final DataSource<Tuple2<Tuple2<Integer, Integer>, Integer>> ds = env.fromElements(
            new Tuple2<>(new Tuple2<>(1, 1), 1),
            new Tuple2<>(new Tuple2<>(2, 2), 2),
            new Tuple2<>(new Tuple2<>(2, 2), 2));
    ds.partitionByRange(0).withOrders(Order.ASCENDING, Order.DESCENDING);
}
@Override
public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Tuple2<Long, Long[]>> out) {
    neighbors.clear();
    Long id = 0L;
    for (Tuple2<Long, Long> n : values) {
        id = n.f0;
        neighbors.add(n.f1);
    }
    out.collect(new Tuple2<>(id, neighbors.toArray(new Long[neighbors.size()])));
}
}
@Override
public void process(
        Context ctx,
        Iterable<Tuple3<String, String, Integer>> values,
        Collector<Tuple2<String, Integer>> out) throws Exception {
    for (Tuple3<String, String, Integer> in : values) {
        out.collect(new Tuple2<>(in.f0, in.f2));
    }
}
});
@Override
public void mapPartition(Iterable<T> values, Collector<Tuple2<Integer, Long>> out) throws Exception {
    // Count the elements in this partition and emit (subtask index, count).
    long counter = 0;
    for (T value : values) {
        counter++;
    }
    out.collect(new Tuple2<>(getRuntimeContext().getIndexOfThisSubtask(), counter));
}
});
public CustomTypeWithTuple(int i, long l, String s) {
    myInt = i;
    myLong = l;
    myString = s;
    nested = new NestedCustomType(i, l, s);
    intByString = new Tuple2<>(i, s);
}
@Test
public void testWriteRecord() throws Exception {
    OutputFormat<String, Long> dummyOutputFormat = mock(DummyOutputFormat.class);
    DummyRecordWriter recordWriter = mock(DummyRecordWriter.class);
    JobConf jobConf = mock(JobConf.class);

    HadoopOutputFormat<String, Long> outputFormat = new HadoopOutputFormat<>(dummyOutputFormat, jobConf);
    outputFormat.recordWriter = recordWriter;

    outputFormat.writeRecord(new Tuple2<>("key", 1L));

    verify(recordWriter, times(1)).write(anyString(), anyLong());
}
public void join(Vertex<K, VV> vertex, Edge<K, EV> edge, Collector<Tuple2<K, Neighbor<VV, EV>>> out) {
    out.collect(new Tuple2<>(
            edge.getTarget(),
            new Neighbor<>(vertex.getValue(), edge.getValue())));
}
}
public void join(Vertex<K, VV> vertex, Edge<K, EV> edge, Collector<Tuple2<K, Neighbor<VV, EV>>> out) {
    out.collect(new Tuple2<>(
            edge.getSource(),
            new Neighbor<>(vertex.getValue(), edge.getValue())));
}
}
@Override
public void run(SourceContext<Tuple2<Integer, Integer>> ctx) throws Exception {
    int cnt = 0;
    int partition = getRuntimeContext().getIndexOfThisSubtask();
    while (running && cnt < numElements) {
        ctx.collect(new Tuple2<>(partition, cnt));
        cnt++;
    }
}
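// A companion cancel() sketch as it is conventionally written for such a source (assumed,
// not part of this file): it clears the "running" flag checked by the emit loop above.
@Override
public void cancel() {
    running = false;
}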
@Test(expected = SemanticProperties.InvalidSemanticAnnotationException.class)
public void testBinaryForwardedOverwritingInLine1() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    @SuppressWarnings("unchecked")
    DataSet<Tuple2<Long, Long>> input1 = env.fromElements(new Tuple2<Long, Long>(3L, 4L));
    @SuppressWarnings("unchecked")
    DataSet<Tuple2<Long, Long>> input2 = env.fromElements(new Tuple2<Long, Long>(3L, 2L));
    input1.join(input2).where(0).equalTo(0)
            .with(new ForwardedFirstAnnotationJoin<Long>())
            .withForwardedFieldsFirst("0->1");
}
@Test(expected = SemanticProperties.InvalidSemanticAnnotationException.class)
public void testBinaryForwardedOverwritingInLine2() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    @SuppressWarnings("unchecked")
    DataSet<Tuple2<Long, Long>> input1 = env.fromElements(new Tuple2<Long, Long>(3L, 4L));
    @SuppressWarnings("unchecked")
    DataSet<Tuple2<Long, Long>> input2 = env.fromElements(new Tuple2<Long, Long>(3L, 2L));
    input1.join(input2).where(0).equalTo(0)
            .with(new ForwardedSecondAnnotationJoin<Long>())
            .withForwardedFieldsSecond("0->1");
}
@Test(expected = SemanticProperties.InvalidSemanticAnnotationException.class)
public void testBinaryForwardedOverwritingInLine4() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    @SuppressWarnings("unchecked")
    DataSet<Tuple2<Long, Long>> input1 = env.fromElements(new Tuple2<Long, Long>(3L, 4L));
    @SuppressWarnings("unchecked")
    DataSet<Tuple2<Long, Long>> input2 = env.fromElements(new Tuple2<Long, Long>(3L, 2L));
    input1.join(input2).where(0).equalTo(0)
            .with(new ForwardedBothAnnotationJoin<Long, Long, Long, Long>())
            .withForwardedFieldsSecond("0->1;");
}