// Delegates cache loading for {@code key} to the user-supplied loading function.
@Override public VALUE load(KEY key) throws Exception { return loadingFunction.apply(key); } });
/**
 * Runs the given time through every registered timestamp mapper, in registration order, and
 * returns the final mapped timestamp.
 */
protected Instant computeTargetTimestamp(Instant time) {
  Instant mapped = time;
  for (SerializableFunction<Instant, Instant> mapper : timestampMappers) {
    mapped = mapper.apply(mapped);
  }
  return mapped;
}
// Forwards expansion of the input directly to the wrapped serializable function.
@Override public OutputT expand(InputT input) { return fn.apply(input); } };
/**
 * Computes the timestamp for {@code record} via the configured timestamp function and remembers
 * it as the most recently observed record timestamp.
 */
@Override
public Instant getTimestampForRecord(PartitionContext context, KafkaRecord<K, V> record) {
  Instant recordTimestamp = timestampFn.apply(record);
  lastRecordTimestamp = recordTimestamp;
  return lastRecordTimestamp;
}
/**
 * Applies the timestamp function to {@code record}, stores the result as the last seen record
 * timestamp, and returns it.
 */
@Override
public Instant getTimestampForRecord(PartitionContext context, KafkaRecord<K, V> record) {
  lastRecordTimestamp = timestampFn.apply(record);
  Instant observed = lastRecordTimestamp;
  return observed;
}
/**
 * Reads the next Avro record into {@code currentRecord}, applying the parse function when one is
 * configured.
 *
 * @return false once all {@code numRecords} records in this block have been consumed
 * @throws IOException if the underlying reader fails
 */
@Override
public boolean readNextRecord() throws IOException {
  if (currentRecordIndex >= numRecords) {
    return false;
  }
  Object decoded = reader.read(null, decoder);
  if (mode.parseFn == null) {
    // No parse function configured: the decoded record already is the element type.
    currentRecord = (T) decoded;
  } else {
    currentRecord = mode.parseFn.apply((GenericRecord) decoded);
  }
  currentRecordIndex++;
  return true;
}
/**
 * Resolves the destination table for {@code element} via the user-provided table function.
 *
 * @throws IllegalArgumentException if the table function yields {@code null}
 */
@Override
public TableDestination getDestination(ValueInSingleWindow<T> element) {
  TableDestination destination = tableFunction.apply(element);
  checkArgument(
      destination != null,
      "result of tableFunction can not be null, but %s returned null for element: %s",
      tableFunction,
      element);
  return destination;
}
/** Combines all input values into one element and wraps it in a single-element list. */
private List<V> mergeToSingleton(Iterable<V> values) {
  V combined = combiner.apply(values);
  List<V> result = new ArrayList<>();
  result.add(combined);
  return result;
}
/**
 * Feeds {@code value} into every composed combine function: each function's input is extracted
 * from the value first, then its accumulator slot is updated in place.
 */
@Override
public Object[] addInput(Object[] accumulator, DataT value) {
  for (int fnIndex = 0; fnIndex < combineFnCount; ++fnIndex) {
    Object extracted = extractInputFns.get(fnIndex).apply(value);
    accumulator[fnIndex] = combineFns.get(fnIndex).addInput(accumulator[fnIndex], extracted);
  }
  return accumulator;
}
/** Emits the element downstream only when it satisfies the filtering predicate. */
@ProcessElement
public void processElement(@Element T element, OutputReceiver<T> r) {
  boolean keep = predicate.apply(element);
  if (keep) {
    r.output(element);
  }
} }))
/**
 * Adapts a function over {@link KV} pairs into one over {@link KafkaRecord}s by unwrapping the
 * record's key-value pair before delegating.
 */
private static <KeyT, ValueT, OutT> SerializableFunction<KafkaRecord<KeyT, ValueT>, OutT>
    unwrapKafkaAndThen(final SerializableFunction<KV<KeyT, ValueT>, OutT> fn) {
  return record -> {
    KV<KeyT, ValueT> kv = record.getKV();
    return fn.apply(kv);
  };
}
///////////////////////////////////////////////////////////////////////////////////////
/**
 * Returns the event timestamp for {@code record}, tracking the maximum event timestamp observed
 * so far.
 */
@Override
public Instant getTimestampForRecord(PartitionContext ctx, KafkaRecord<K, V> record) {
  Instant eventTime = timestampFunction.apply(record);
  // Only advance the high-water mark when this record's timestamp moves it forward.
  if (eventTime.isAfter(maxEventTimestamp)) {
    maxEventTimestamp = eventTime;
  }
  return eventTime;
}
/** Converts a {@link GenericRecord} into the target type via the user-supplied parse function. */
@Override
public T apply(GenericRecord input) {
  SchemaAndRecord schemaAndRecord = new SchemaAndRecord(input, schema.get());
  return parseFn.apply(schemaAndRecord);
} };
/**
 * Computes the event timestamp for {@code record} and updates the running maximum event
 * timestamp when this record is newer.
 */
@Override
public Instant getTimestampForRecord(PartitionContext ctx, KafkaRecord<K, V> record) {
  Instant recordEventTime = timestampFunction.apply(record);
  if (recordEventTime.isAfter(maxEventTimestamp)) {
    maxEventTimestamp = recordEventTime;
  }
  return recordEventTime;
}
/** Creates the Kafka producer, preferring a user-supplied producer factory when configured. */
@Setup
public void setup() {
  producer =
      (spec.getProducerFactoryFn() == null)
          ? new KafkaProducer<>(producerConfig)
          : spec.getProducerFactoryFn().apply(producerConfig);
}
/** Initializes the producer: use the configured factory if present, else a plain KafkaProducer. */
@Setup
public void setup() {
  if (spec.getProducerFactoryFn() == null) {
    producer = new KafkaProducer<>(producerConfig);
  } else {
    producer = spec.getProducerFactoryFn().apply(producerConfig);
  }
}
/**
 * Re-emits the element with the timestamp computed by the user-provided function.
 *
 * @throws NullPointerException if the timestamp function returns {@code null}
 */
@ProcessElement
public void processElement(@Element T element, OutputReceiver<T> r) {
  Instant assignedTimestamp = fn.apply(element);
  checkNotNull(
      assignedTimestamp,
      "Timestamps for WithTimestamps cannot be null. Timestamp provided by %s.",
      fn);
  r.outputWithTimestamp(element, assignedTimestamp);
}
/** Converts the input to a {@link Row} and projects it down to the fields being aggregated. */
@Override
public Row apply(T input) {
  Row converted = toRowFunction.apply(input);
  return Select.selectRow(
      converted,
      fieldAggregation.fieldsToAggregate,
      converted.getSchema(),
      fieldAggregation.inputSubSchema);
} }
/**
 * Wraps a {@link SerializableFunction} as a {@link Contextful} of {@link Fn} with empty {@link
 * Requirements}, ignoring the supplied context.
 */
public static <InputT, OutputT> Contextful<Fn<InputT, OutputT>> fn(
    final SerializableFunction<InputT, OutputT> fn) {
  // The context parameter is deliberately unused: the wrapped function needs no side inputs.
  Fn<InputT, OutputT> contextlessFn = (element, c) -> fn.apply(element);
  return new Contextful<>(contextlessFn, Requirements.empty());
}
/** Converts each incoming {@link Row} back to the output type via the schema's from-row function. */
@ProcessElement
public void processElement(@Element Row row, OutputReceiver<OutputT> o) {
  OutputT converted = outputSchemaCoder.getFromRowFunction().apply(row);
  o.output(converted);
} }))