@Override
public Coder<Void> getWriterResultCoder() {
  return VoidCoder.of();
}
}
@Test
public void testTypeIsPreserved() throws Exception {
  assertThat(
      VoidCoder.of().getEncodedTypeDescriptor(),
      equalTo(TypeDescriptor.of(Void.class)));
}
}
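For context, a minimal round-trip sketch (not from the original test suite) showing why VoidCoder is safe as a placeholder coder: it writes zero bytes on encode and always decodes to null.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.beam.sdk.coders.VoidCoder;

// Minimal sketch, not part of the original tests: VoidCoder encodes null to
// zero bytes and decodes back to null.
public class VoidCoderRoundTrip {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    VoidCoder.of().encode(null, out);
    System.out.println(out.size()); // expected: 0
    Void decoded = VoidCoder.of().decode(new ByteArrayInputStream(out.toByteArray()));
    System.out.println(decoded); // expected: null
  }
}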
@Override
public void add(T input) {
  try {
    ListState<T> partitionedState =
        flinkStateBackend.getPartitionedState(
            namespace.stringKey(), StringSerializer.INSTANCE, flinkStateDescriptor);
    if (storesVoidValues) {
      Preconditions.checkState(input == null, "Expected a null value but was: %s", input);
      // Flink does not allow storing null values.
      // If we have null values, we use the structural null value instead.
      input = (T) VoidCoder.of().structuralValue((Void) input);
    }
    partitionedState.add(input);
  } catch (Exception e) {
    throw new RuntimeException("Error adding to bag state.", e);
  }
}
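A minimal sketch of the substitution performed above, under the assumption that VoidCoder inherits Coder's default structuralValue behavior (which encodes a null value and wraps the resulting bytes): the structural value of null is a non-null placeholder object, which is exactly what a backend that rejects nulls, such as Flink's ListState, needs.

import org.apache.beam.sdk.coders.VoidCoder;

// Sketch assuming the default Coder.structuralValue: for a null input it wraps
// the (empty) encoded bytes, yielding a non-null placeholder.
public class StructuralNullDemo {
  public static void main(String[] args) {
    Object placeholder = VoidCoder.of().structuralValue(null);
    System.out.println(placeholder != null); // expected: true
  }
}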
@SuppressWarnings("unchecked") private static <T> Coder<T> getDefaultCoder(Class<T> c) { if (Writable.class.isAssignableFrom(c)) { Class<? extends Writable> writableClass = (Class<? extends Writable>) c; return (Coder<T>) WritableCoder.of(writableClass); } else if (Void.class.equals(c)) { return (Coder<T>) VoidCoder.of(); } // TODO: how to use registered coders here? throw new IllegalStateException("Cannot find coder for " + c); }
private ExcelHdfsFileSource(UgiDoAs doAs, String filepattern, LazyAvroCoder<IndexedRecord> lac,
    ExtraHadoopConfiguration extraConfig, SerializableSplit serializableSplit) {
  super(doAs, filepattern, ExcelFileInputFormat.class, Void.class, IndexedRecord.class,
      extraConfig, serializableSplit);
  this.lac = lac;
  setDefaultCoder(VoidCoder.of(), (LazyAvroCoder) lac);
}
private ParquetHdfsFileSource(UgiDoAs doAs, String filepattern, LazyAvroCoder<IndexedRecord> lac,
    ExtraHadoopConfiguration extraConfig, SerializableSplit serializableSplit) {
  super(doAs, filepattern, (Class) AvroParquetInputFormat.class, Void.class, IndexedRecord.class,
      extraConfig, serializableSplit);
  this.lac = lac;
  setDefaultCoder(VoidCoder.of(), (LazyAvroCoder) lac);
}
@SuppressWarnings("unchecked") private <T> Coder<T> getDefaultCoder(Class<T> c) { if (Writable.class.isAssignableFrom(c)) { Class<? extends Writable> writableClass = (Class<? extends Writable>) c; return (Coder<T>) WritableCoder.of(writableClass); } else if (Void.class.equals(c)) { return (Coder<T>) VoidCoder.of(); } // TODO: how to use registered coders here? throw new IllegalStateException("Cannot find coder for " + c); }
@Override
public Coder<Void> getAccumulatorCoder(CoderRegistry registry, Coder<Integer> inputCoder) {
  return VoidCoder.of();
}
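For context, a hedged sketch of the kind of CombineFn that pairs with a Void accumulator; DropEverythingFn is illustrative, not taken from the source. A combiner that carries no intermediate state can use Void as its accumulator type, making VoidCoder the natural accumulator coder.

import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.CoderRegistry;
import org.apache.beam.sdk.coders.VoidCoder;
import org.apache.beam.sdk.transforms.Combine;

// Illustrative only: a CombineFn with no intermediate state uses Void as its
// accumulator type, so its accumulator coder is VoidCoder.
class DropEverythingFn extends Combine.CombineFn<Integer, Void, Void> {
  @Override public Void createAccumulator() { return null; }
  @Override public Void addInput(Void accum, Integer input) { return null; }
  @Override public Void mergeAccumulators(Iterable<Void> accums) { return null; }
  @Override public Void extractOutput(Void accum) { return null; }

  @Override
  public Coder<Void> getAccumulatorCoder(CoderRegistry registry, Coder<Integer> inputCoder) {
    return VoidCoder.of();
  }
}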
@SuppressWarnings("unchecked") private <T> Coder<T> getDefaultCoder(Class<T> c) { if (Writable.class.isAssignableFrom(c)) { Class<? extends Writable> writableClass = (Class<? extends Writable>) c; return (Coder<T>) WritableCoder.of(writableClass); } else if (Void.class.equals(c)) { return (Coder<T>) VoidCoder.of(); } // TODO: how to use registered coders here? throw new IllegalStateException("Cannot find coder for " + c); }
@Override
public PCollection<T> expand(PCollectionList<T> input) {
  return (PCollection) input.getPipeline().apply(Create.empty(VoidCoder.of()));
}
}
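A minimal usage sketch (assuming a default runner such as the DirectRunner is on the classpath): Create.empty needs an explicit coder because an empty collection gives Beam nothing to infer an element coder from, and VoidCoder serves when the element type is irrelevant.

import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.coders.VoidCoder;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.values.PCollection;

// Sketch: build an empty PCollection whose element coder is VoidCoder.
public class EmptyCollectionDemo {
  public static void main(String[] args) {
    Pipeline p = Pipeline.create(); // assumes a default runner is available
    PCollection<Void> empty = p.apply(Create.empty(VoidCoder.of()));
    p.run().waitUntilFinish();
  }
}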
private ExcelHdfsFileSource(UgiDoAs doAs, String filepattern, LazyAvroCoder<IndexedRecord> lac,
    int limit, String encoding, String sheetName, long header, long footer, String excelFormat,
    ExtraHadoopConfiguration extraConfig, SerializableSplit serializableSplit) {
  super(doAs, filepattern, ExcelFileInputFormat.class, Void.class, IndexedRecord.class,
      extraConfig, serializableSplit);
  this.lac = lac;
  setDefaultCoder(VoidCoder.of(), (LazyAvroCoder) lac);
  ExtraHadoopConfiguration hadoopConfig = getExtraHadoopConfiguration();
  hadoopConfig.set(ExcelFileInputFormat.TALEND_ENCODING, encoding);
  hadoopConfig.set(ExcelFileInputFormat.TALEND_EXCEL_SHEET_NAME, sheetName);
  hadoopConfig.set(ExcelFileInputFormat.TALEND_HEADER, String.valueOf(header));
  hadoopConfig.set(ExcelFileInputFormat.TALEND_FOOTER, String.valueOf(footer));
  hadoopConfig.set(ExcelFileInputFormat.TALEND_EXCEL_FORMAT, excelFormat);
  // Pass the limit to the reader for performance.
  hadoopConfig.set(ExcelFileInputFormat.TALEND_EXCEL_LIMIT, String.valueOf(limit));
}
@Test
public void keyedWithNullKeyShouldCreateKeyedBundle() throws Exception {
  createKeyedBundle(VoidCoder.of(), null);
}
@Override
public void translate(Flatten.PCollections<T> transform, TranslationContext context) {
  List<PCollection<T>> inputCollections = extractPCollections(context.getInputs());
  if (inputCollections.isEmpty()) {
    // Create a dummy source that never emits anything.
    @SuppressWarnings("unchecked")
    UnboundedSource<T, ?> unboundedSource =
        new ValuesSource<>(Collections.EMPTY_LIST, VoidCoder.of());
    ApexReadUnboundedInputOperator<T, ?> operator =
        new ApexReadUnboundedInputOperator<>(unboundedSource, context.getPipelineOptions());
    context.addOperator(operator, operator.output);
  } else if (inputCollections.size() == 1) {
    context.addAlias(context.getOutput(), inputCollections.get(0));
  } else {
    @SuppressWarnings("unchecked")
    PCollection<T> output = (PCollection<T>) context.getOutput();
    Map<PCollection<?>, Integer> unionTags = Collections.emptyMap();
    flattenCollections(inputCollections, unionTags, output, context);
  }
}
@Override
public PCollection<KV<Void, T>> expand(PCollection<T> input) {
  PCollection output = input.apply(ParDo.of(new VoidKeyToMultimapMaterializationDoFn<>()));
  output.setCoder(KvCoder.of(VoidCoder.of(), input.getCoder()));
  return output;
}
}
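A hedged sketch of the DoFn the snippet above applies; the body below is an assumption about what VoidKeyToMultimapMaterializationDoFn does (pair every element with a single null key), not the actual Beam source.

import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.values.KV;

// Assumed behavior of the void-key pattern: every element is paired with a
// null key, so the whole collection reads as one multimap entry.
class VoidKeyDoFn<T> extends DoFn<T, KV<Void, T>> {
  @ProcessElement
  public void processElement(@Element T element, OutputReceiver<KV<Void, T>> out) {
    out.output(KV.of(null, element));
  }
}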
@Test
public void invalidSideInputThrowsException() {
  ExecutableStage stage = createExecutableStage(Collections.emptyList());
  FlinkBatchSideInputHandlerFactory factory =
      FlinkBatchSideInputHandlerFactory.forStage(stage, context);
  thrown.expect(instanceOf(IllegalArgumentException.class));
  factory.forSideInput(
      "transform-id",
      "side-input",
      MULTIMAP_ACCESS,
      KvCoder.of(VoidCoder.of(), VoidCoder.of()),
      GlobalWindow.Coder.INSTANCE);
}