@Override
public Coder<Void> getWriterResultCoder() {
  // Writer results carry no payload, so a Void coder is sufficient.
  return VoidCoder.of();
}
}
@SuppressWarnings("unchecked") private static <T> Coder<T> getDefaultCoder(Class<T> c) { if (Writable.class.isAssignableFrom(c)) { Class<? extends Writable> writableClass = (Class<? extends Writable>) c; return (Coder<T>) WritableCoder.of(writableClass); } else if (Void.class.equals(c)) { return (Coder<T>) VoidCoder.of(); } // TODO: how to use registered coders here? throw new IllegalStateException("Cannot find coder for " + c); }
/**
 * Creates an Excel file source over HDFS.
 *
 * <p>Records are keyed by {@code Void} (keys carry no data) with lazily coded
 * Avro values.
 */
private ExcelHdfsFileSource(UgiDoAs doAs, String filepattern, LazyAvroCoder<IndexedRecord> lac,
    ExtraHadoopConfiguration extraConfig, SerializableSplit serializableSplit) {
  super(doAs, filepattern, ExcelFileInputFormat.class, Void.class, IndexedRecord.class,
      extraConfig, serializableSplit);
  // Raw LazyAvroCoder cast mirrors the parent API's expected coder type.
  setDefaultCoder(VoidCoder.of(), (LazyAvroCoder) lac);
  this.lac = lac;
}
/**
 * Creates a Parquet file source over HDFS backed by {@code AvroParquetInputFormat}.
 *
 * <p>Keys are {@code Void}; values are {@code IndexedRecord}s coded lazily.
 */
private ParquetHdfsFileSource(UgiDoAs doAs, String filepattern, LazyAvroCoder<IndexedRecord> lac,
    ExtraHadoopConfiguration extraConfig, SerializableSplit serializableSplit) {
  // The raw Class cast bridges AvroParquetInputFormat's generics to the parent signature.
  super(doAs, filepattern, (Class) AvroParquetInputFormat.class, Void.class, IndexedRecord.class,
      extraConfig, serializableSplit);
  this.lac = lac;
  setDefaultCoder(VoidCoder.of(), (LazyAvroCoder) lac);
}
/**
 * Builds an HDFS source that reads Excel files as {@code Void}-keyed
 * {@code IndexedRecord} values.
 */
private ExcelHdfsFileSource(UgiDoAs doAs, String filepattern, LazyAvroCoder<IndexedRecord> lac,
    ExtraHadoopConfiguration extraConfig, SerializableSplit serializableSplit) {
  super(doAs, filepattern, ExcelFileInputFormat.class, Void.class, IndexedRecord.class,
      extraConfig, serializableSplit);
  this.lac = lac;
  // Default coders: no key data, lazily resolved Avro schema for values.
  setDefaultCoder(VoidCoder.of(), (LazyAvroCoder) lac);
}
@SuppressWarnings("unchecked") private <T> Coder<T> getDefaultCoder(Class<T> c) { if (Writable.class.isAssignableFrom(c)) { Class<? extends Writable> writableClass = (Class<? extends Writable>) c; return (Coder<T>) WritableCoder.of(writableClass); } else if (Void.class.equals(c)) { return (Coder<T>) VoidCoder.of(); } // TODO: how to use registered coders here? throw new IllegalStateException("Cannot find coder for " + c); }
@Override
public PCollection<T> expand(PCollectionList<T> input) {
  // With nothing to flatten, produce an empty collection; the raw cast
  // reinterprets the Void-coded empty output as the expected element type.
  PCollection empty = input.getPipeline().apply(Create.empty(VoidCoder.of()));
  return (PCollection) empty;
}
}
@Override
public PCollection<KV<Void, T>> expand(PCollection<T> input) {
  // Attach a single null Void key to every element so the materialization
  // behaves like a one-key multimap downstream.
  PCollection keyed = input.apply(ParDo.of(new VoidKeyToMultimapMaterializationDoFn<>()));
  keyed.setCoder(KvCoder.of(VoidCoder.of(), input.getCoder()));
  return keyed;
}
}
@Test
public void testTypeIsPreserved() throws Exception {
  // The coder must report Void as its encoded type.
  TypeDescriptor<Void> expected = TypeDescriptor.of(Void.class);
  assertThat(VoidCoder.of().getEncodedTypeDescriptor(), equalTo(expected));
}
}
@Test
public void parDoWithFnTypeNotParDo() {
  // A Create transform is not a ParDo, so the fn-type matcher must reject it.
  AppliedPTransform<?, ?, ?> createTransform = getAppliedTransform(Create.empty(VoidCoder.of()));
  PTransformMatcher fnTypeMatcher = PTransformMatchers.parDoWithFnType(doFnWithState.getClass());
  assertThat(fnTypeMatcher.matches(createTransform), is(false));
}
public static RunnerApi.TimerSpec translateTimerSpec(TimerSpec timer, SdkComponents components) { return RunnerApi.TimerSpec.newBuilder() .setTimeDomain(translateTimeDomain(timer.getTimeDomain())) // TODO: Add support for timer payloads to the SDK // We currently assume that all payloads are unspecified. .setTimerCoderId(registerCoderOrThrow(components, Timer.Coder.of(VoidCoder.of()))) .build(); }
@Test
public void testNonDeterministicExceptionMultipleReasons() {
  NonDeterministicException cause =
      new NonDeterministicException(VoidCoder.of(), "Root Cause");
  NonDeterministicException e =
      new NonDeterministicException(
          StringUtf8Coder.of(), Arrays.asList("Problem1", "Problem2"), cause);
  // The message lists each reason on its own indented line.
  String want = "StringUtf8Coder is not deterministic because:\n\tProblem1\n\tProblem2";
  assertThat(e.getMessage(), equalTo(want));
}
@Test
public void noInputUnkeyedOutput() {
  PCollection<KV<Integer, Iterable<Void>>> unkeyedOutput =
      p.apply(
          Create.of(KV.<Integer, Iterable<Void>>of(-1, Collections.emptyList()))
              .withCoder(KvCoder.of(VarIntCoder.of(), IterableCoder.of(VoidCoder.of()))));
  p.traverseTopologically(visitor);
  // Having iterable values alone does not make the collection keyed.
  assertThat(visitor.getKeyedPValues(), not(hasItem(unkeyedOutput)));
}
@Override
public PCollection<ElemT> expand(final PCollection<ElemT> input) {
  // Funnel all elements through a single null key, group them so one bundle
  // observes the whole iterable, then strip the key and write it as a view.
  // NOTE(review): relies on setCoder returning the collection for chaining —
  // behavior preserved byte-for-byte.
  input
      .apply(WithKeys.of((Void) null))
      .setCoder(KvCoder.of(VoidCoder.of(), input.getCoder()))
      .apply(GroupByKey.create())
      .apply(Values.create())
      .apply(new WriteView<>(view));
  // The input is returned unchanged; the write is a side effect of expansion.
  return input;
}
}
@Test
public void testSourceSplitVoid() throws Exception {
  PipelineOptions options = PipelineOptionsFactory.create();
  // Five null elements coded with the Void coder.
  CreateSource<Void> source =
      CreateSource.fromIterable(Lists.newArrayList(null, null, null, null, null), VoidCoder.of());
  List<? extends BoundedSource<Void>> shards = source.split(3, options);
  // Splitting must not lose, duplicate, or reorder-beyond-spec any elements.
  SourceTestUtils.assertSourcesEqualReferenceSource(source, shards, options);
}
/**
 * Writes the incoming records to a Parquet sink on HDFS, choosing the bounded
 * or unbounded write path based on the input collection.
 */
@Override
public PDone write(PCollection<IndexedRecord> in) {
  ParquetHdfsFileSink sink = new ParquetHdfsFileSink(doAs, path, overwrite, mergeOutput);
  sink.getExtraHadoopConfiguration().addFrom(getExtraHadoopConfiguration());
  PCollection<KV<Void, IndexedRecord>> keyed = in.apply(ParDo.of(new FormatParquet()));
  keyed = keyed.setCoder(KvCoder.of(VoidCoder.of(), LazyAvroCoder.of()));
  // Unbounded inputs need the streaming-friendly write.
  if (in.isBounded() != PCollection.IsBounded.BOUNDED) {
    return keyed.apply(UnboundedWrite.of(sink));
  }
  return keyed.apply(Write.to(sink));
}
/**
 * Formats records as {@code Void}-keyed pairs and writes them through a
 * Parquet HDFS sink, dispatching on the input's boundedness.
 */
@Override
public PDone write(PCollection<IndexedRecord> in) {
  ParquetHdfsFileSink sink = new ParquetHdfsFileSink(doAs, path, overwrite, mergeOutput);
  sink.getExtraHadoopConfiguration().addFrom(getExtraHadoopConfiguration());
  PCollection<KV<Void, IndexedRecord>> formatted = in.apply(ParDo.of(new FormatParquet()));
  formatted = formatted.setCoder(KvCoder.of(VoidCoder.of(), LazyAvroCoder.of()));
  if (in.isBounded() == PCollection.IsBounded.BOUNDED) {
    return formatted.apply(Write.to(sink));
  }
  // Streaming input: use the unbounded write variant.
  return formatted.apply(UnboundedWrite.of(sink));
}
@Test
public void testListSideInputTranslation() throws Exception {
  // A list side input is materialized as Void-keyed KVs of its elements.
  Object actual = getTranslatedSideInputCoder(ImmutableList.of(11, 13, 17, 23), View.asList());
  assertEquals(ListCoder.of(KvCoder.of(VoidCoder.of(), VarIntCoder.of())), actual);
}
@Test
public void testMapSideInputTranslation() throws Exception {
  // A map side input is materialized as Void-keyed KVs whose values are the
  // map's own key/value pairs.
  Object actual =
      getTranslatedSideInputCoder(ImmutableList.of(KV.of("a", 1), KV.of("b", 3)), View.asMap());
  assertEquals(
      ListCoder.of(KvCoder.of(VoidCoder.of(), KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()))),
      actual);
}