@Category(NeedsRunner.class) @Test public void testMissingRunWithDisabledEnforcement() throws Exception { pipeline.enableAbandonedNodeEnforcement(false); addTransform(pCollection(pipeline)); // disable abandoned node detection }
/** A comma-separated FROM list (implicit cross join) must be rejected at planning time. */
@Test(expected = UnsupportedOperationException.class)
public void testException_crossJoin() throws Exception {
  pipeline.enableAbandonedNodeEnforcement(false);
  String crossJoinSql = "SELECT * FROM ORDER_DETAILS1 o1, ORDER_DETAILS2 o2";
  queryFromOrderTables(crossJoinSql);
  pipeline.run();
}
/** Traversing a pipeline that contains an invalid composite transform must fail validation. */
@Test(expected = IllegalArgumentException.class)
public void testOutputChecking() throws Exception {
  p.enableAbandonedNodeEnforcement(false);
  p.apply(new InvalidCompositeTransform());
  // The output check fires during topological traversal, not during apply().
  p.traverseTopologically(new Pipeline.PipelineVisitor.Defaults() {});
}
/** {@link Top#of} must reject a negative element count. */
@Test
public void testCountConstraint() {
  p.enableAbandonedNodeEnforcement(false);
  PCollection<String> words =
      p.apply(Create.of(Arrays.asList(COLLECTION)).withCoder(StringUtf8Coder.of()));

  // The validation error mentions the ">= 0" precondition on the count.
  expectedEx.expect(IllegalArgumentException.class);
  expectedEx.expectMessage(Matchers.containsString(">= 0"));
  words.apply(Top.of(-1, new OrderByLength()));
}
/** {@code COUNT(DISTINCT *)} is not valid SQL and must fail to parse. */
@Test
public void testUnsupportedDistinct() throws Exception {
  exceptions.expect(ParseException.class);
  exceptions.expectCause(hasMessage(containsString("Encountered \"*\"")));
  pipeline.enableAbandonedNodeEnforcement(false);
  String sql = "SELECT f_int2, COUNT(DISTINCT *) AS `size` " + "FROM PCOLLECTION GROUP BY f_int2";
  // The parse failure surfaces when the SqlTransform is applied; the resulting
  // PCollection was previously bound to an unused local, which is dropped here.
  boundedInput1.apply("testUnsupportedDistinct", SqlTransform.query(sql));
  pipeline.run().waitUntilFinish();
}
/** An unnamed ParDo derives its output name from the DoFn class, with the "DoFn" suffix trimmed. */
@Test
public void testParDoOutputNameBasedOnDoFnWithTrimmedSuffix() {
  pipeline.enableAbandonedNodeEnforcement(false);
  PCollection<String> result = pipeline.apply(Create.of(1)).apply(ParDo.of(new TestDoFn()));
  // "TestDoFn" -> "Test" in the generated transform name.
  assertThat(result.getName(), containsString("ParDo(Test)"));
}
/** An explicitly labeled ParDo uses that label in its output name. */
@Test
public void testParDoOutputNameBasedOnLabel() {
  pipeline.enableAbandonedNodeEnforcement(false);
  PCollection<String> result =
      pipeline.apply(Create.of(1)).apply("MyParDo", ParDo.of(new TestDoFn()));
  assertThat(result.getName(), containsString("MyParDo"));
}
/** Applying a read from a runtime-valued option must not evaluate the option at graph time. */
@Test
public void testRuntimeOptionsNotCalledInApply() throws Exception {
  p.enableAbandonedNodeEnforcement(false);
  RuntimeTestOptions opts = PipelineOptionsFactory.as(RuntimeTestOptions.class);
  // getInput() yields a deferred ValueProvider; apply() must succeed without resolving it.
  p.apply(TextIO.read().from(opts.getInput()));
}
/**
 * Create.timestamped of an empty list has nothing to infer a Coder from, and the
 * failure message should point at every way to supply one explicitly.
 */
@Test
public void testCreateTimestampedEmptyUnspecifiedCoder() {
  p.enableAbandonedNodeEnforcement(false);

  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage("determine a default Coder");
  thrown.expectMessage("Create.empty(Coder)");
  thrown.expectMessage("Create.empty(TypeDescriptor)");
  thrown.expectMessage("withCoder(Coder)");
  thrown.expectMessage("withType(TypeDescriptor)");

  p.apply(Create.timestamped(new ArrayList<>()));
}
/** Selecting a column that exists in no registered table must fail to parse. */
@Test
public void testProjectUnknownField() throws Exception {
  exceptions.expect(ParseException.class);
  exceptions.expectCause(hasMessage(containsString("Column 'f_int_na' not found in any table")));
  pipeline.enableAbandonedNodeEnforcement(false);
  String sql = "SELECT f_int_na FROM TABLE_A";
  // The failure surfaces when the SqlTransform is applied; the resulting
  // PCollection was previously bound to an unused local, which is dropped here.
  PCollectionTuple.of(new TupleTag<>("TABLE_A"), boundedInput1)
      .apply("testProjectUnknownField", SqlTransform.query(sql));
  pipeline.run().waitUntilFinish();
}
}
/** Applying a write to a runtime-valued option must not evaluate the option at graph time. */
@Test
public void testRuntimeOptionsNotCalledInApply() throws Exception {
  p.enableAbandonedNodeEnforcement(false);
  RuntimeTestOptions opts = PipelineOptionsFactory.as(RuntimeTestOptions.class);
  // getOutput() yields a deferred ValueProvider; apply() must succeed without resolving it.
  p.apply(Create.of("")).apply(TextIO.write().to(opts.getOutput()));
}
/** {@link Sample#fixedSizeGlobally} must reject a negative sample size. */
@Test(expected = IllegalArgumentException.class)
public void testSampleNegative() {
  pipeline.enableAbandonedNodeEnforcement(false);
  PCollection<Integer> elements =
      pipeline.apply(
          Create.of(ImmutableList.copyOf(DATA)).withCoder(BigEndianIntegerCoder.of()));
  elements.apply(Sample.fixedSizeGlobally(-1));
}
/** TextIO reads take a default name, or the caller-supplied label when one is given. */
@Test
public void testReadNamed() throws Exception {
  File emptyFile = tempFolder.newFile();
  p.enableAbandonedNodeEnforcement(false);

  // Default name when no label is supplied.
  assertEquals("TextIO.Read/Read.out", p.apply(TextIO.read().from("somefile")).getName());
  // Explicit label replaces the default prefix.
  assertEquals(
      "MyRead/Read.out", p.apply("MyRead", TextIO.read().from(emptyFile.getPath())).getName());
}
/**
 * A grouped aggregation over an unbounded input in the global window with the default
 * trigger can never emit, so the SQL planner must reject it.
 */
@Test
public void testUnsupportedGlobalWindowWithDefaultTrigger() {
  exceptions.expect(UnsupportedOperationException.class);
  pipeline.enableAbandonedNodeEnforcement(false);

  PCollection<Row> globallyWindowed =
      unboundedInput1.apply(
          "unboundedInput1.globalWindow",
          Window.<Row>into(new GlobalWindows()).triggering(DefaultTrigger.of()));

  String sql = "SELECT f_int2, COUNT(*) AS `size` FROM PCOLLECTION GROUP BY f_int2";
  globallyWindowed.apply("testUnsupportedGlobalWindows", SqlTransform.query(sql));
}
/** finishSpecifying must fail when no Coder can be inferred for the output type. */
@Test
public void testFinishSpecifyingShouldFailIfNoCoderInferrable() {
  p.enableAbandonedNodeEnforcement(false);

  PCollection<Integer> source = p.apply(Create.of(1, 2, 3));
  ParDo.SingleOutput<Integer, EmptyClass> producer = ParDo.of(new EmptyClassDoFn());
  // EmptyClass has no registered or inferrable Coder.
  PCollection<EmptyClass> unencodable = source.apply(producer);

  thrown.expect(IllegalStateException.class);
  thrown.expectMessage("Unable to return a default Coder");
  thrown.expectMessage("Inferring a Coder from the CoderRegistry failed");
  unencodable.finishSpecifying(source, producer);
}
}
/** Duplicate auto-generated transform names produce a warning when checking is set to WARNING. */
@Test
public void testStableUniqueNameWarning() {
  pipeline.enableAbandonedNodeEnforcement(false);
  pipeline.getOptions().setStableUniqueNames(CheckEnabled.WARNING);

  // Two identical unnamed applications collide on the generated name.
  pipeline.apply(Create.of(5, 6, 7));
  pipeline.apply(Create.of(5, 6, 7));

  ((Pipeline) pipeline).validate(pipeline.getOptions());
  logged.verifyWarn("do not have stable unique names");
}
/** {@link Latest#perKey} should propagate the input KV coder to its output. */
@Test
public void testPerKeyOutputCoder() {
  p.enableAbandonedNodeEnforcement(false);

  KvCoder<String, Long> kvCoder =
      KvCoder.of(AvroCoder.of(String.class), AvroCoder.of(Long.class));
  PCollection<KV<String, Long>> latest =
      p.apply(Create.of(KV.of("foo", 1L)).withCoder(kvCoder)).apply(Latest.perKey());

  assertEquals("Should use input coder for outputs", kvCoder, latest.getCoder());
}
/** A null default value for a left outer join must be rejected eagerly. */
@Test(expected = NullPointerException.class)
public void testJoinNullValueIsNull() {
  p.enableAbandonedNodeEnforcement(false);
  Join.leftOuterJoin(
      p.apply("CreateLeft", Create.empty(KvCoder.of(StringUtf8Coder.of(), VarLongCoder.of()))),
      p.apply(
          "CreateRight", Create.empty(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()))),
      null);
}
}
/** Builds a minimal test pipeline ending in a {@link Combine.GroupedValues} transform. */
private static TestPipeline createCombineGroupedValuesPipeline() {
  TestPipeline testPipeline = TestPipeline.create().enableAbandonedNodeEnforcement(false);

  PCollection<KV<String, Integer>> keyedInput =
      testPipeline
          .apply(Create.of(KV.of("key", 1)))
          .setCoder(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()));
  keyedInput.apply(GroupByKey.create()).apply(Combine.groupedValues(new SumCombineFn()));

  return testPipeline;
}
/** TFRecordIO reads take a default name, or the caller-supplied label when one is given. */
@Test
public void testReadNamed() {
  writePipeline.enableAbandonedNodeEnforcement(false);

  // Default name when no label is supplied.
  assertEquals(
      "TFRecordIO.Read/Read.out",
      writePipeline.apply(TFRecordIO.read().from("foo.*").withoutValidation()).getName());
  // Explicit label replaces the default prefix.
  assertEquals(
      "MyRead/Read.out",
      writePipeline
          .apply("MyRead", TFRecordIO.read().from("foo.*").withoutValidation())
          .getName());
}