  @Test(expected = UnsupportedOperationException.class)
  public void testCircularReferenceBean1() {
    CircularReference1Bean bean = new CircularReference1Bean();
    spark.createDataset(Arrays.asList(bean), Encoders.bean(CircularReference1Bean.class));
  }

  @Test(expected = UnsupportedOperationException.class)
  public void testCircularReferenceBean2() {
    CircularReference3Bean bean = new CircularReference3Bean();
    spark.createDataset(Arrays.asList(bean), Encoders.bean(CircularReference3Bean.class));
  }

  @Test(expected = UnsupportedOperationException.class)
  public void testCircularReferenceBean3() {
    CircularReference4Bean bean = new CircularReference4Bean();
    spark.createDataset(Arrays.asList(bean), Encoders.bean(CircularReference4Bean.class));
  }
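  // A minimal sketch of the kind of cycle the three tests above exercise. The
  // suite's actual CircularReference*Bean definitions live elsewhere in this
  // file; the class and field names below are illustrative assumptions only.
  public static class ExampleSelfRefBean implements java.io.Serializable {
    // The property type refers back to the bean itself, so Encoders.bean()
    // cannot build a finite schema and throws UnsupportedOperationException.
    private ExampleSelfRefBean child;
    public ExampleSelfRefBean getChild() { return child; }
    public void setChild(ExampleSelfRefBean child) { this.child = child; }
  }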
  @Test(expected = RuntimeException.class)
  public void testNullInTopLevelBean() {
    NestedSmallBean bean = new NestedSmallBean();
    // Null is not allowed as a top-level element, so creating the Dataset should fail.
    spark.createDataset(Arrays.asList(bean, null), Encoders.bean(NestedSmallBean.class));
  }
  @Test
  public void testJavaBeanEncoder2() {
    // Regression test for SPARK-12404: encoding beans with Timestamp, Date and
    // BigDecimal properties.
    OuterScopes.addOuterScope(this);
    SimpleJavaBean2 obj = new SimpleJavaBean2();
    obj.setA(new Timestamp(0));
    obj.setB(new Date(0));
    obj.setC(java.math.BigDecimal.valueOf(1));
    Dataset<SimpleJavaBean2> ds =
      spark.createDataset(Arrays.asList(obj), Encoders.bean(SimpleJavaBean2.class));
    ds.collect();
  }
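  // Sketch (assumption, not the suite's actual SimpleJavaBean2 definition) of
  // the bean shape the test above relies on: one property for each of the
  // external types the encoder must round-trip.
  public static class ExampleJavaBean2 implements java.io.Serializable {
    private java.sql.Timestamp a;
    private java.sql.Date b;
    private java.math.BigDecimal c;
    public java.sql.Timestamp getA() { return a; }
    public void setA(java.sql.Timestamp a) { this.a = a; }
    public java.sql.Date getB() { return b; }
    public void setB(java.sql.Date b) { this.b = b; }
    public java.math.BigDecimal getC() { return c; }
    public void setC(java.math.BigDecimal c) { this.c = c; }
  }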
  @Test
  public void test() {
    // SPARK-15285: large numbers of nested JavaBeans used to generate more
    // than 64KB of java bytecode for a single method.
    List<NestedComplicatedJavaBean> data = new ArrayList<>();
    data.add(NestedComplicatedJavaBean.newBuilder().build());
    Dataset<NestedComplicatedJavaBean> ds =
      spark.createDataset(data, Encoders.bean(NestedComplicatedJavaBean.class));
    ds.collectAsList();
  }
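  // A minimal sketch (assumption, not the suite's real NestedComplicatedJavaBean)
  // of the nesting pattern SPARK-15285 guards against: beans whose properties
  // are themselves beans. Before the fix, the code generated to (de)serialize
  // such a bean could exceed the JVM's 64KB bytecode-per-method limit.
  public static class ExampleInnerBean implements java.io.Serializable {
    private String value;
    public String getValue() { return value; }
    public void setValue(String value) { this.value = value; }
  }

  public static class ExampleOuterBean implements java.io.Serializable {
    // The real bean has many more nested properties than shown here.
    private ExampleInnerBean first;
    private ExampleInnerBean second;
    public ExampleInnerBean getFirst() { return first; }
    public void setFirst(ExampleInnerBean first) { this.first = first; }
    public ExampleInnerBean getSecond() { return second; }
    public void setSecond(ExampleInnerBean second) { this.second = second; }
  }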
  @Test
  public void testBeanWithEnum() {
    List<BeanWithEnum> data = Arrays.asList(
      new BeanWithEnum(MyEnum.A, "mira avenue"),
      new BeanWithEnum(MyEnum.B, "flower boulevard"));
    Encoder<BeanWithEnum> encoder = Encoders.bean(BeanWithEnum.class);
    Dataset<BeanWithEnum> ds = spark.createDataset(data, encoder);
    Assert.assertEquals(data, ds.collectAsList());
  }
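  // Sketch (assumption, not the suite's actual BeanWithEnum/MyEnum definitions)
  // of a bean with an enum-typed property like the one the test above encodes;
  // the bean encoder represents enum values by name, as strings.
  public enum ExampleEnum { A, B }

  public static class ExampleEnumBean implements java.io.Serializable {
    private ExampleEnum anEnum;
    private String description;
    public ExampleEnum getAnEnum() { return anEnum; }
    public void setAnEnum(ExampleEnum anEnum) { this.anEnum = anEnum; }
    public String getDescription() { return description; }
    public void setDescription(String description) { this.description = description; }
  }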
  @Test
  public void testEmptyBean() {
    EmptyBean bean = new EmptyBean();
    List<EmptyBean> data = Arrays.asList(bean);
    Dataset<EmptyBean> df = spark.createDataset(data, Encoders.bean(EmptyBean.class));
    Assert.assertEquals(0, df.schema().length());
    Assert.assertEquals(1, df.collectAsList().size());
  }
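  // Sketch (assumption, not the suite's actual EmptyBean definition): a bean
  // with no properties yields a zero-column schema, yet each instance still
  // occupies one row, which is what the test above asserts.
  public static class ExampleEmptyBean implements java.io.Serializable {}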
  @Test
  public void testSerializeNull() {
    NestedSmallBean bean = new NestedSmallBean();
    Encoder<NestedSmallBean> encoder = Encoders.bean(NestedSmallBean.class);
    List<NestedSmallBean> beans = Arrays.asList(bean);
    Dataset<NestedSmallBean> ds1 = spark.createDataset(beans, encoder);
    Assert.assertEquals(beans, ds1.collectAsList());
    Dataset<NestedSmallBean> ds2 =
      ds1.map((MapFunction<NestedSmallBean, NestedSmallBean>) b -> b, encoder);
    Assert.assertEquals(beans, ds2.collectAsList());
  }
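  // Sketch (assumption, not the suite's actual NestedSmallBean definition) of
  // the shape the test above depends on: a bean whose bean-typed property is
  // left null, so the encoder must write and read back a null struct field
  // without throwing.
  public static class ExampleSmallBean implements java.io.Serializable {
    private int i;
    public int getI() { return i; }
    public void setI(int i) { this.i = i; }
  }

  public static class ExampleNestedSmallBean implements java.io.Serializable {
    private ExampleSmallBean nested; // stays null in the round-trip test
    public ExampleSmallBean getNested() { return nested; }
    public void setNested(ExampleSmallBean nested) { this.nested = nested; }
  }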
  @Test
  public void testBeanWithArrayFieldDeserialization() {
    Encoder<Record> encoder = Encoders.bean(Record.class);
    Dataset<Record> dataset = spark
      .read()
      .format("json")
      .schema("id int, intervals array<struct<startTime: bigint, endTime: bigint>>")
      .load("src/test/resources/test-data/with-array-fields.json")
      .as(encoder);
    List<Record> records = dataset.collectAsList();
    Assert.assertEquals(RECORDS, records);
  }
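  // Sketch (assumption; field names inferred from the schema string above) of
  // bean shapes compatible with
  // "id int, intervals array<struct<startTime: bigint, endTime: bigint>>".
  // The suite's actual Record class is defined elsewhere in this file.
  public static class ExampleInterval implements java.io.Serializable {
    private long startTime;
    private long endTime;
    public long getStartTime() { return startTime; }
    public void setStartTime(long startTime) { this.startTime = startTime; }
    public long getEndTime() { return endTime; }
    public void setEndTime(long endTime) { this.endTime = endTime; }
  }

  public static class ExampleRecord implements java.io.Serializable {
    private int id;
    private java.util.List<ExampleInterval> intervals;
    public int getId() { return id; }
    public void setId(int id) { this.id = id; }
    public java.util.List<ExampleInterval> getIntervals() { return intervals; }
    public void setIntervals(java.util.List<ExampleInterval> intervals) { this.intervals = intervals; }
  }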