@Test
public void testDistinctWindow()
{
    // A rank computed over any non-empty partition always starts at 1,
    // so the smallest rnk in the result must be exactly 1.
    MaterializedResult expectedResult = resultBuilder(getSession(), BIGINT)
            .row(1L)
            .build();
    MaterializedResult actualResult = computeActual(
            "SELECT RANK() OVER (PARTITION BY orderdate ORDER BY COUNT(DISTINCT clerk)) rnk " +
            "FROM orders " +
            "GROUP BY orderdate, custkey " +
            "ORDER BY rnk " +
            "LIMIT 1");
    assertEquals(actualResult, expectedResult);
}
@Test
public void testLongArrayApproxPercentile()
{
    // A constant input (12) with a constant percentile array yields the same
    // [12, 12] result for every row, regardless of percentile values.
    int[] orderKeys = {1, 2, 3, 4, 5, 6, 7, 32, 33, 34};
    String[] orderStatuses = {"O", "O", "F", "O", "F", "F", "O", "O", "F", "O"};

    MaterializedResult.Builder expected = resultBuilder(TEST_SESSION, INTEGER, VARCHAR, new ArrayType(BIGINT));
    for (int i = 0; i < orderKeys.length; i++) {
        expected.row(orderKeys[i], orderStatuses[i], ImmutableList.of(12L, 12L));
    }

    assertWindowQuery("approx_percentile(12, array[0.3, 0.7]) over ()", expected.build());
}
@Test
public void testUniqueGroupingValues()
{
    // Two sequence pages produce VARCHAR keys "0".."19", each exactly once,
    // so every group aggregates to count 1 and sum equal to its own key.
    RowPagesBuilder rowPagesBuilder = RowPagesBuilder.rowPagesBuilder(BOOLEAN, VARCHAR, BIGINT);
    List<Page> input = rowPagesBuilder
            .addSequencePage(10, 0, 0, 0)
            .addSequencePage(10, 0, 10, 10)
            .build();

    MaterializedResult.Builder builder = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT, BIGINT);
    for (int i = 0; i < 20; i++) {
        // String.valueOf(i) replaces the redundant format("%s", i): identical output
        // without a varargs array and format-string parse on every iteration.
        builder.row(String.valueOf(i), 1L, (long) i);
    }

    assertOperatorEquals(operatorFactory, driverContext, input, builder.build());
}
@Test
public void testUnnestWithArray()
{
    // Unnest an array(array(bigint)) column and a map(array(bigint),array(bigint))
    // column side by side; the replicated BIGINT key identifies the source row.
    MetadataManager metadata = createTestMetadataManager();
    Type arrayType = metadata.getType(parseTypeSignature("array(array(bigint))"));
    Type mapType = metadata.getType(parseTypeSignature("map(array(bigint),array(bigint))"));

    List<Page> input = rowPagesBuilder(BIGINT, arrayType, mapType)
            .row(
                    1L,
                    arrayBlockOf(new ArrayType(BIGINT), ImmutableList.of(2, 4), ImmutableList.of(3, 6)),
                    mapBlockOf(new ArrayType(BIGINT), new ArrayType(BIGINT), ImmutableMap.of(ImmutableList.of(4, 8), ImmutableList.of(5, 10))))
            .row(2L, arrayBlockOf(new ArrayType(BIGINT), ImmutableList.of(99, 198)), null)
            .row(3L, null, null)
            .pageBreak()
            // Fixed: was a bare int 6; the first column is BIGINT, and every sibling row
            // (and the expected result below) uses a long literal for it.
            .row(
                    6L,
                    arrayBlockOf(new ArrayType(BIGINT), ImmutableList.of(7, 14), ImmutableList.of(8, 16)),
                    mapBlockOf(new ArrayType(BIGINT), new ArrayType(BIGINT), ImmutableMap.of(ImmutableList.of(9, 18), ImmutableList.of(10, 20), ImmutableList.of(11, 22), ImmutableList.of(12, 24))))
            .build();

    OperatorFactory operatorFactory = new UnnestOperator.UnnestOperatorFactory(
            0,
            new PlanNodeId("test"),
            ImmutableList.of(0),
            ImmutableList.of(BIGINT),
            ImmutableList.of(1, 2),
            ImmutableList.of(arrayType, mapType),
            false);

    // Row 3 has nothing to unnest and is dropped; shorter sides pad with nulls.
    MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, new ArrayType(BIGINT), new ArrayType(BIGINT), new ArrayType(BIGINT))
            .row(1L, ImmutableList.of(2L, 4L), ImmutableList.of(4L, 8L), ImmutableList.of(5L, 10L))
            .row(1L, ImmutableList.of(3L, 6L), null, null)
            .row(2L, ImmutableList.of(99L, 198L), null, null)
            .row(6L, ImmutableList.of(7L, 14L), ImmutableList.of(9L, 18L), ImmutableList.of(10L, 20L))
            .row(6L, ImmutableList.of(8L, 16L), ImmutableList.of(11L, 22L), ImmutableList.of(12L, 24L))
            .build();

    assertOperatorEquals(operatorFactory, driverContext, input, expected);
}
// NOTE(review): this method is truncated/garbled as captured — the body has no braces,
// the GroupIdOperatorFactory expression is not assigned to anything, the input builder
// never calls build(), and there is no final assertion. Left byte-identical; restore
// from the original source before relying on it.
// Intent (as far as the visible code shows): feed one 3-row sequence page of
// (BIGINT, VARCHAR, BOOLEAN, BIGINT) through a GROUPING SETS operator with two
// grouping-column mappings, expecting each input row duplicated once per grouping
// set with non-grouped columns nulled and a trailing group-id column (0L / 1L).
public void testGroupId() RowPagesBuilder rowPagesBuilder = rowPagesBuilder(false, ImmutableList.of(), BIGINT, VARCHAR, BOOLEAN, BIGINT); List<Page> input = rowPagesBuilder .addSequencePage(3, 100, 400, 0, 1000) new GroupIdOperatorFactory(0, new PlanNodeId("test"), ImmutableList.of(VARCHAR, BOOLEAN, BIGINT, BIGINT, BIGINT), ImmutableList.of(ImmutableMap.of(0, 1, 1, 2, 3, 0), ImmutableMap.of(2, 3, 3, 0))); MaterializedResult expected = resultBuilder(driverContext.getSession(), VARCHAR, BOOLEAN, BIGINT, BIGINT, BIGINT) .row("400", true, null, 100L, 0L) .row("401", false, null, 101L, 0L) .row("402", true, null, 102L, 0L) .row("500", true, null, 200L, 0L) .row("501", false, null, 201L, 0L) .row("502", true, null, 202L, 0L) .row(null, null, 1000L, 100L, 1L) .row(null, null, 1001L, 101L, 1L) .row(null, null, 1002L, 102L, 1L) .row(null, null, 1100L, 200L, 1L) .row(null, null, 1101L, 201L, 1L) .row(null, null, 1102L, 202L, 1L) .build();
private void verifyPartitionedBucketedTableAsFewRows(HiveStorageFormat storageFormat, String tableName)
{
    // Declared layout: storage format, partition column, bucket column, bucket count.
    TableMetadata tableMetadata = getTableMetadata(catalog, TPCH_SCHEMA, tableName);
    assertEquals(tableMetadata.getMetadata().getProperties().get(STORAGE_FORMAT_PROPERTY), storageFormat);
    assertEquals(tableMetadata.getMetadata().getProperties().get(PARTITIONED_BY_PROPERTY), ImmutableList.of("partition_key"));
    assertEquals(tableMetadata.getMetadata().getProperties().get(BUCKETED_BY_PROPERTY), ImmutableList.of("bucket_key"));
    assertEquals(tableMetadata.getMetadata().getProperties().get(BUCKET_COUNT_PROPERTY), 11);

    // One partition per distinct partition_key value.
    assertEquals(getPartitions(tableName).size(), 3);

    // The three written rows must round-trip; read order is not guaranteed.
    MaterializedResult expectedRows = resultBuilder(
            getSession(),
            canonicalizeType(createUnboundedVarcharType()),
            canonicalizeType(createUnboundedVarcharType()),
            canonicalizeType(createUnboundedVarcharType()))
            .row("a", "b", "c")
            .row("aa", "bb", "cc")
            .row("aaa", "bbb", "ccc")
            .build();
    MaterializedResult actualRows = computeActual("SELECT * from " + tableName);
    assertEqualsIgnoreOrder(actualRows.getMaterializedRows(), expectedRows.getMaterializedRows());
}
private void assertDescribeOutputRowCount(@Language("SQL") String sql)
{
    // Prepare the statement, then verify DESCRIBE OUTPUT reports a single
    // synthetic "rows" column of type bigint.
    Session preparedSession = Session.builder(getSession())
            .addPreparedStatement("my_query", sql)
            .build();
    MaterializedResult expectedOutput = resultBuilder(preparedSession, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN)
            .row("rows", "", "", "", "bigint", 8, false)
            .build();
    assertEqualsIgnoreOrder(computeActual(preparedSession, "DESCRIBE OUTPUT my_query"), expectedOutput);
}
// NOTE(review): this method is truncated/garbled as captured — the body has no braces,
// the aggregation list is cut off mid-argument, the operator factory construction and
// the final assertion are missing, and ".addDriverContext()" dangles with no receiver.
// Left byte-identical; restore from the original source before relying on it.
// Intent (as far as the visible code shows): run COUNT, LONG_SUM, LONG_AVERAGE and a
// max(varchar) aggregation (plus others not visible) over one 100-row sequence page
// and compare against a single expected result row.
@Test public void testAggregation() InternalAggregationFunction maxVarcharColumn = metadata.getFunctionRegistry().getAggregateFunctionImplementation( new Signature("max", AGGREGATE, parseTypeSignature(StandardTypes.VARCHAR), parseTypeSignature(StandardTypes.VARCHAR))); List<Page> input = rowPagesBuilder(VARCHAR, BIGINT, VARCHAR, BIGINT, REAL, DOUBLE, VARCHAR) .addSequencePage(100, 0, 0, 300, 500, 400, 500, 500) .build(); new PlanNodeId("test"), Step.SINGLE, ImmutableList.of(COUNT.bind(ImmutableList.of(0), Optional.empty()), LONG_SUM.bind(ImmutableList.of(1), Optional.empty()), LONG_AVERAGE.bind(ImmutableList.of(1), Optional.empty()), maxVarcharColumn.bind(ImmutableList.of(2), Optional.empty()), .addDriverContext(); MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, BIGINT, DOUBLE, VARCHAR, BIGINT, BIGINT, REAL, DOUBLE, VARCHAR) .row(100L, 4950L, 49.5, "399", 100L, 54950L, 44950.0f, 54950.0, "599") .build();
@Test
public void testDescribeOutputNamedAndUnnamed()
{
    // Covers all three output-name cases: an unnamed expression (_col0),
    // a plain column reference (name), and an aliased column (my_alias).
    Session preparedSession = Session.builder(getSession())
            .addPreparedStatement("my_query", "SELECT 1, name, regionkey AS my_alias FROM nation")
            .build();

    String catalogName = preparedSession.getCatalog().get();
    String schemaName = preparedSession.getSchema().get();
    MaterializedResult expectedOutput = resultBuilder(preparedSession, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN)
            .row("_col0", "", "", "", "integer", 4, false)
            .row("name", catalogName, schemaName, "nation", "varchar(25)", 0, false)
            .row("my_alias", catalogName, schemaName, "nation", "bigint", 8, true)
            .build();

    MaterializedResult actualOutput = computeActual(preparedSession, "DESCRIBE OUTPUT my_query");
    assertEqualsIgnoreOrder(actualOutput, expectedOutput);
}
@Test
public void testDoubleArrayApproxPercentile()
{
    // A constant DOUBLE input with a single-element percentile array yields
    // the same [42.3] result for every row.
    int[] orderKeys = {1, 2, 3, 4, 5, 6, 7, 32, 33, 34};
    String[] orderStatuses = {"O", "O", "F", "O", "F", "F", "O", "O", "F", "O"};

    MaterializedResult.Builder expected = resultBuilder(TEST_SESSION, INTEGER, VARCHAR, new ArrayType(DOUBLE));
    for (int i = 0; i < orderKeys.length; i++) {
        expected.row(orderKeys[i], orderStatuses[i], ImmutableList.of(42.3d));
    }

    assertWindowQuery("approx_percentile(DOUBLE '42.3', array[0.5]) over ()", expected.build());
} }
@Test
public void testSinglePage()
{
    // Degenerate case: one input row aggregates to a single (key, count, sum) row.
    List<Page> inputPages = RowPagesBuilder.rowPagesBuilder(BOOLEAN, VARCHAR, BIGINT)
            .row(false, "a", 5)
            .build();
    MaterializedResult expectedResult = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT, BIGINT)
            .row("a", 1L, 5L)
            .build();
    assertOperatorEquals(operatorFactory, driverContext, inputPages, expectedResult);
}
// NOTE(review): this method is truncated/garbled as captured — the body has no braces,
// yieldSignal and page are used but never declared here, the mergeSortedPages argument
// list is cut off, and the code that drives the WorkProcessor to produce `page` is
// missing. Left byte-identical; restore from the original source before relying on it.
// Intent (as far as the visible code shows): force a yield, merge a single one-row
// page stream sorted DESC_NULLS_LAST on column 0, and assert the materialized output
// still equals the single expected row.
@Test public void testSortingYields() throws Exception yieldSignal.forceYieldForTesting(); List<Type> types = ImmutableList.of(INTEGER); WorkProcessor<Page> mergedPages = MergeSortedPages.mergeSortedPages( ImmutableList.of(WorkProcessor.fromIterable(rowPagesBuilder(types) .row(1) .build())), new SimplePageWithPositionComparator(types, ImmutableList.of(0), ImmutableList.of(DESC_NULLS_LAST)), ImmutableList.of(0), types, MaterializedResult expected = resultBuilder(TEST_SESSION, types) .row(1) .build(); assertEquals(toMaterializedResult(TEST_SESSION, types, ImmutableList.of(page)), expected);
@Test
public void testWindowFunctionWithGroupBy()
{
    // A window function evaluated on top of a GROUP BY over a single constant row.
    MaterializedResult expectedResult = resultBuilder(getSession(), createVarcharType(3), BIGINT)
            .row("foo", 1L)
            .build();
    MaterializedResult actualResult = computeActual(
            "SELECT *, rank() OVER (PARTITION BY x)\n" +
            "FROM (SELECT 'foo' x)\n" +
            "GROUP BY 1");
    assertEquals(actualResult, expectedResult);
}
@Test
public void testSingleGroupingValue()
{
    // All ten rows share the grouping key "a"; the boolean column alternates
    // starting with true, and values 1..10 sum to 55. Page breaks (including a
    // deliberately empty page) exercise multi-page and empty-page handling.
    RowPagesBuilder pages = RowPagesBuilder.rowPagesBuilder(BOOLEAN, VARCHAR, BIGINT);
    for (int value = 1; value <= 5; value++) {
        pages.row(value % 2 == 1, "a", value);
    }
    pages.pageBreak();
    for (int value = 6; value <= 8; value++) {
        pages.row(value % 2 == 1, "a", value);
    }
    pages.pageBreak();
    pages.pageBreak();
    for (int value = 9; value <= 10; value++) {
        pages.row(value % 2 == 1, "a", value);
    }
    List<Page> input = pages.build();

    MaterializedResult expected = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT, BIGINT)
            .row("a", 10L, 55L)
            .build();
    assertOperatorEquals(operatorFactory, driverContext, input, expected);
} }
@Test
public void testSingleStream()
        throws Exception
{
    // A single already-sorted input stream must pass through the merge unchanged,
    // even across a page break.
    List<Type> types = ImmutableList.of(INTEGER, INTEGER);
    List<Page> sortedInput = rowPagesBuilder(types)
            .row(1, 4)
            .row(2, 3)
            .pageBreak()
            .row(3, 2)
            .row(4, 1)
            .build();

    // Sort key: column 0 ascending (nulls first), column 1 descending (nulls first).
    MaterializedResult actualResult = mergeSortedPages(
            types,
            ImmutableList.of(0, 1),
            ImmutableList.of(ASC_NULLS_FIRST, DESC_NULLS_FIRST),
            ImmutableList.of(sortedInput));

    MaterializedResult expectedResult = resultBuilder(TEST_SESSION, types)
            .row(1, 4)
            .row(2, 3)
            .row(3, 2)
            .row(4, 1)
            .build();
    assertEquals(actualResult, expectedResult);
}
@Test
public void testDescribeOutputShowTables()
{
    // DESCRIBE OUTPUT of a prepared SHOW TABLES: one unbounded-varchar "Table"
    // column sourced from information_schema.tables.
    Session preparedSession = Session.builder(getSession())
            .addPreparedStatement("my_query", "SHOW TABLES")
            .build();
    MaterializedResult expectedOutput = resultBuilder(preparedSession, VARCHAR, VARCHAR, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN)
            .row("Table", preparedSession.getCatalog().get(), "information_schema", "tables", "varchar", 0, true)
            .build();
    assertEqualsIgnoreOrder(computeActual(preparedSession, "DESCRIBE OUTPUT my_query"), expectedOutput);
}
// NOTE(review): this method is truncated/garbled as captured — the body has no braces,
// the factory-argument list (0, PlanNodeId, ...) has no enclosing constructor call,
// `input` and `smallPagesSpillThresholdSize` are used but never declared here, and the
// factory/driver setup between the builder args and the result loop is missing.
// Left byte-identical; restore from the original source before relying on it.
// Intent (as far as the visible code shows): aggregate enough single-BIGINT rows to
// cross a spill threshold, then verify each input value i produces an (i, i) result
// row, ignoring order.
@Test public void testMergeWithMemorySpill() RowPagesBuilder rowPagesBuilder = rowPagesBuilder(BIGINT); 0, new PlanNodeId("test"), ImmutableList.of(BIGINT), ImmutableList.of(0), ImmutableList.of(), Step.SINGLE, false, MaterializedResult.Builder resultBuilder = resultBuilder(driverContext.getSession(), BIGINT, BIGINT); for (int i = 0; i < smallPagesSpillThresholdSize + 10; ++i) { resultBuilder.row((long) i, (long) i); assertOperatorEqualsIgnoreOrder(operatorFactory, driverContext, input, resultBuilder.build());
@Test
public void testShowSchemas()
{
    // SHOW SCHEMAS must contain at least the session's default schema
    // (falling back to "tpch" when the session has none set).
    MaterializedResult actualSchemas = computeActual("SHOW SCHEMAS").toTestTypes();
    String defaultSchema = getQueryRunner().getDefaultSession().getSchema().orElse("tpch");
    MaterializedResult expectedSubset = MaterializedResult.resultBuilder(getQueryRunner().getDefaultSession(), VARCHAR)
            .row(defaultSchema)
            .build();
    assertContains(actualSchemas, expectedSubset);
}
@Test
public void testUnnestNonNumericDoubles()
{
    // Infinities and NaN must survive unnesting of both array(double) and
    // map(bigint,double) elements without being mangled.
    MetadataManager metadata = createTestMetadataManager();
    Type doubleArrayType = metadata.getType(parseTypeSignature("array(double)"));
    Type doubleMapType = metadata.getType(parseTypeSignature("map(bigint,double)"));

    List<Page> inputPages = rowPagesBuilder(BIGINT, doubleArrayType, doubleMapType)
            .row(
                    1L,
                    arrayBlockOf(DOUBLE, NEGATIVE_INFINITY, POSITIVE_INFINITY, NaN),
                    mapBlockOf(BIGINT, DOUBLE, ImmutableMap.of(1, NEGATIVE_INFINITY, 2, POSITIVE_INFINITY, 3, NaN)))
            .build();

    // Replicate column 0; unnest columns 1 and 2.
    OperatorFactory unnestFactory = new UnnestOperator.UnnestOperatorFactory(
            0,
            new PlanNodeId("test"),
            ImmutableList.of(0),
            ImmutableList.of(BIGINT),
            ImmutableList.of(1, 2),
            ImmutableList.of(doubleArrayType, doubleMapType),
            false);

    MaterializedResult expectedRows = resultBuilder(driverContext.getSession(), BIGINT, DOUBLE, BIGINT, DOUBLE)
            .row(1L, NEGATIVE_INFINITY, 1L, NEGATIVE_INFINITY)
            .row(1L, POSITIVE_INFINITY, 2L, POSITIVE_INFINITY)
            .row(1L, NaN, 3L, NaN)
            .build();
    assertOperatorEquals(unnestFactory, driverContext, inputPages, expectedRows);
}
@Test
public void testMergeHyperLogLogWithNulls()
{
    // IF(orderstatus = 'O', custkey) yields NULL for non-'O' rows; merging the
    // HLLs must skip those NULLs and report the cardinality of 'O' custkeys.
    MaterializedResult expectedResult = resultBuilder(getSession(), BIGINT)
            .row(1001L)
            .build();
    MaterializedResult actualResult = computeActual(
            "SELECT cardinality(merge(create_hll(IF(orderstatus = 'O', custkey)))) FROM orders");
    assertEquals(actualResult.getMaterializedRows(), expectedResult.getMaterializedRows());
}