/** Wires up mocked options and cluster info, then builds the planner settings and cluster. */
@Before
public void setup() {
  final ClusterResourceInformation clusterInfo = mock(ClusterResourceInformation.class);
  when(clusterInfo.getExecutorNodeCount()).thenReturn(1);

  optionManager = mock(OptionManager.class);
  when(optionManager.getOption(eq(ExecConstants.SLICE_TARGET)))
      .thenReturn(ExecConstants.SLICE_TARGET_OPTION.getDefault());
  when(optionManager.getOption(eq(PlannerSettings.ENABLE_LEAF_LIMITS.getOptionName())))
      .thenReturn(PlannerSettings.ENABLE_LEAF_LIMITS.getDefault());
  when(optionManager.getOption(eq(PlannerSettings.ENABLE_TRIVIAL_SINGULAR.getOptionName())))
      .thenReturn(PlannerSettings.ENABLE_TRIVIAL_SINGULAR.getDefault());

  plannerSettings =
      new PlannerSettings(DremioTest.DEFAULT_SABOT_CONFIG, optionManager, clusterInfo);
  cluster = RelOptCluster.create(new VolcanoPlanner(plannerSettings), rexBuilder);
}
@Test @Ignore("interval") public void checkReadWriteExtended() throws Exception { final String originalFile = "${WORKING_PATH}/src/test/resources/vector/complex/extended.json".replaceAll( Pattern.quote("${WORKING_PATH}"), Matcher.quoteReplacement(TestTools.getWorkingPath())); final String newTable = "TestExtendedTypes/newjson"; try { testNoResult(String.format("ALTER SESSION SET \"%s\" = 'json'", ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName())); testNoResult(String.format("ALTER SESSION SET \"%s\" = true", ExecConstants.JSON_EXTENDED_TYPES.getOptionName())); // create table test("create table dfs_test.tmp.\"%s\" as select * from dfs.\"%s\"", newTable, originalFile); // check query of table. test("select * from dfs_test.tmp.\"%s\"", newTable); // check that original file and new file match. final byte[] originalData = Files.readAllBytes(Paths.get(originalFile)); final byte[] newData = Files.readAllBytes(Paths.get(BaseTestQuery.getDfsTestTmpSchemaLocation() + '/' + newTable + "/0_0_0.json")); assertEquals(new String(originalData), new String(newData)); } finally { testNoResult(String.format("ALTER SESSION SET \"%s\" = '%s'", ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName(), ExecConstants.OUTPUT_FORMAT_VALIDATOR.getDefault().getValue())); testNoResult(String.format("ALTER SESSION SET \"%s\" = %s", ExecConstants.JSON_EXTENDED_TYPES.getOptionName(), ExecConstants.JSON_EXTENDED_TYPES.getDefault().getValue())); } }
/**
 * Round-trips the TPC-H supplier table through the Parquet writer with dictionary
 * encoding disabled and compression set to 'none'; restores both options afterwards.
 */
@Test
public void testTPCHReadWriteNoDictUncompressed() throws Exception {
  final String disableDictionary = String.format("alter session set \"%s\" = false",
      ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING);
  final String disableCompression = String.format("alter session set \"%s\" = 'none'",
      ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE);
  try {
    test(disableDictionary);
    test(disableCompression);
    runTestAndValidate("*", "*", "cp.\"tpch/supplier.parquet\"",
        "supplier_parquet_no_dict_uncompressed", false);
  } finally {
    // Reset both writer options to their validator defaults.
    test(String.format("alter session set \"%s\" = %b",
        ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING,
        ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING_VALIDATOR
            .getDefault().getBoolVal()));
    test(String.format("alter session set \"%s\" = '%s'",
        ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE,
        ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE_VALIDATOR
            .getDefault().getStringVal()));
  }
}
/**
 * Queries the Mongo extended-types JSON fixture and verifies both the row count
 * and the rendered CSV-style output; restores the output-format and
 * extended-types session options afterwards.
 */
@Test
public void testMongoExtendedTypes() throws Exception {
  final String originalFile =
      "${WORKING_PATH}/src/test/resources/vector/complex/mongo_extended.json".replaceAll(
          Pattern.quote("${WORKING_PATH}"), Matcher.quoteReplacement(TestTools.getWorkingPath()));
  try {
    testNoResult(String.format("ALTER SESSION SET \"%s\" = 'json'",
        ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName()));
    testNoResult(String.format("ALTER SESSION SET \"%s\" = true",
        ExecConstants.JSON_EXTENDED_TYPES.getOptionName()));

    int actualRecordCount = testSql(String.format("select * from dfs.\"%s\"", originalFile));
    assertEquals(
        String.format(
            "Received unexpected number of rows in output: expected=%d, received=%s",
            1, actualRecordCount),
        1, actualRecordCount);

    List<QueryDataBatch> resultList =
        testSqlWithResults(String.format("select * from dfs.\"%s\"", originalFile));
    String actual = getResultString(resultList, ",");
    String expected =
        "dremio_timestamp_millies,bin,bin1\n2015-07-07T03:59:43.488,dremio,dremio\n";
    // Use the statically imported assertEquals (as in the row-count check above)
    // instead of the qualified Assert.assertEquals, for consistency.
    assertEquals(expected, actual);
  } finally {
    // Restore defaults so later tests in the session are unaffected.
    testNoResult(String.format("ALTER SESSION SET \"%s\" = '%s'",
        ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName(),
        ExecConstants.OUTPUT_FORMAT_VALIDATOR.getDefault().getValue()));
    testNoResult(String.format("ALTER SESSION SET \"%s\" = %s",
        ExecConstants.JSON_EXTENDED_TYPES.getOptionName(),
        ExecConstants.JSON_EXTENDED_TYPES.getDefault().getValue()));
  }
}
}
/**
 * With ENABLE_LEAF_LIMITS stubbed to true, the remover must keep the limit below
 * the exchange: both Limit nodes remain in the output plan.
 */
@Test
public void simpleSelectWithLimitWithSoftScanWithLeafLimitsEnabled() {
  when(optionManager.getOption(eq(PlannerSettings.ENABLE_LEAF_LIMITS.getOptionName())))
      .thenReturn(OptionValue.createBoolean(
          OptionType.QUERY, PlannerSettings.ENABLE_LEAF_LIMITS.getOptionName(), true));
  try {
    final Prel belowExchange =
        newLimit(0, 10, newProject(exprs(), rowType(), newSoftScan(rowType())));
    final Prel plan = newScreen(
        newLimit(0, 10, newProject(exprs(), rowType(), newUnionExchange(belowExchange))));

    final Prel rewritten = SimpleLimitExchangeRemover.apply(plannerSettings, plan);

    verifyOutput(rewritten,
        "Screen", "Limit", "Project", "UnionExchange", "Limit", "Project", "SystemScan");
  } finally {
    // Re-stub the option back to its default for subsequent tests.
    when(optionManager.getOption(eq(PlannerSettings.ENABLE_LEAF_LIMITS.getOptionName())))
        .thenReturn(PlannerSettings.ENABLE_LEAF_LIMITS.getDefault());
  }
}
/**
 * Joins two decimal parquet files with the decimal data type enabled and checks
 * the resulting row count; restores the system option afterwards.
 */
@Test
public void testVJoinDecimal() throws Exception {
  try {
    test(String.format("alter system set \"%s\" = true",
        PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY));
    final String query = "select count(*) from "
        + "(select EXPR$0 as d1 from cp.\"join/decimal_join/decimals-500.parquet\")t1"
        + " join "
        + "(select EXPR$0 as d2 from cp.\"join/decimal_join/decimals-a.parquet\")t2"
        + " on t1.d1 = t2.d2";
    testBuilder().sqlQuery(query)
        .unOrdered()
        .baselineColumns("EXPR$0")
        // Uppercase 'L' suffix: lowercase 'l' is easily misread as the digit 1.
        .baselineValues(500L)
        .go();
  } finally {
    final Boolean decimalDefault =
        PlannerSettings.ENABLE_DECIMAL_DATA_TYPE.getDefault().getBoolVal();
    test(String.format("alter system set \"%s\" = %s",
        PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, decimalDefault.toString()));
  }
}
}
@Test @Ignore // TODO file JIRA to fix this public void testFix2967() throws Exception { setSessionOption(PlannerSettings.BROADCAST.getOptionName(), "false"); setSessionOption(PlannerSettings.HASHJOIN.getOptionName(), "false"); setSessionOption(ExecConstants.SLICE_TARGET, "1"); setSessionOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY, "23"); final String TEST_RES_PATH = TestTools.getWorkingPath() + "/src/test/resources"; try { test("select * from dfs.\"%s/join/j1\" j1 left outer join dfs.\"%s/join/j2\" j2 on (j1.c_varchar = j2.c_varchar)", TEST_RES_PATH, TEST_RES_PATH); } finally { setSessionOption(PlannerSettings.BROADCAST.getOptionName(), String.valueOf(PlannerSettings.BROADCAST.getDefault ().getBoolVal())); setSessionOption(PlannerSettings.HASHJOIN.getOptionName(), String.valueOf(PlannerSettings.HASHJOIN.getDefault() .getBoolVal())); setSessionOption(ExecConstants.SLICE_TARGET, String.valueOf(ExecConstants.SLICE_TARGET_DEFAULT)); setSessionOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY, String.valueOf(ExecConstants.MAX_WIDTH_PER_NODE .getDefault().getNumVal())); } }
/**
 * Runs the same query under the old and new parquet reader implementations and
 * compares results with the high-performance (hyper-vector) comparison;
 * restores the reader option afterwards.
 */
public void compareParquetReadersHyperVector(String selection, String table) throws Exception {
  final String query = "select " + selection + " from " + table;
  try {
    testBuilder()
        .ordered()
        .highPerformanceComparison()
        .sqlQuery(query)
        .optionSettingQueriesForTestQuery(
            "alter system set \"store.parquet.use_new_reader\" = false")
        .sqlBaselineQuery(query)
        .optionSettingQueriesForBaseline(
            "alter system set \"store.parquet.use_new_reader\" = true")
        .build()
        .run();
  } finally {
    // Reset the reader implementation to its validator default.
    test("alter system set \"%s\" = %b",
        ExecConstants.PARQUET_NEW_RECORD_READER,
        ExecConstants.PARQUET_RECORD_READER_IMPLEMENTATION_VALIDATOR
            .getDefault().getBoolVal());
  }
}
/**
 * Runs the same query under the old and new parquet reader implementations and
 * compares results row-by-row; restores the reader option afterwards.
 */
public void compareParquetReadersColumnar(String selection, String table) throws Exception {
  final String query = "select " + selection + " from " + table;
  try {
    testBuilder()
        .ordered()
        .sqlQuery(query)
        .optionSettingQueriesForTestQuery(
            "alter system set \"store.parquet.use_new_reader\" = false")
        .sqlBaselineQuery(query)
        .optionSettingQueriesForBaseline(
            "alter system set \"store.parquet.use_new_reader\" = true")
        .build()
        .run();
  } finally {
    // Reset the reader implementation to its validator default.
    test("alter system set \"%s\" = %b",
        ExecConstants.PARQUET_NEW_RECORD_READER,
        ExecConstants.PARQUET_RECORD_READER_IMPLEMENTATION_VALIDATOR
            .getDefault().getBoolVal());
  }
}
/**
 * Runs the same query under the old and new parquet reader implementations,
 * with INT96-as-timestamp disabled for the duration of the comparison;
 * restores the reader option afterwards.
 */
public void compareParquetReadersColumnar(String selection, String table) throws Exception {
  final String query = "select " + selection + " from " + table;
  // The resource is held only for its close() side effect (resets the INT96 option).
  try (AutoCloseable ignored =
      withSystemOption(ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP_VALIDATOR, false)) {
    testBuilder()
        .ordered()
        .sqlQuery(query)
        .optionSettingQueriesForTestQuery(
            "alter system set \"store.parquet.use_new_reader\" = false")
        .sqlBaselineQuery(query)
        .optionSettingQueriesForBaseline(
            "alter system set \"store.parquet.use_new_reader\" = true")
        .build()
        .run();
  } finally {
    // Reset the reader implementation to its validator default.
    test("alter system set \"%s\" = %b",
        ExecConstants.PARQUET_NEW_RECORD_READER,
        ExecConstants.PARQUET_RECORD_READER_IMPLEMENTATION_VALIDATOR
            .getDefault().getBoolVal());
  }
}
/**
 * Writes TPC-H lineitem with two columns cast to DATE (dictionary encoding
 * disabled) and validates the round trip; restores the option afterwards.
 */
@Test
public void testTPCHReadWrite1_date_convertedType() throws Exception {
  try {
    test("alter session set \"%s\" = false",
        ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING);
    final String selection =
        "L_ORDERKEY, L_PARTKEY, L_SUPPKEY, L_LINENUMBER, L_QUANTITY, L_EXTENDEDPRICE, L_DISCOUNT, L_TAX, "
            + "L_RETURNFLAG, L_LINESTATUS, L_SHIPDATE, cast(L_COMMITDATE as DATE) as L_COMMITDATE, cast(L_RECEIPTDATE as DATE) AS L_RECEIPTDATE, L_SHIPINSTRUCT, L_SHIPMODE, L_COMMENT";
    final String validationSelection =
        "L_ORDERKEY, L_PARTKEY, L_SUPPKEY, L_LINENUMBER, L_QUANTITY, L_EXTENDEDPRICE, L_DISCOUNT, L_TAX, "
            + "L_RETURNFLAG, L_LINESTATUS, L_SHIPDATE,L_COMMITDATE ,L_RECEIPTDATE, L_SHIPINSTRUCT, L_SHIPMODE, L_COMMENT";
    runTestAndValidate(selection, validationSelection, "cp.\"tpch/lineitem.parquet\"",
        "lineitem_parquet_converted", false);
  } finally {
    // Reset dictionary encoding to its validator default.
    test("alter session set \"%s\" = %b",
        ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING,
        ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING_VALIDATOR
            .getDefault().getBoolVal());
  }
}