/**
 * Executes the given query against the shared test client and prints the results.
 *
 * @param query the query string to execute
 * @throws Exception if query submission or execution fails
 */
public static void test(final String query) throws Exception {
  QueryTestUtil.test(client, query);
}
/**
 * Execute one or more queries separated by semicolons, and print the results, with the option to
 * add formatted arguments to the query string.
 *
 * @param client Dremio client to use
 * @param query the query string; may contain formatting specifications to be used by
 *     {@link String#format(String, Object...)} when {@code args} are supplied
 * @param args optional args to use in the formatting call for the query string
 * @throws Exception if query submission or execution fails
 */
public static void test(final DremioClient client, final String query, Object... args)
    throws Exception {
  // Only run String.format when the caller actually supplied args: formatting an
  // arg-less query would misparse literal '%' characters (e.g. SQL LIKE patterns)
  // as format specifiers and throw UnknownFormatConversionException.
  final String formatted =
      (args == null || args.length == 0) ? query : String.format(query, args);
  test(client, formatted);
}
/**
 * Executes one or more semicolon-separated queries against the shared test client, optionally
 * expanding {@link String#format(String, Object...)} arguments into the query string.
 *
 * @param query the query string; may contain format specifiers consumed by {@code args}
 * @param args optional arguments substituted into the query string
 * @throws Exception if query submission or execution fails
 */
public static void test(String query, Object... args) throws Exception {
  // Skip String.format when no args were supplied so that literal '%' characters
  // (e.g. SQL LIKE patterns) do not trigger format-specifier parsing errors.
  final String formatted =
      (args == null || args.length == 0) ? query : String.format(query, args);
  QueryTestUtil.test(client, formatted);
}
/**
 * Reproduces the TPC-H query 1 memory-leak scenario and verifies it surfaces as a
 * {@link UserRemoteException} reporting outstanding allocated buffers; any other
 * remote failure is rethrown and fails the test.
 */
@Test
public void tpch01() throws Exception {
  final String sql = getFile("memory/tpch01_memory_leak.sql");
  boolean leakDetected = false;
  try {
    QueryTestUtil.test(client, "alter session set \"planner.slice_target\" = 10; " + sql);
  } catch (UserRemoteException e) {
    if (!e.getMessage().contains("Allocator closed with outstanding buffers allocated")) {
      throw e; // unrelated remote failure — surface it
    }
    leakDetected = true; // expected failure mode — test passes
  }
  if (!leakDetected) {
    fail("Expected UserRemoteException indicating memory leak");
  }
}
/**
 * With "store.parquet.partition_column_limit" forced to 0, partition identification is
 * disabled and only the synthetic "$_dremio_$_update_$" column should be reported for the
 * data set. The previous limit is restored in a finally block so later tests are unaffected.
 */
@Test
public void testGroupScanWithPartitionIdentificationOff() throws Exception {
  final long previousLimit =
      ExecConstants.PARQUET_MAX_PARTITION_COLUMNS_VALIDATOR.getDefault().getNumVal();
  try {
    QueryTestUtil.test(
        getRpcClient(), "alter system set \"store.parquet" + ".partition_column_limit\" = 0");
    final List<String> partitionColumns =
        getPartitionColumnsForDataSet("datasets/parquet_no_partition_identification");
    Assert.assertEquals(1, partitionColumns.size());
    Assert.assertEquals("$_dremio_$_update_$", partitionColumns.get(0));
  } finally {
    QueryTestUtil.test(
        getRpcClient(),
        "alter system set \"store.parquet" + ".partition_column_limit\" = " + previousLimit);
  }
}