/**
 * Builds a HiveConf pointing at the given metastore with SASL enabled,
 * or {@code null} when token-based authentication is disabled.
 *
 * @param metaStoreURI     thrift URI of the Hive metastore
 * @param tokenAuthEnabled whether delegation-token authentication is in use
 * @return a configured HiveConf, or null if token auth is off
 */
private HiveConf createHiveConf(String metaStoreURI, boolean tokenAuthEnabled) {
  // Without token auth there is nothing to configure for the metastore.
  if (!tokenAuthEnabled) {
    return null;
  }
  HiveConf conf = new HiveConf();
  conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
  // Thrift SASL is required for token-based authentication to the metastore.
  conf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
  return conf;
}
/**
 * Creates a HiveConf for a secure (Kerberos) metastore connection.
 *
 * @param metaStoreURI           thrift URI of the Hive metastore
 * @param hiveMetaStorePrincipal Kerberos principal of the metastore service
 * @return a HiveConf configured for SASL-secured metastore access
 * @throws IOException declared for callers; propagated from configuration setup
 */
public HiveConf createHiveConf(String metaStoreURI, String hiveMetaStorePrincipal) throws IOException {
  HiveConf conf = new HiveConf();
  conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
  // Retry transient thrift connection failures a few times before giving up.
  conf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  // Kerberos-secured metastore: SASL on, plus the service principal to authenticate against.
  conf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
  conf.set(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname, hiveMetaStorePrincipal);
  return conf;
}
@Override public void init(QueryState queryState, LogHelper console, Hive db) { super.init(queryState, console, db); // Tez requires us to use RPC for the query plan HiveConf.setBoolVar(conf, ConfVars.HIVE_RPC_QUERY_PLAN, true); // We require the use of recursive input dirs for union processing conf.setBoolean("mapred.input.dir.recursive", true); }
public static HiveConf getHiveConf() { HiveConf conf = new HiveConf(); // String metastoreDBLocation = "jdbc:derby:databaseName=/tmp/metastore_db;create=true"; // conf.set("javax.jdo.option.ConnectionDriverName","org.apache.derby.jdbc.EmbeddedDriver"); // conf.set("javax.jdo.option.ConnectionURL",metastoreDBLocation); conf.set("fs.raw.impl", RawFileSystem.class.getName()); conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, txnMgr); conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true); return conf; }
/** Runs the delete-event filtering scenario with event filtering turned ON. */
@Test
public void testDeleteEventFilteringOn3() throws Exception {
  conf.setBoolVar(HiveConf.ConfVars.FILTER_DELETE_EVENTS, true);
  testDeleteEventFiltering3();
}
/** Runs the original-files delete-event scenario with event filtering turned OFF. */
@Test
public void testDeleteEventOriginalFilteringOff2() throws Exception {
  conf.setBoolVar(HiveConf.ConfVars.FILTER_DELETE_EVENTS, false);
  testDeleteEventOriginalFiltering2();
}
/**
 * Prepares each test with a fully transactional table and vectorized
 * execution enabled.
 */
@Override
@Before
public void setUp() throws Exception {
  setUpWithTableProperties("'transactional'='true','transactional_properties'='default'");
  // These tests exercise the vectorized code path.
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true);
}
/** Re-runs the multi-statement transaction test with vectorization enabled. */
@Test
public void testMultiStatementVectorized() throws Exception {
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true);
  testMultiStatement(true);
}

private void loadDataUpdate(boolean isVectorized) throws Exception {
/** Re-runs the micro-managed add-partition test with vectorization enabled. */
@Test
public void addPartitionMMVectorized() throws Exception {
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true);
  addPartitionMM(true);
}
/**
 * Creates a vectorized Parquet record reader over the shared test file using
 * the supplied read schema.
 *
 * @param schemaString Parquet read schema to apply
 * @param conf         base configuration to extend for the reader
 * @return a reader positioned over the test file's split
 */
public static VectorizedParquetRecordReader createTestParquetReader(String schemaString, Configuration conf)
    throws IOException, InterruptedException, HiveException {
  conf.set(PARQUET_READ_SCHEMA, schemaString);
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true);
  HiveConf.setVar(conf, HiveConf.ConfVars.PLAN, "//tmp");

  // Build a job over the test file and initialize the row-batch context
  // before constructing the reader.
  Job vectorJob = new Job(conf, "read vector");
  ParquetInputFormat.setInputPaths(vectorJob, file);
  initialVectorizedRowBatchCtx(conf);
  return new VectorizedParquetRecordReader(getFileSplit(vectorJob), new JobConf(conf));
}
@BeforeClass public static void setUp() throws Exception { LOG.debug("Setting up output service"); Configuration conf = new Configuration(); // Pick random avail port HiveConf.setIntVar(conf, HiveConf.ConfVars.LLAP_DAEMON_OUTPUT_SERVICE_PORT, 0); HiveConf.setBoolVar(conf, HiveConf.ConfVars.LLAP_OUTPUT_FORMAT_ARROW, false); LlapOutputFormatService.initializeAndStart(conf, null); service = LlapOutputFormatService.get(); LlapProxy.setDaemon(true); LOG.debug("Output service up"); }
/**
 * Configures the shared HiveConf for these transaction-manager tests:
 * SQL-standard authorization, vectorization off, txn DB defaults, and
 * non-strict dynamic partitioning.
 */
public TestDbTxnManager2() throws Exception {
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  conf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, false);
  // Apply the standard transaction-database test settings.
  TxnDbUtil.setConfValues(conf);
  conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
}

@Before
/** Verifies runtime-stats reuse when stats persistence is scoped to the query. */
@Test
public void testStatCachingQuery() throws Exception {
  HiveConf conf = env_setup.getTestCtx().hiveConf;
  // Persist re-execution stats only for the lifetime of the query.
  conf.setVar(ConfVars.HIVE_QUERY_REEXECUTION_STATS_PERSISTENCE, "query");
  conf.setBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ALWAYS_COLLECT_OPERATOR_STATS, true);
  checkRuntimeStatsReuse(false, false, false);
}
/**
 * Builds a driver against the shared test HiveConf with vectorization
 * disabled and SQL-standard authorization enabled.
 *
 * @return a new IDriver bound to the started session
 */
private static IDriver createDriver() {
  HiveConf conf = env_setup.getTestCtx().hiveConf;
  conf.setBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED, false);
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  SessionState.start(conf);
  return DriverFactory.newDriver(conf);
}
}
/**
 * Builds a driver against the shared test HiveConf with SQL-standard
 * authorization enabled and concurrency support turned off.
 *
 * @return a new IDriver bound to the started session
 */
private static IDriver createDriver() {
  HiveConf conf = env_setup.getTestCtx().hiveConf;
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  SessionState.start(conf);
  return DriverFactory.newDriver(conf);
}
/**
 * HiveServer2 must refuse to start when PAM auth is configured for the web UI
 * but SSL is disabled; expects an IllegalArgumentException at init.
 */
@Test
public void testSslIsFalse() {
  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage(is(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL.varname
      + " has false value. It is recommended to set to true when PAM is used."));

  // PAM on, SSL off: an invalid combination that init() must reject.
  hiveConf.setVar(ConfVars.HIVE_SERVER2_PAM_SERVICES, "sshd");
  hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL, false);
  hiveServer2 = new HiveServer2();
  hiveServer2.init(hiveConf);
}
/**
 * Prepares each test with metrics enabled, a temp resources directory, and a
 * 15-second compile-lock timeout; resets the metrics factory.
 */
@Before
public void init() throws Exception {
  conf = new HiveConf();
  conf.setBoolVar(HIVE_SERVER2_METRICS_ENABLED, true);
  conf.setVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR, System.getProperty("java.io.tmpdir"));
  conf.setTimeVar(HIVE_SERVER2_COMPILE_LOCK_TIMEOUT, 15, TimeUnit.SECONDS);
  // Restart metrics so each test observes a clean registry.
  MetricsFactory.close();
  MetricsFactory.init(conf);
}
/**
 * With parallel compilation on and no quota (-1), all sessions should compile
 * concurrently with no waiting or timed-out compile operations.
 */
@Test
public void testParallelCompilationWithUnboundedQuota() throws Exception {
  conf.setBoolVar(HIVE_SERVER2_PARALLEL_COMPILATION, true);
  // -1 means no limit on concurrent compilations.
  conf.setIntVar(HIVE_SERVER2_PARALLEL_COMPILATION_LIMIT, -1);
  initDriver(conf, 10);

  List<CommandProcessorResponse> responses = compileAndRespond(10);

  verifyThatWaitingCompileOpsCountIsEqualTo(0);
  verifyThatTimedOutCompileOpsCountIsZero(responses);
  verifyThatConcurrentCompilationWasIndeed(responses);
}
/**
 * With parallel compilation on, no quota, but a single session, compilations
 * must still serialize (no concurrent compilation), with no waits or timeouts.
 */
@Test
public void testParallelCompilationWithUnboundedQuotaAndSingleSession() throws Exception {
  conf.setBoolVar(HIVE_SERVER2_PARALLEL_COMPILATION, true);
  // -1 means no limit on concurrent compilations.
  conf.setIntVar(HIVE_SERVER2_PARALLEL_COMPILATION_LIMIT, -1);
  initDriver(conf, 10);

  // 'true' routes all statements through one session.
  List<CommandProcessorResponse> responses = compileAndRespond(true, 10);

  verifyThatWaitingCompileOpsCountIsEqualTo(0);
  verifyThatTimedOutCompileOpsCountIsZero(responses);
  verifyThatNoConcurrentCompilationWasIndeed(responses);
}
/**
 * With parallel compilation on and a quota of 2, concurrent compilation must
 * still be observed across sessions, with no waits or timeouts.
 */
@Test
public void testParallelCompilationWithMultipleQuotas() throws Exception {
  conf.setBoolVar(HIVE_SERVER2_PARALLEL_COMPILATION, true);
  // Allow at most two compilations at a time.
  conf.setIntVar(HIVE_SERVER2_PARALLEL_COMPILATION_LIMIT, 2);
  initDriver(conf, 10);

  List<CommandProcessorResponse> responses = compileAndRespond(10);

  verifyThatWaitingCompileOpsCountIsEqualTo(0);
  verifyThatTimedOutCompileOpsCountIsZero(responses);
  verifyThatConcurrentCompilationWasIndeed(responses);
}