/**
 * Returns whether the given configuration key is on the script-env blacklist,
 * i.e. must not be exported as an environment variable.
 *
 * <p>The blacklist is built lazily on first call from
 * {@code hive.script.operator.env.blacklist} and cached in
 * {@code blackListedConfEntries} for subsequent lookups.
 */
boolean blackListed(Configuration conf, String name) {
  if (blackListedConfEntries == null) {
    // First call: populate the cache from the configured (or default) blacklist.
    blackListedConfEntries = new HashSet<String>();
    if (conf != null) {
      String blacklist = conf.get(
          HiveConf.ConfVars.HIVESCRIPT_ENV_BLACKLIST.toString(),
          HiveConf.ConfVars.HIVESCRIPT_ENV_BLACKLIST.getDefaultValue());
      if (blacklist != null && !blacklist.isEmpty()) {
        for (String entry : blacklist.split(",")) {
          blackListedConfEntries.add(entry);
        }
      }
    }
  }
  return blackListedConfEntries.contains(name);
}
/** * Get the ensemble server addresses from the configuration. The format is: host1:port, * host2:port.. * * @param conf configuration **/ private static String getQuorumServers(Configuration conf) { String[] hosts = conf.getTrimmedStrings(ConfVars.HIVE_ZOOKEEPER_QUORUM.varname); String port = conf.get(ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT.varname, ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT.getDefaultValue()); StringBuilder quorum = new StringBuilder(); for (int i = 0; i < hosts.length; i++) { quorum.append(hosts[i].trim()); if (!hosts[i].contains(":")) { // if the hostname doesn't contain a port, add the configured port to hostname quorum.append(":"); quorum.append(port); } if (i != hosts.length - 1) { quorum.append(","); } } return quorum.toString(); }
/**
 * Tells whether {@code name} is a blacklisted configuration key that must not
 * be turned into an environment variable.
 *
 * <p>On first use the comma-separated blacklist is read from the config (or
 * its default) and cached in {@code blackListedConfEntries}.
 */
boolean blackListed(Configuration conf, String name) {
  if (blackListedConfEntries != null) {
    return blackListedConfEntries.contains(name);
  }
  // Lazy initialization: build the cached entry set exactly once.
  blackListedConfEntries = new HashSet<String>();
  if (conf != null) {
    String bl = conf.get(
        HiveConf.ConfVars.HIVESCRIPT_ENV_BLACKLIST.toString(),
        HiveConf.ConfVars.HIVESCRIPT_ENV_BLACKLIST.getDefaultValue());
    if (bl != null && !bl.isEmpty()) {
      Collections.addAll(blackListedConfEntries, bl.split(","));
    }
  }
  return blackListedConfEntries.contains(name);
}
// NOTE(review): incomplete fragment — a lone argument/expression from a larger
// call site (reads the default LLAP daemon queue name). The enclosing statement
// is not visible here, so it is left untouched.
HiveConf.ConfVars.LLAP_DAEMON_QUEUE_NAME.getDefaultValue());
// NOTE(review): incomplete fragment of a SET-command handler — braces are
// unbalanced and the enclosing method is not visible, so the code is left
// byte-identical. It appears to strip the "hiveconf:"/"metaconf:" prefixes and
// reset variables to their defaults via SetProcessor.setConf — TODO confirm
// against the full method.
String propName = varname.substring(SystemVariables.HIVECONF_PREFIX.length()); nonErrorMessage = SetProcessor.setConf( varname, propName, getConfVar(propName).getDefaultValue(), false); } else if (varname.startsWith(SystemVariables.METACONF_PREFIX)) { String propName = varname.substring(SystemVariables.METACONF_PREFIX.length()); return SessionState.get().getHiveVariables(); }).substitute(ss.getConf(), confVars.getDefaultValue())); } else { String defaultVal = getConfVar(varname).getDefaultValue(); nonErrorMessage = SetProcessor.setConf(varname, varname, defaultVal, true); if (varname.equals(HiveConf.ConfVars.HIVE_SESSION_HISTORY_ENABLED.toString())) {
// NOTE(review): truncated method — the closing brace (and possibly more setup
// statements) are not visible in this chunk, so the code is left byte-identical.
// Visible part picks a free web-UI port and seeds a HiveConf with the metastore
// password.
@BeforeClass public static void beforeTests() throws Exception { webUIPort = MetaStoreTestUtils.findFreePortExcepting( Integer.valueOf(ConfVars.HIVE_SERVER2_WEBUI_PORT.getDefaultValue())); hiveConf = new HiveConf(); hiveConf.set(ConfVars.METASTOREPWD.varname, metastorePasswd);
/**
 * Writes the description of a single configuration variable (default value,
 * type, and description) to the result file of a SHOW CONF statement.
 *
 * @param db       current Hive session handle (unused here, kept for the
 *                 common task-dispatch signature)
 * @param showConf descriptor carrying the conf name and result-file path
 * @return 0 on success
 * @throws HiveException if the configuration name is unknown
 */
private int showConf(Hive db, ShowConfDesc showConf) throws Exception {
  ConfVars conf = HiveConf.getConfVars(showConf.getConfName());
  if (conf == null) {
    throw new HiveException("invalid configuration name " + showConf.getConfName());
  }
  String description = conf.getDescription();
  String defaultValue = conf.getDefaultValue();
  // try-with-resources replaces the manual try/finally close() and guarantees
  // the stream is closed even if a write fails.
  try (DataOutputStream output = getOutputStream(showConf.getResFile())) {
    if (defaultValue != null) {
      // NOTE(review): getBytes() uses the platform default charset — consider
      // an explicit charset if the result file must be encoding-stable.
      output.write(defaultValue.getBytes());
    }
    output.write(separator);
    output.write(conf.typeString().getBytes());
    output.write(separator);
    if (description != null) {
      // Collapse multi-line descriptions onto one line for the tabular output.
      output.write(description.replaceAll(" *\n *", " ").getBytes());
    }
    output.write(terminator);
  }
  return 0;
}
// NOTE(review): incomplete fragment, identical to the earlier SET-command
// snippet — unbalanced braces, enclosing method not visible; left byte-identical.
String propName = varname.substring(SystemVariables.HIVECONF_PREFIX.length()); nonErrorMessage = SetProcessor.setConf( varname, propName, getConfVar(propName).getDefaultValue(), false); } else if (varname.startsWith(SystemVariables.METACONF_PREFIX)) { String propName = varname.substring(SystemVariables.METACONF_PREFIX.length()); return SessionState.get().getHiveVariables(); }).substitute(ss.getConf(), confVars.getDefaultValue())); } else { String defaultVal = getConfVar(varname).getDefaultValue(); nonErrorMessage = SetProcessor.setConf(varname, varname, defaultVal, true); if (varname.equals(HiveConf.ConfVars.HIVE_SESSION_HISTORY_ENABLED.toString())) {
/**
 * Verifies the precedence rules between Hadoop Configuration values, ConfVar
 * defaults, and hive-site.xml overrides, including variable substitution.
 * Left byte-identical apart from formatting/comments: the assertion sequence
 * itself is the specification under test.
 */
@Test
public void testConfProperties() throws Exception {
  // Make sure null-valued ConfVar properties do not override the Hadoop Configuration
  // NOTE: Comment out the following test case for now until a better way to test is found,
  // as this test case cannot be reliably tested. The reason for this is that Hive does
  // overwrite fs.default.name in HiveConf if the property is set in system properties.
  // checkHadoopConf(ConfVars.HADOOPFS.varname, "core-site.xml");
  // checkConfVar(ConfVars.HADOOPFS, null);
  // checkHiveConf(ConfVars.HADOOPFS.varname, "core-site.xml");

  // Make sure non-null-valued ConfVar properties *do* override the Hadoop Configuration
  checkHadoopConf(ConfVars.HADOOPNUMREDUCERS.varname, "1");
  checkConfVar(ConfVars.HADOOPNUMREDUCERS, "-1");
  checkHiveConf(ConfVars.HADOOPNUMREDUCERS.varname, "-1");

  // Non-null ConfVar only defined in ConfVars
  checkHadoopConf(ConfVars.HIVESKEWJOINKEY.varname, null);
  checkConfVar(ConfVars.HIVESKEWJOINKEY, "100000");
  checkHiveConf(ConfVars.HIVESKEWJOINKEY.varname, "100000");

  // ConfVar overridden in in hive-site.xml
  checkHadoopConf(ConfVars.HIVETESTMODEDUMMYSTATAGGR.varname, null);
  checkConfVar(ConfVars.HIVETESTMODEDUMMYSTATAGGR, "");
  checkHiveConf(ConfVars.HIVETESTMODEDUMMYSTATAGGR.varname, "value2");

  // Property defined in hive-site.xml only
  checkHadoopConf("test.property1", null);
  checkHiveConf("test.property1", "value1");

  // Test HiveConf property variable substitution in hive-site.xml
  checkHiveConf("test.var.hiveconf.property", ConfVars.DEFAULTPARTITIONNAME.getDefaultValue());
}
private void setAuthorizerV2Config() throws HiveException { // avoid processing the same config multiple times, check marker if (sessionConf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) { return; } String metastoreHook = sessionConf.get(ConfVars.METASTORE_FILTER_HOOK.name()); if (!ConfVars.METASTORE_FILTER_HOOK.getDefaultValue().equals(metastoreHook) && !AuthorizationMetaStoreFilterHook.class.getName().equals(metastoreHook)) { LOG.warn(ConfVars.METASTORE_FILTER_HOOK.name() + " will be ignored, since hive.security.authorization.manager" + " is set to instance of HiveAuthorizerFactory."); } sessionConf.setVar(ConfVars.METASTORE_FILTER_HOOK, AuthorizationMetaStoreFilterHook.class.getName()); authorizerV2.applyAuthorizationConfigPolicy(sessionConf); // update config in Hive thread local as well and init the metastore client try { Hive.get(sessionConf).getMSC(); } catch (Exception e) { // catch-all due to some exec time dependencies on session state // that would cause ClassNoFoundException otherwise throw new HiveException(e.getMessage(), e); } // set a marker that this conf has been processed. sessionConf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString()); }
@Override public void serviceStart() throws Exception { // Start the Shuffle service before the listener - until it's a service as well. ShuffleHandler.initializeAndStart(shuffleHandlerConf); LOG.info("Setting shuffle port to: " + ShuffleHandler.get().getPort()); this.shufflePort.set(ShuffleHandler.get().getPort()); getConfig() .setInt(ConfVars.LLAP_DAEMON_YARN_SHUFFLE_PORT.varname, ShuffleHandler.get().getPort()); LlapOutputFormatService.initializeAndStart(getConfig(), secretManager); super.serviceStart(); // Setup the actual ports in the configuration. getConfig().setInt(ConfVars.LLAP_DAEMON_RPC_PORT.varname, server.getBindAddress().getPort()); getConfig().setInt(ConfVars.LLAP_MANAGEMENT_RPC_PORT.varname, server.getManagementBindAddress().getPort()); if (webServices != null) { getConfig().setInt(ConfVars.LLAP_DAEMON_WEB_PORT.varname, webServices.getPort()); } getConfig().setInt(ConfVars.LLAP_DAEMON_OUTPUT_SERVICE_PORT.varname, LlapOutputFormatService.get().getPort()); // Ensure this is set in the config so that the AM can read it. getConfig() .setIfUnset(ConfVars.LLAP_DAEMON_TASK_SCHEDULER_WAIT_QUEUE_SIZE.varname, ConfVars.LLAP_DAEMON_TASK_SCHEDULER_WAIT_QUEUE_SIZE .getDefaultValue()); this.registry.init(getConfig()); this.registry.start(); LOG.info( "LlapDaemon serviceStart complete. RPC Port={}, ManagementPort={}, ShuflePort={}, WebPort={}", server.getBindAddress().getPort(), server.getManagementBindAddress().getPort(), ShuffleHandler.get().getPort(), (webServices == null ? "" : webServices.getPort())); }
private void setAuthorizerV2Config() throws HiveException { // avoid processing the same config multiple times, check marker if (sessionConf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) { return; } String metastoreHook = sessionConf.getVar(ConfVars.METASTORE_FILTER_HOOK); if (!ConfVars.METASTORE_FILTER_HOOK.getDefaultValue().equals(metastoreHook) && !AuthorizationMetaStoreFilterHook.class.getName().equals(metastoreHook)) { LOG.warn(ConfVars.METASTORE_FILTER_HOOK.varname + " will be ignored, since hive.security.authorization.manager" + " is set to instance of HiveAuthorizerFactory."); } sessionConf.setVar(ConfVars.METASTORE_FILTER_HOOK, AuthorizationMetaStoreFilterHook.class.getName()); authorizerV2.applyAuthorizationConfigPolicy(sessionConf); // update config in Hive thread local as well and init the metastore client try { Hive.get(sessionConf).getMSC(); } catch (Exception e) { // catch-all due to some exec time dependencies on session state // that would cause ClassNoFoundException otherwise throw new HiveException(e.getMessage(), e); } // set a marker that this conf has been processed. sessionConf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString()); }
/**
 * Emits default value, type, and (single-line-collapsed) description of one
 * configuration variable to the SHOW CONF result file.
 *
 * @param db       current Hive session handle (unused here, kept for the
 *                 common task-dispatch signature)
 * @param showConf descriptor with the conf name and result-file location
 * @return 0 on success
 * @throws HiveException if the configuration name is unknown
 */
private int showConf(Hive db, ShowConfDesc showConf) throws Exception {
  ConfVars conf = HiveConf.getConfVars(showConf.getConfName());
  if (conf == null) {
    throw new HiveException("invalid configuration name " + showConf.getConfName());
  }
  String description = conf.getDescription();
  String defaultValue = conf.getDefaultValue();
  // try-with-resources replaces the manual try/finally close() and guarantees
  // the stream is closed even if a write fails.
  try (DataOutputStream output = getOutputStream(showConf.getResFile())) {
    if (defaultValue != null) {
      // NOTE(review): getBytes() uses the platform default charset — consider
      // an explicit charset if the result file must be encoding-stable.
      output.write(defaultValue.getBytes());
    }
    output.write(separator);
    output.write(conf.typeString().getBytes());
    output.write(separator);
    if (description != null) {
      // Collapse multi-line descriptions onto one line for the tabular output.
      output.write(description.replaceAll(" *\n *", " ").getBytes());
    }
    output.write(terminator);
  }
  return 0;
}
/**
 * Verifies that a custom HiveSession implementation class configured via
 * HIVE_SESSION_IMPL_CLASSNAME is actually instantiated for new sessions
 * (doAs disabled), by checking the class name and its magic no-op time.
 */
@Test
public void testSessionImpl() throws Exception {
  HiveConf hiveConf = new HiveConf();
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER.getDefaultValue());
  hiveConf.setVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_CLASSNAME,
      SampleHiveSessionImpl.class.getName());
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);

  CLIService cliService = new CLIService(null, true);
  cliService.init(hiveConf);
  ThriftBinaryCLIService service = new ThriftBinaryCLIService(cliService, null);
  service.init(hiveConf);
  ThriftCLIServiceClient client = new ThriftCLIServiceClient(service);

  // Declaration merged with assignment (was: null-init then assign).
  SessionHandle sessionHandle = client.openSession("tom", "password");
  assertEquals(SampleHiveSessionImpl.class.getName(),
      service.getHiveConf().getVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_CLASSNAME));
  HiveSession session = cliService.getSessionManager().getSession(sessionHandle);
  assertEquals(SampleHiveSessionImpl.MAGIC_RETURN_VALUE, session.getNoOperationTime());
  client.closeSession(sessionHandle);
}
/**
 * Verifies that a custom UGI-aware HiveSession implementation configured via
 * HIVE_SESSION_IMPL_WITH_UGI_CLASSNAME is used for new sessions when doAs is
 * enabled, by checking the class name and its magic no-op time.
 */
@Test
public void testSessionImplWithUGI() throws Exception {
  HiveConf hiveConf = new HiveConf();
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER.getDefaultValue());
  hiveConf.setVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_WITH_UGI_CLASSNAME,
      SampleHiveSessionImplWithUGI.class.getName());
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, true);

  CLIService cliService = new CLIService(null, true);
  cliService.init(hiveConf);
  ThriftBinaryCLIService service = new ThriftBinaryCLIService(cliService, null);
  service.init(hiveConf);
  ThriftCLIServiceClient client = new ThriftCLIServiceClient(service);

  // Declaration merged with assignment (was: null-init then assign).
  SessionHandle sessionHandle = client.openSession("tom", "password");
  assertEquals(SampleHiveSessionImplWithUGI.class.getName(),
      service.getHiveConf().getVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_WITH_UGI_CLASSNAME));
  HiveSession session = cliService.getSessionManager().getSession(sessionHandle);
  assertEquals(SampleHiveSessionImplWithUGI.MAGIC_RETURN_VALUE, session.getNoOperationTime());
  client.closeSession(sessionHandle);
}
/**
 * One-time setup: picks a free web-UI port, configures a PAM-protected web UI
 * with SQL-standard authorization, and starts HiveServer2 with a test PAM
 * authenticator.
 */
@BeforeClass
public static void beforeTests() throws Exception {
  webUIPort = MetaStoreTestUtils.findFreePortExcepting(
      Integer.valueOf(ConfVars.HIVE_SERVER2_WEBUI_PORT.getDefaultValue()));
  hiveConf = new HiveConf();
  hiveConf.setBoolVar(ConfVars.HIVE_IN_TEST, true);
  hiveConf.set(ConfVars.METASTOREPWD.varname, metastorePasswd);
  hiveConf.set(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname, webUIPort.toString());
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PAM_SERVICES, "sshd");
  hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_PAM, true);
  // FIX: removed duplicated setBoolVar(ConfVars.HIVE_IN_TEST, true) — it was
  // set twice to the same value; once (above) is sufficient.
  hiveServer2 = new HiveServer2(new TestPamAuthenticator(hiveConf));
  hiveServer2.init(hiveConf);
  hiveServer2.start();
  // Give the server time to come up before tests issue requests.
  Thread.sleep(5000);
}
@BeforeClass public static void beforeTests() throws Exception { createTestDir(); createDefaultKeyStore(); String metastorePasswd = "693efe9fa425ad21886d73a0fa3fbc70"; //random md5 Integer webUIPort = MetaStoreTestUtils.findFreePortExcepting(Integer.valueOf(ConfVars.HIVE_SERVER2_WEBUI_PORT.getDefaultValue())); hiveConf = new HiveConf(); hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_PAM, true); hiveConf.setBoolVar(ConfVars.HIVE_IN_TEST, false); hiveConf.set(ConfVars.METASTOREPWD.varname, metastorePasswd); hiveConf.set(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname, webUIPort.toString()); hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); }
/**
 * Asserts that the compiled-in default of the given ConfVar equals the
 * expected value (may be null for ConfVars without a default).
 *
 * @param var                the configuration variable under test
 * @param expectedConfVarVal expected default value, or null
 */
private void checkConfVar(ConfVars var, String expectedConfVarVal) throws Exception {
  Assert.assertEquals(expectedConfVarVal, var.getDefaultValue());
}
/** * Overlays ConfVar properties with non-null values */ private static void applyDefaultNonNullConfVars(Configuration conf) { for (ConfVars var : ConfVars.values()) { String defaultValue = var.getDefaultValue(); if (defaultValue == null) { // Don't override ConfVars with null values continue; } conf.set(var.varname, defaultValue); } }
/**
 * One-time setup: starts the LLAP web services on a free port (avoiding the
 * configured default) and waits for them to come up.
 */
@BeforeClass
public static void beforeTests() throws Exception {
  // Exclude the default LLAP web port so we never collide with a live daemon.
  Integer defaultPort =
      Integer.valueOf(HiveConf.ConfVars.LLAP_DAEMON_WEB_PORT.getDefaultValue());
  llapWSPort = MetaStoreTestUtils.findFreePortExcepting(defaultPort);
  llapWS = new LlapWebServices(llapWSPort, null, null);
  llapWS.init(new HiveConf());
  llapWS.start();
  // Give the embedded web service time to start before tests issue requests.
  Thread.sleep(5000);
}