/**
 * Verifies and applies every key/value pair in the given overlay,
 * delegating per-entry validation to {@code verifyAndSet}.
 *
 * @param overlay configuration entries to verify and apply
 */
public void verifyAndSetAll(Map<String, String> overlay) {
  for (Map.Entry<String, String> pair : overlay.entrySet()) {
    verifyAndSet(pair.getKey(), pair.getValue());
  }
}
private void verifyRestriction(String varName, String newVal) { try { conf.verifyAndSet(varName, newVal); fail("Setting config property " + varName + " should fail"); } catch (IllegalArgumentException e) { // the verifyAndSet in this case is expected to fail with the IllegalArgumentException } } }
/**
 * Applies client-supplied session settings. Keys prefixed with "set:" go through
 * SetProcessor, keys prefixed with "use:" switch the current database (after
 * verifying it exists), and everything else is verified and set directly on the
 * session configuration.
 *
 * @param sessionConfMap client-provided key/value overrides
 * @throws HiveSQLException if a variable cannot be set or the database is missing
 */
private void configureSession(Map<String, String> sessionConfMap) throws HiveSQLException {
  SessionState.setCurrentSessionState(sessionState);
  for (Map.Entry<String, String> e : sessionConfMap.entrySet()) {
    String key = e.getKey();
    String value = e.getValue();
    if (key.startsWith("set:")) {
      // strip the "set:" prefix and route through the standard SET handling
      try {
        SetProcessor.setVariable(key.substring(4), value);
      } catch (Exception ex) {
        throw new HiveSQLException(ex);
      }
    } else if (key.startsWith("use:")) {
      // validate the database exists before switching to it
      try {
        if (sessionHive.getDatabase(value) == null) {
          throw new HiveSQLException("Database " + value + " does not exist");
        }
      } catch (HiveException ex) {
        throw new HiveSQLException(ex);
      }
      SessionState.get().setCurrentDatabase(value);
    } else {
      sessionConf.verifyAndSet(key, value);
    }
  }
}
/**
 * Asserts that modifying the given parameter on the processed configuration
 * is rejected with an IllegalArgumentException.
 *
 * @param processedConf configuration with restrictions applied
 * @param param parameter expected to be unmodifiable
 */
private void assertConfModificationException(HiveConf processedConf, String param) {
  boolean rejected;
  try {
    processedConf.verifyAndSet(param, "dummy");
    rejected = false;
  } catch (IllegalArgumentException e) {
    rejected = true;
  }
  assertTrue("Exception should be thrown while modifying the param " + param, rejected);
}
conf.verifyAndSet(key, value); if (HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname.equals(key)) { if (!"spark".equals(value)) {
conf.verifyAndSet(key, value); if (HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname.equals(key)) { if (!"spark".equals(value)) {
conf.verifyAndSet(confEntry.getKey(), confEntry.getValue()); } catch (IllegalArgumentException e) { throw new RuntimeException("Error applying statement specific settings", e);
/**
 * Test that configs in restrict list can't be changed, while a variable
 * outside the restrict list can still be set.
 * @throws Exception on unexpected failure
 */ @Test public void testRestriction() throws Exception { verifyRestriction(ConfVars.HIVETESTMODEPREFIX.varname, "foo"); /* unrestricted variable must remain settable */ conf.verifyAndSet(ConfVars.HIVE_AM_SPLIT_GENERATION.varname, "false"); }
queryConf.verifyAndSet(confEntry.getKey(), confEntry.getValue()); } catch (IllegalArgumentException e) { throw new RuntimeException("Error applying statement specific settings", e);
/** * Test that SQLStdHiveAccessController is not applying config restrictions on CLI * * @throws HiveAuthzPluginException */ @Test public void testConfigProcessing() throws HiveAuthzPluginException { HiveConf processedConf = new HiveConf(); SQLStdHiveAccessController accessController = new SQLStdHiveAccessController(null, processedConf, new HadoopDefaultAuthenticator(), getCLISessionCtx() ); accessController.applyAuthorizationConfigPolicy(processedConf); // check that hook to disable transforms has not been added assertFalse("Check for transform query disabling hook", processedConf.getVar(ConfVars.PREEXECHOOKS).contains(DisallowTransformHook.class.getName())); // verify that some dummy param can be set processedConf.verifyAndSet("dummy.param", "dummy.val"); processedConf.verifyAndSet(ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, "true"); }
/** * Verify that params in settableParams can be modified, and other random ones can't be modified * @param settableParams * @param processedConf */ private void verifyParamSettability(List<String> settableParams, HiveConf processedConf) { // verify that the whitlelist params can be set for (String param : settableParams) { try { processedConf.verifyAndSet(param, "dummy"); } catch (IllegalArgumentException e) { fail("Unable to set value for parameter in whitelist " + param + " " + e); } } // verify that non whitelist params can't be set assertConfModificationException(processedConf, "dummy.param"); // does not make sense to have any of the metastore config variables to be // modifiable for (ConfVars metaVar : HiveConf.metaVars) { assertConfModificationException(processedConf, metaVar.varname); } }
conf.verifyAndSet(name, ""); conf.verifyAndSet(name + "postfix", ""); Assert.fail("Setting config property " + name + " should fail"); } catch (IllegalArgumentException e) {
/* Verifies the sparkConfigUpdated flag: set by spark.* properties and by
 * switching the execution engine to spark, and clearable explicitly. */ @Test public void testSparkConfigUpdate(){ HiveConf conf = new HiveConf(); /* fresh conf starts clean */ Assert.assertFalse(conf.getSparkConfigUpdated()); /* spark.* property marks the conf updated */ conf.verifyAndSet("spark.master", "yarn"); Assert.assertTrue(conf.getSparkConfigUpdated()); /* engine switch to spark also marks it */ conf.verifyAndSet("hive.execution.engine", "spark"); Assert.assertTrue("Expected spark config updated.", conf.getSparkConfigUpdated()); /* flag can be cleared explicitly */ conf.setSparkConfigUpdated(false); Assert.assertFalse(conf.getSparkConfigUpdated()); } @Test
/**
 * Validates and applies all entries of the overlay map, one at a time,
 * via {@code verifyAndSet}.
 *
 * @param overlay configuration entries to verify and apply
 */
public void verifyAndSetAll(Map<String, String> overlay) {
  for (Map.Entry<String, String> item : overlay.entrySet()) {
    verifyAndSet(item.getKey(), item.getValue());
  }
}
/**
 * Applies client-supplied session settings: "set:"-prefixed keys go through
 * setVariable, "use:"-prefixed keys switch the current database, and all
 * other keys are verified and set on the session's HiveConf.
 *
 * @param sessionConfMap client-provided key/value overrides
 * @throws HiveSQLException if a variable cannot be set
 */
private void configureSession(Map<String, String> sessionConfMap) throws HiveSQLException {
  SessionState.setCurrentSessionState(sessionState);
  for (Map.Entry<String, String> e : sessionConfMap.entrySet()) {
    String key = e.getKey();
    String value = e.getValue();
    if (key.startsWith("set:")) {
      // strip the "set:" prefix and route through standard SET handling
      try {
        setVariable(key.substring(4), value);
      } catch (Exception ex) {
        throw new HiveSQLException(ex);
      }
    } else if (key.startsWith("use:")) {
      SessionState.get().setCurrentDatabase(value);
    } else {
      hiveConf.verifyAndSet(key, value);
    }
  }
}
/**
 * Applies client-supplied session settings: "set:"-prefixed keys go through
 * SetProcessor.setVariable, "use:"-prefixed keys switch the current database,
 * and all other keys are verified and set on the session's HiveConf.
 *
 * @param sessionConfMap client-provided key/value overrides
 * @throws HiveSQLException if a variable cannot be set
 */
private void configureSession(Map<String, String> sessionConfMap) throws HiveSQLException {
  SessionState.setCurrentSessionState(sessionState);
  for (Map.Entry<String, String> e : sessionConfMap.entrySet()) {
    String key = e.getKey();
    String value = e.getValue();
    if (key.startsWith("set:")) {
      // strip the "set:" prefix and route through standard SET handling
      try {
        SetProcessor.setVariable(key.substring(4), value);
      } catch (Exception ex) {
        throw new HiveSQLException(ex);
      }
    } else if (key.startsWith("use:")) {
      SessionState.get().setCurrentDatabase(value);
    } else {
      hiveConf.verifyAndSet(key, value);
    }
  }
}
/**
 * Applies client-supplied session settings. Keys prefixed "set:" are handled by
 * setVariable, keys prefixed "use:" change the current database, and any other
 * key is verified and applied to the session configuration directly.
 *
 * @param sessionConfMap client-provided key/value overrides
 * @throws HiveSQLException if a variable cannot be set
 */
private void configureSession(Map<String, String> sessionConfMap) throws HiveSQLException {
  SessionState.setCurrentSessionState(sessionState);
  for (Map.Entry<String, String> override : sessionConfMap.entrySet()) {
    String key = override.getKey();
    String value = override.getValue();
    if (key.startsWith("set:")) {
      // drop the 4-character "set:" prefix before delegating
      try {
        setVariable(key.substring(4), value);
      } catch (Exception ex) {
        throw new HiveSQLException(ex);
      }
    } else if (key.startsWith("use:")) {
      SessionState.get().setCurrentDatabase(value);
    } else {
      hiveConf.verifyAndSet(key, value);
    }
  }
}
/**
 * Applies client-supplied session settings. Keys prefixed "set:" are handled by
 * SetProcessor.setVariable, keys prefixed "use:" change the current database,
 * and any other key is verified and applied to the session configuration.
 *
 * @param sessionConfMap client-provided key/value overrides
 * @throws HiveSQLException if a variable cannot be set
 */
private void configureSession(Map<String, String> sessionConfMap) throws HiveSQLException {
  SessionState.setCurrentSessionState(sessionState);
  for (Map.Entry<String, String> override : sessionConfMap.entrySet()) {
    String key = override.getKey();
    String value = override.getValue();
    if (key.startsWith("set:")) {
      // drop the 4-character "set:" prefix before delegating
      try {
        SetProcessor.setVariable(key.substring(4), value);
      } catch (Exception ex) {
        throw new HiveSQLException(ex);
      }
    } else if (key.startsWith("use:")) {
      SessionState.get().setCurrentDatabase(value);
    } else {
      hiveConf.verifyAndSet(key, value);
    }
  }
}
/**
 * Resolves ${...} substitutions in varvalue, validates the resulting value
 * against the config variable's declared type and validator (only when
 * HIVECONFVALIDATION is enabled), then applies it to the session configuration
 * and optionally records it as an overridden configuration.
 *
 * @param varname the name as typed by the user (used in error messages)
 * @param key the canonical configuration key being set
 * @param varvalue the raw value, possibly containing substitution variables
 * @param register whether to record the key in the session's overridden configs
 * @throws IllegalArgumentException if validation fails or an unknown hive.* key is given
 */
private static void setConf(String varname, String key, String varvalue, boolean register)
    throws IllegalArgumentException {
  HiveConf conf = SessionState.get().getConf();
  String value = new VariableSubstitution().substitute(conf, varvalue);
  if (conf.getBoolVar(HiveConf.ConfVars.HIVECONFVALIDATION)) {
    HiveConf.ConfVars confVars = HiveConf.getConfVars(key);
    if (confVars == null) {
      // unknown keys are only rejected when they claim to be hive properties
      // NOTE(review): "does not exists" kept verbatim from the original message
      if (key.startsWith("hive.")) {
        throw new IllegalArgumentException("hive configuration " + key + " does not exists.");
      }
    } else {
      if (!confVars.isType(value)) {
        throw new IllegalArgumentException("'SET " + varname + '=' + varvalue
            + "' FAILED because " + key + " expects " + confVars.typeString() + " type value.");
      }
      String fail = confVars.validate(value);
      if (fail != null) {
        throw new IllegalArgumentException("'SET " + varname + '=' + varvalue
            + "' FAILED in validation : " + fail + '.');
      }
    }
  }
  conf.verifyAndSet(key, value);
  if (register) {
    SessionState.get().getOverriddenConfigurations().put(key, value);
  }
}
/** * If there are query specific settings to overlay, then create a copy of config * There are two cases we need to clone the session config that's being passed to hive driver * 1. Async query - * If the client changes a config setting, that shouldn't reflect in the execution already underway * 2. confOverlay - * The query specific settings should only be applied to the query config and not session * @return new configuration * @throws HiveSQLException */ private HiveConf getConfigForOperation() throws HiveSQLException { HiveConf sqlOperationConf = getParentSession().getHiveConf(); if (!getConfOverlay().isEmpty() || shouldRunAsync()) { // clone the parent session config for this query sqlOperationConf = new HiveConf(sqlOperationConf); // apply overlay query specific settings, if any for (Map.Entry<String, String> confEntry : getConfOverlay().entrySet()) { try { sqlOperationConf.verifyAndSet(confEntry.getKey(), confEntry.getValue()); } catch (IllegalArgumentException e) { throw new HiveSQLException("Error applying statement specific settings", e); } } } return sqlOperationConf; } }