/** Looks up {@code key} in the backing Hive configuration, returning {@code defaultVal} when absent. */
@Override
public String getConfVal(String key, String defaultVal) {
  return hiveConfig.get(key, defaultVal);
}
public void validate(HiveConf conf) throws HiveException { for (ConfVars var : restrictedHiveConf) { String userValue = HiveConf.getVarWithoutType(conf, var), serverValue = HiveConf.getVarWithoutType(initConf, var); // Note: with some trickery, we could add logic for each type in ConfVars; for now the // potential spurious mismatches (e.g. 0 and 0.0 for float) should be easy to work around. validateRestrictedConfigValues(var.varname, userValue, serverValue); } for (String var : restrictedNonHiveConf) { String userValue = conf.get(var), serverValue = initConf.get(var); validateRestrictedConfigValues(var, userValue, serverValue); } }
/**
 * Selects the DDL metadata formatter based on {@code hive.ddl.output.format}
 * ("json" for JSON output; anything else, including the "text" default, for text).
 */
public static MetaDataFormatter getFormatter(HiveConf conf) {
  String format = conf.get(HiveConf.ConfVars.HIVE_DDL_OUTPUT_FORMAT.varname, "text");
  if ("json".equals(format)) {
    return new JsonMetaDataFormatter();
  }
  return new TextMetaDataFormatter(
      conf.getIntVar(HiveConf.ConfVars.CLIPRETTYOUTPUTNUMCOLS),
      conf.getBoolVar(ConfVars.HIVE_DISPLAY_PARTITION_COLUMNS_SEPARATELY));
}
/** Logs the YARN application id and the matching kill command when Spark runs on YARN. */
private void printAppInfo() {
  String master = hiveConf.get("spark.master");
  if (master == null || !master.startsWith("yarn")) {
    return; // Only meaningful for yarn / yarn-cluster / yarn-client masters.
  }
  String appID = sparkJobStatus.getAppID();
  if (appID != null) {
    console.printInfo("Running with YARN Application = " + appID);
    console.printInfo("Kill Command = " + HiveConf.getVar(hiveConf, HiveConf.ConfVars.YARNBIN)
        + " application -kill " + appID);
  }
}
}
@Test
public void testHideNonStringVar() throws Exception {
  // Before stripping: the boolean var is set and readable both typed and as a string.
  Assert.assertTrue(conf.getBoolean("dummyBoolean", false));
  Assert.assertEquals("true", conf.get("dummyBoolean"));

  HiveConfUtil.stripConfigurations(conf, Sets.newHashSet("dummyBoolean"));

  // After stripping: the value is blanked (empty string), not removed.
  Assert.assertFalse(conf.getBoolean("dummyBoolean", false));
  Assert.assertEquals("", conf.get("dummyBoolean"));
}
@Test
public void testEncodingDecoding() throws UnsupportedEncodingException {
  HiveConf conf = new HiveConf();
  String query = "select blah, '\u0001' from random_table";
  conf.setQueryString(query);
  // The stored form is URL-encoded; getQueryString() must round-trip back to the raw query.
  Assert.assertEquals(URLEncoder.encode(query, "UTF-8"),
      conf.get(ConfVars.HIVEQUERYSTRING.varname));
  Assert.assertEquals(query, conf.getQueryString());
}
}
/** Returns the trimmed {@code user.name} from the session conf, falling back to the JVM user. */
@Override
public String getUserName() {
  String configured = sessionState.getConf().get("user.name", "").trim();
  return configured.isEmpty() ? System.getProperty("user.name") : configured;
}
/**
 * Adds a newly created materialized view to the cache.
 *
 * @param materializedViewTable the materialized view
 */
public RelOptMaterialization createMaterializedView(HiveConf conf, Table materializedViewTable) {
  // Constant-first equals avoids an NPE when the registry-impl property is unset
  // (conf.get(String) may return null); an unset value still means "cache".
  final boolean cache = !"DUMMY".equals(
      conf.get(HiveConf.ConfVars.HIVE_SERVER2_MATERIALIZED_VIEWS_REGISTRY_IMPL.varname));
  return addMaterializedView(conf, materializedViewTable, OpType.CREATE, cache);
}
/** Copies the given variable from {@code conf} into the session state, if it is present. */
private static void setSessionVariableFromConf(SessionState ss, String varname, HiveConf conf) {
  String value = conf.get(varname);
  if (value == null) {
    return; // Nothing to propagate.
  }
  SetProcessor.setConf(ss, varname, varname, value, false);
}
/** Starts the metrics reporter at the configured interval (expressed in seconds). */
@Override
public void start() {
  String configured = conf.get(HiveConf.ConfVars.HIVE_METRICS_HADOOP2_INTERVAL.varname);
  long reportingInterval = HiveConf.toTime(configured, TimeUnit.SECONDS, TimeUnit.SECONDS);
  reporter.start(reportingInterval, TimeUnit.SECONDS);
}
/** Propagates the valid-txn list from the query state's conf into this conf, when present. */
private void setTxnConfigs() {
  String validTxnList = queryState.getConf().get(ValidTxnList.VALID_TXNS_KEY);
  if (validTxnList == null) {
    return;
  }
  conf.set(ValidTxnList.VALID_TXNS_KEY, validTxnList);
}
/**
 * Records the transaction manager's current valid-txn list into the conf so
 * downstream tasks see a consistent snapshot.
 *
 * @param txnMgr the transaction manager to snapshot
 * @throws LockException if the valid transaction list cannot be obtained
 * @throws IllegalStateException if a valid-txn list was already recorded for this query
 */
private void recordValidTxns(HiveTxnManager txnMgr) throws LockException {
  String oldTxnString = conf.get(ValidTxnList.VALID_TXNS_KEY);
  if (oldTxnString != null && !oldTxnString.isEmpty()) {
    throw new IllegalStateException("calling recordValidTxn() more than once in the same "
        + JavaUtils.txnIdToString(txnMgr.getCurrentTxnId()));
  }
  ValidTxnList txnList = txnMgr.getValidTxns();
  String txnStr = txnList.toString();
  conf.set(ValidTxnList.VALID_TXNS_KEY, txnStr);
  // Parameterized logging avoids eager string concatenation when debug is disabled.
  LOG.debug("Encoding valid txns info {} txnid:{}", txnStr, txnMgr.getCurrentTxnId());
}
@Test
public void testHideMultipleVars() throws Exception {
  Assert.assertEquals("aaa", conf.get("dummy"));
  Assert.assertEquals("aaa", conf.get("dummy2"));
  Assert.assertEquals("aaa", conf.get("3dummy"));

  HiveConfUtil.stripConfigurations(conf, Sets.newHashSet("dummy"));

  // Stripping "dummy" blanks vars whose names start with it ("dummy", "dummy2"),
  // but leaves "3dummy" intact since the prefix appears mid-name.
  Assert.assertEquals("", conf.get("dummy"));
  Assert.assertEquals("", conf.get("dummy2"));
  Assert.assertEquals("aaa", conf.get("3dummy"));
}
/**
 * Hook entry point: initializes the materialized views registry when the
 * configured registry implementation is "DUMMY".
 *
 * @param hookContext the hook invocation context (unused here)
 */
@Override
public void run(HookContext hookContext) throws Exception {
  SessionState ss = SessionState.get();
  // Constant-first equals avoids an NPE when the registry-impl property is unset
  // (Configuration.get(String) may return null).
  if (ss != null && "DUMMY".equals(
      ss.getConf().get(HiveConf.ConfVars.HIVE_SERVER2_MATERIALIZED_VIEWS_REGISTRY_IMPL.varname))) {
    HiveMaterializedViewsRegistry.get().init(Hive.get(ss.getConf()));
  }
}
/** Wires a Codahale registry into the Hadoop2 metrics system for this Hive component. */
public Metrics2Reporter(MetricRegistry registry, HiveConf conf) {
  this.metricRegistry = registry;
  this.conf = conf;
  String appName = conf.get(HiveConf.ConfVars.HIVE_METRICS_HADOOP2_COMPONENT_NAME.varname);
  // The configured component name serves as the metrics-system name, the
  // component name, and the component description all at once.
  reporter = HadoopMetrics2Reporter.forRegistry(metricRegistry)
      .convertDurationsTo(TimeUnit.MILLISECONDS)
      .convertRatesTo(TimeUnit.SECONDS)
      .build(DefaultMetricsSystem.initialize(appName),
          appName, appName, "General"); // "General" names each metric record
}
/** Reopens the session that was found to not be running. */
@Override
public TezSessionState reopen(TezSessionState sessionState) throws Exception {
  HiveConf sessionConf = sessionState.getConf();
  String queue = sessionState.getQueueName();
  // Restore the session's queue into the conf unless one is already set there.
  if (queue != null && sessionConf.get(TezConfiguration.TEZ_QUEUE_NAME) == null) {
    sessionConf.set(TezConfiguration.TEZ_QUEUE_NAME, queue);
  }
  reopenInternal(sessionState);
  return sessionState;
}
@Test public void testValidTxnsBookkeeping() throws Exception { // 1. Run a query against a non-ACID table, and we shouldn't have txn logged in conf runStatementOnDriver("select * from " + Table.NONACIDORCTBL); String value = hiveConf.get(ValidTxnWriteIdList.VALID_TABLES_WRITEIDS_KEY); Assert.assertNull("The entry should be null for query that doesn't involve ACID tables", value); }
/** Lazily boots a second metastore (backed by a "target_metastore" DB) used as the replication target. */
private void startReplicationTargetMetaStoreIfRequired() throws Exception {
  if (isReplicationTargetHCatRunning) {
    return; // Already started once for this test run.
  }
  HiveConf conf = new HiveConf();
  // Point the new metastore at a sibling database derived from the source's JDO URL.
  String targetUrl = hcatConf.get("javax.jdo.option.ConnectionURL")
      .replace("metastore", "target_metastore");
  conf.set("javax.jdo.option.ConnectionURL", targetUrl);
  replicationTargetHCatPort = MetaStoreTestUtils.startMetaStoreWithRetry(conf);
  replicationTargetHCatConf = new HiveConf(hcatConf);
  replicationTargetHCatConf.setVar(HiveConf.ConfVars.METASTOREURIS,
      "thrift://localhost:" + replicationTargetHCatPort);
  isReplicationTargetHCatRunning = true;
}