/**
 * Generates a unique id for the next Spark session owned by this Hive session.
 * <p>
 * The id is the Hive session id followed by a monotonically increasing
 * counter, e.g. {@code <hiveSessionId>_0}, {@code <hiveSessionId>_1}, ...
 */
public String getNewSparkSessionId() {
  final long nextCounter = this.sparkSessionId.getAndIncrement();
  return getSessionId() + "_" + nextCounter;
}
}
/**
 * Strips the Hive session log prefix from the current thread's name, restoring
 * the name the thread had before {@code updateThreadName()} tagged it.
 * <p>
 * Uses literal {@link String#replace(CharSequence, CharSequence)} rather than
 * {@code String#split}: {@code split} interprets the prefix as a regex, so any
 * regex metacharacters in the log id would corrupt the match, and a thread
 * name consisting solely of the prefix yields an EMPTY split result, making
 * {@code names[names.length - 1]} throw {@code ArrayIndexOutOfBoundsException}.
 */
public void resetThreadName() {
  final String sessionId = getSessionId();
  final String logPrefix = getConf().getLogIdVar(sessionId);
  final String currThreadName = Thread.currentThread().getName();
  if (currThreadName.contains(logPrefix)) {
    // String.replace performs a literal (non-regex) substitution.
    final String restoredName = currThreadName.replace(logPrefix, "").trim();
    LOG.info("Resetting thread name to {}", restoredName);
    Thread.currentThread().setName(restoredName);
  }
}
/**
 * Prepends the session's log prefix to the current thread's name so log lines
 * emitted by this thread can be attributed to the session. No-op when the
 * prefix is already present.
 */
public void updateThreadName() {
  final String logPrefix = getConf().getLogIdVar(getSessionId());
  final String originalName = Thread.currentThread().getName();
  if (originalName.contains(logPrefix)) {
    return;  // already tagged for this session
  }
  final String prefixedName = logPrefix + " " + originalName;
  LOG.info("Updating thread name to {}", prefixedName);
  Thread.currentThread().setName(prefixedName);
}
/**
 * Tags the current thread's name with this session's log prefix, so that log
 * output can be traced back to the session. Does nothing if the thread name
 * already carries the prefix.
 */
public void updateThreadName() {
  final String sessionId = getSessionId();
  final String prefix = getConf().getLogIdVar(sessionId);
  final String threadName = Thread.currentThread().getName();
  final boolean alreadyTagged = threadName.contains(prefix);
  if (!alreadyTagged) {
    final String taggedName = prefix + " " + threadName;
    LOG.info("Updating thread name to {}", taggedName);
    Thread.currentThread().setName(taggedName);
  }
}
/**
 * Removes the Hive session log prefix from the current thread's name, undoing
 * the tagging done by {@code updateThreadName()}.
 * <p>
 * Uses literal {@link String#replace(CharSequence, CharSequence)} instead of
 * {@code String#split}: {@code split} treats the prefix as a regex (so regex
 * metacharacters in the log id break the match) and returns an EMPTY array
 * when the thread name equals the prefix exactly, which would make
 * {@code names[names.length - 1]} throw {@code ArrayIndexOutOfBoundsException}.
 */
public void resetThreadName() {
  final String sessionId = getSessionId();
  final String logPrefix = getConf().getLogIdVar(sessionId);
  final String currThreadName = Thread.currentThread().getName();
  if (currThreadName.contains(logPrefix)) {
    // String.replace performs a literal (non-regex) substitution.
    final String restoredName = currThreadName.replace(logPrefix, "").trim();
    LOG.info("Resetting thread name to {}", restoredName);
    Thread.currentThread().setName(restoredName);
  }
}
/**
 * Opens this Hive on Spark session by creating the underlying Spark client.
 * Idempotent: a second call while already open is a logged no-op. The whole
 * operation runs under the write lock so open and close cannot interleave.
 *
 * @param conf Hive configuration used to create the Spark client; stored on
 *             this session for later use
 * @throws HiveException if the Spark client cannot be created (any failure,
 *                       including {@code Error}s, is normalized via
 *                       {@code getHiveException})
 */
@Override public void open(HiveConf conf) throws HiveException {
  closeLock.writeLock().lock();
  try {
    if (!isOpen) {
      LOG.info("Trying to open Hive on Spark session {}", sessionId);
      this.conf = conf;
      try {
        // NOTE(review): presumably ties the Spark client to the owning Hive
        // session via SessionState.get().getSessionId() — confirm the current
        // thread always has a SessionState attached here.
        hiveSparkClient = HiveSparkClientFactory.createHiveSparkClient(conf, sessionId, SessionState.get().getSessionId());
        isOpen = true;
      } catch (Throwable e) {
        // Wrap anything thrown (including Errors) into a HiveException.
        throw getHiveException(e);
      }
      LOG.info("Hive on Spark session {} successfully opened", sessionId);
    } else {
      LOG.info("Hive on Spark session {} is already opened", sessionId);
    }
  } finally {
    closeLock.writeLock().unlock();
  }
}
private void setupSessionAcls(Configuration tezConf, HiveConf hiveConf) throws IOException { // TODO: De-link from SessionState. A TezSession can be linked to different Hive Sessions via the pool. String user = SessionState.getUserFromAuthenticator(); UserGroupInformation loginUserUgi = UserGroupInformation.getLoginUser(); String loginUser = loginUserUgi == null ? null : loginUserUgi.getShortUserName(); boolean addHs2User = HiveConf.getBoolVar(hiveConf, ConfVars.HIVETEZHS2USERACCESS); String viewStr = Utilities.getAclStringWithHiveModification(tezConf, TezConfiguration.TEZ_AM_VIEW_ACLS, addHs2User, user, loginUser); String modifyStr = Utilities.getAclStringWithHiveModification(tezConf, TezConfiguration.TEZ_AM_MODIFY_ACLS, addHs2User, user, loginUser); if (LOG.isDebugEnabled()) { // TODO: De-link from SessionState. A TezSession can be linked to different Hive Sessions via the pool. LOG.debug( "Setting Tez Session access for sessionId={} with viewAclString={}, modifyStr={}", SessionState.get().getSessionId(), viewStr, modifyStr); } tezConf.set(TezConfiguration.TEZ_AM_VIEW_ACLS, viewStr); tezConf.set(TezConfiguration.TEZ_AM_MODIFY_ACLS, modifyStr); }
histFileName = conf_file_loc + File.separator + "hive_job_log_" + ss.getSessionId() + "_" + Math.abs(randGen.nextInt()) + ".txt"; } while (! new File(histFileName).createNewFile()); hm.put(Keys.SESSION_ID.name(), ss.getSessionId()); log(RecordTypes.SessionStart, hm); } catch (IOException e) {
/**
 * Computes and applies the Tez AM view and modify ACL strings for this
 * session, including the HS2 login user when configured to do so.
 */
private void setupSessionAcls(Configuration tezConf, HiveConf hiveConf) throws IOException {
  final String authenticatedUser = SessionState.getUserFromAuthenticator();
  final UserGroupInformation loginUserUgi = UserGroupInformation.getLoginUser();
  final String loginUserName = (loginUserUgi != null) ? loginUserUgi.getShortUserName() : null;
  final boolean includeHs2User = HiveConf.getBoolVar(hiveConf, ConfVars.HIVETEZHS2USERACCESS);

  final String viewAclString = Utilities.getAclStringWithHiveModification(tezConf,
      TezConfiguration.TEZ_AM_VIEW_ACLS, includeHs2User, authenticatedUser, loginUserName);
  final String modifyAclString = Utilities.getAclStringWithHiveModification(tezConf,
      TezConfiguration.TEZ_AM_MODIFY_ACLS, includeHs2User, authenticatedUser, loginUserName);

  if (LOG.isDebugEnabled()) {
    LOG.debug("Setting Tez Session access for sessionId={} with viewAclString={}, modifyStr={}",
        SessionState.get().getSessionId(), viewAclString, modifyAclString);
  }
  tezConf.set(TezConfiguration.TEZ_AM_VIEW_ACLS, viewAclString);
  tezConf.set(TezConfiguration.TEZ_AM_MODIFY_ACLS, modifyAclString);
}
/**
 * Invoked after runInternal(), even if an exception is thrown in runInternal().
 * Clean up resources, which was set up in beforeRun().
 */
protected void afterRun() {
  // Tear down the per-query logging context installed in beforeRun().
  LogUtils.unregisterLoggingContext();
  // Reset back to session context after the query is done
  ShimLoader.getHadoopShims().setHadoopSessionContext(parentSession.getSessionState().getSessionId());
}
histFileName = conf_file_loc + File.separator + "hive_job_log_" + ss.getSessionId() + "_" + Math.abs(randGen.nextInt()) + ".txt"; } while (! new File(histFileName).createNewFile()); hm.put(Keys.SESSION_ID.name(), ss.getSessionId()); log(RecordTypes.SessionStart, hm); } catch (IOException e) {
String sessionId = getSessionId();
String sessionId = getSessionId();
/**
 * Creates a session state bound to the given configuration and user.
 * Sets up the registry, overridden-configuration map, a session-private UDF
 * classloader, the resource downloader, and registers the session id with the
 * Hadoop shims. Generates and stores a session id if the configuration does
 * not already carry one.
 *
 * @param conf     the Hive configuration this session owns
 * @param userName the authenticated user for this session (may be null)
 */
public SessionState(HiveConf conf, String userName) {
  this.sessionConf = conf;
  this.userName = userName;
  this.registry = new Registry(false);
  if (LOG.isDebugEnabled()) {
    LOG.debug("SessionState user: " + userName);
  }
  isSilent = conf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT);
  resourceMaps = new ResourceMaps();
  // Must be deterministic order map for consistent q-test output across Java versions
  overriddenConfigurations = new LinkedHashMap<String, String>();
  overriddenConfigurations.putAll(HiveConf.getConfSystemProperties());
  // if there isn't already a session name, go ahead and create it.
  if (StringUtils.isEmpty(conf.getVar(HiveConf.ConfVars.HIVESESSIONID))) {
    conf.setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId());
    getConsole().printInfo("Hive Session ID = " + getSessionId());
  }
  // Using system classloader as the parent. Using thread context
  // classloader as parent can pollute the session. See HIVE-11878
  parentLoader = SessionState.class.getClassLoader();
  // Make sure that each session has its own UDFClassloader. For details see {@link UDFClassLoader}
  // NOTE(review): the (URLClassLoader) cast assumes the application classloader
  // is a URLClassLoader — true on Java 8, not on Java 9+; verify target JVM.
  final ClassLoader currentLoader = Utilities.createUDFClassLoader((URLClassLoader) parentLoader, new String[]{});
  this.sessionConf.setClassLoader(currentLoader);
  resourceDownloader = new ResourceDownloader(conf, HiveConf.getVar(conf, ConfVars.DOWNLOADED_RESOURCES_DIR));
  killQuery = new NullKillQuery();
  // Publish this session's id to the Hadoop shims so downstream code can tag work with it.
  ShimLoader.getHadoopShims().setHadoopSessionContext(getSessionId());
}
authzContextBuilder.setClientType(isHiveServerQuery() ? CLIENT_TYPE.HIVESERVER2 : CLIENT_TYPE.HIVECLI); authzContextBuilder.setSessionString(getSessionId());
authzContextBuilder.setClientType(isHiveServerQuery() ? CLIENT_TYPE.HIVESERVER2 : CLIENT_TYPE.HIVECLI); authzContextBuilder.setSessionString(getSessionId());
startSs.setTezSession(new TezSessionState(startSs.getSessionId(), startSs.sessionConf)); } else {
startSs.setTezSession(new TezSessionState(startSs.getSessionId()));
/**
 * Verifies that Spark session ids are the owning Hive session id plus a
 * per-Hive-session counter suffix, and that the counter restarts at zero
 * when a fresh Hive session is started.
 */
@Test
public void testGetSessionId() throws HiveException {
  SessionState sessionState = SessionState.start(SESSION_HIVE_CONF);
  SparkSessionManager manager = SparkSessionManagerImpl.getInstance();

  // First Spark session of this Hive session gets suffix _0 ...
  sessionState.setSparkSession(manager.getSession(null, SESSION_HIVE_CONF, true));
  assertEquals(sessionState.getSessionId() + "_0", sessionState.getSparkSession().getSessionId());

  // ... and the next one gets _1.
  sessionState.setSparkSession(manager.getSession(null, SESSION_HIVE_CONF, true));
  assertEquals(sessionState.getSessionId() + "_1", sessionState.getSparkSession().getSessionId());

  // A brand new Hive session starts its Spark session counter over at _0.
  sessionState = SessionState.start(SESSION_HIVE_CONF);
  sessionState.setSparkSession(manager.getSession(null, SESSION_HIVE_CONF, true));
  assertEquals(sessionState.getSessionId() + "_0", sessionState.getSparkSession().getSessionId());
}
ss.getSessionId(), Thread.currentThread().getName(), ss.isHiveServerQuery(), perfLogger, queryInfo, ctx); hookContext.setHookType(HookContext.HookType.PRE_EXEC_HOOK);