private void applyAuthorizationConfigPolicy(HiveConf newHiveConf) throws HiveException, MetaException { // authorization setup using SessionState should be revisited eventually, as // authorization and authentication are not session specific settings SessionState ss = new SessionState(newHiveConf); ss.setIsHiveServerQuery(true); SessionState.start(ss); ss.applyAuthorizationPolicy(); }
@Override public void run() { try { SessionState ss = new SessionState(db.getConf()); ss.setIsHiveServerQuery(true); // All is served from HS2, we do not need e.g. Tez sessions SessionState.start(ss); final boolean cache = !db.getConf() .get(HiveConf.ConfVars.HIVE_SERVER2_MATERIALIZED_VIEWS_REGISTRY_IMPL.varname).equals("DUMMY"); for (String dbName : db.getAllDatabases()) { for (Table mv : db.getAllMaterializedViewObjects(dbName)) { addMaterializedView(db.getConf(), mv, OpType.LOAD, cache); } } initialized.set(true); LOG.info("Materialized views registry has been initialized"); } catch (HiveException e) { LOG.error("Problem connecting to the metastore when initializing the view registry", e); } } }
public static SessionState setUpSessionState(HiveConf conf, String user, boolean doStart) { SessionState sessionState = SessionState.get(); if (sessionState == null) { // Note: we assume that workers run on the same threads repeatedly, so we can set up // the session here and it will be reused without explicitly storing in the worker. sessionState = new SessionState(conf, user); if (doStart) { // TODO: Required due to SessionState.getHDFSSessionPath. Why wasn't it required before? sessionState.setIsHiveServerQuery(true); SessionState.start(sessionState); } SessionState.setCurrentSessionState(sessionState); } return sessionState; } }
@Override public void applyAction(final Map<TezSessionState, Trigger> queriesViolated) { for (Map.Entry<TezSessionState, Trigger> entry : queriesViolated.entrySet()) { switch (entry.getValue().getAction().getType()) { case KILL_QUERY: TezSessionState sessionState = entry.getKey(); String queryId = sessionState.getWmContext().getQueryId(); try { SessionState ss = new SessionState(new HiveConf()); ss.setIsHiveServerQuery(true); SessionState.start(ss); KillQuery killQuery = sessionState.getKillQuery(); // if kill query is null then session might have been released to pool or closed already if (killQuery != null) { sessionState.getKillQuery().killQuery(queryId, entry.getValue().getViolationMsg(), sessionState.getConf()); } } catch (HiveException e) { LOG.warn("Unable to kill query {} for trigger violation", queryId); } break; default: throw new RuntimeException("Unsupported action: " + entry.getValue()); } } } }
// NOTE(review): fragment — the lambda body continues beyond this chunk.
// Each pooled task creates and starts its own thread-local SessionState
// (flagged as an HS2 query) before doing any work on the worker thread.
workPool.submit(() -> {
    SessionState ss = new SessionState(new HiveConf());
    ss.setIsHiveServerQuery(true);
    SessionState.start(ss);
private SessionHandle openSession(Map<String, String> confOverlay) throws HiveSQLException { SessionHandle sessionHandle = client.openSession("tom", "password", confOverlay); assertNotNull(sessionHandle); SessionState.get().setIsHiveServerQuery(true); // Pretend we are in HS2. String queryString = "SET " + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + " = false"; client.executeStatement(sessionHandle, queryString, confOverlay); return sessionHandle; }
// NOTE(review): fragment — statements from a larger session-open routine that
// continues outside this chunk. Builds the per-connection SessionState and
// tags it with client metadata.
sessionState = new SessionState(sessionConf, username);
sessionState.setUserIpAddress(ipAddress);
// Mark that queries in this session originate from HiveServer2.
sessionState.setIsHiveServerQuery(true);
// Record any intermediate proxies the client connection passed through.
sessionState.setForwardedAddresses(SessionManager.getForwardedAddresses());
sessionState.setIsUsingThriftJDBCBinarySerDe(updateIsUsingThriftJDBCBinarySerDe());
private void applyAuthorizationConfigPolicy(HiveConf newHiveConf) throws HiveException, MetaException { // authorization setup using SessionState should be revisited eventually, as // authorization and authentication are not session specific settings SessionState ss = new SessionState(newHiveConf); ss.setIsHiveServerQuery(true); SessionState.start(ss); ss.applyAuthorizationPolicy(); }
private void applyAuthorizationConfigPolicy(HiveConf newHiveConf) throws HiveException, MetaException { // authorization setup using SessionState should be revisited eventually, as // authorization and authentication are not session specific settings SessionState ss = new SessionState(newHiveConf); ss.setIsHiveServerQuery(true); SessionState.start(ss); ss.applyAuthorizationPolicy(); }
private void applyAuthorizationConfigPolicy(HiveConf newHiveConf) throws HiveException, MetaException { // authorization setup using SessionState should be revisited eventually, as // authorization and authentication are not session specific settings SessionState ss = new SessionState(newHiveConf); ss.setIsHiveServerQuery(true); SessionState.start(ss); ss.applyAuthorizationPolicy(); }
private void applyAuthorizationConfigPolicy(HiveConf newHiveConf) throws HiveException, MetaException { // authorization setup using SessionState should be revisited eventually, as // authorization and authentication are not session specific settings SessionState ss = new SessionState(newHiveConf); ss.setIsHiveServerQuery(true); SessionState.start(ss); ss.applyAuthorizationPolicy(); }
private void applyAuthorizationConfigPolicy(HiveConf newHiveConf) throws HiveException, MetaException { // authorization setup using SessionState should be revisited eventually, as // authorization and authentication are not session specific settings SessionState ss = new SessionState(newHiveConf); ss.setIsHiveServerQuery(true); SessionState.start(ss); ss.applyAuthorizationPolicy(); }
@Override
/**
 * Opens a new HiveServer2 session for the client connection.
 * Creates a new SessionState object that will be associated with this HiveServer2 session.
 * When the server executes multiple queries in the same session,
 * this SessionState object is reused across multiple queries.
 * Note that if doAs is true, this call goes through a proxy object,
 * which wraps the method logic in a UserGroupInformation#doAs.
 * That's why it is important to create SessionState here rather than in the constructor.
 */
public void open(Map<String, String> sessionConfMap) throws HiveSQLException {
  // Build the per-connection session and bind it to the current thread.
  sessionState = new SessionState(hiveConf, username);
  sessionState.setUserIpAddress(ipAddress);
  sessionState.setIsHiveServerQuery(true);
  SessionState.start(sessionState);
  try {
    // Pick up any jars added to the reloadable-aux-jars path since startup.
    sessionState.reloadAuxJars();
  } catch (IOException e) {
    // A failed jar reload aborts session open; preserve the cause for callers.
    String msg = "Failed to load reloadable jar file path: " + e;
    LOG.error(msg, e);
    throw new HiveSQLException(msg, e);
  }
  // Process global init file: .hiverc
  processGlobalInitFile();
  // Apply any per-session configuration overrides supplied by the client.
  if (sessionConfMap != null) {
    configureSession(sessionConfMap);
  }
  // A freshly opened session counts as both just-accessed and just-idle.
  lastAccessTime = System.currentTimeMillis();
  lastIdleTime = lastAccessTime;
}
@Override
/**
 * Opens a new HiveServer2 session for the client connection.
 * Creates a new SessionState object that will be associated with this HiveServer2 session.
 * When the server executes multiple queries in the same session,
 * this SessionState object is reused across multiple queries.
 * Note that if doAs is true, this call goes through a proxy object,
 * which wraps the method logic in a UserGroupInformation#doAs.
 * That's why it is important to create SessionState here rather than in the constructor.
 */
public void open(Map<String, String> sessionConfMap) throws HiveSQLException {
  // Build the per-connection session and bind it to the current thread.
  sessionState = new SessionState(hiveConf, username);
  sessionState.setUserIpAddress(ipAddress);
  sessionState.setIsHiveServerQuery(true);
  SessionState.start(sessionState);
  try {
    // Pick up any jars added to the reloadable-aux-jars path since startup.
    sessionState.reloadAuxJars();
  } catch (IOException e) {
    // A failed jar reload aborts session open; preserve the cause for callers.
    String msg = "Failed to load reloadable jar file path: " + e;
    LOG.error(msg, e);
    throw new HiveSQLException(msg, e);
  }
  // Process global init file: .hiverc
  processGlobalInitFile();
  // Apply any per-session configuration overrides supplied by the client.
  if (sessionConfMap != null) {
    configureSession(sessionConfMap);
  }
  // A freshly opened session counts as both just-accessed and just-idle.
  lastAccessTime = System.currentTimeMillis();
  lastIdleTime = lastAccessTime;
}
@Override
/**
 * Opens a new HiveServer2 session for the client connection.
 * Creates a new SessionState object that will be associated with this HiveServer2 session.
 * When the server executes multiple queries in the same session,
 * this SessionState object is reused across multiple queries.
 * Note that if doAs is true, this call goes through a proxy object,
 * which wraps the method logic in a UserGroupInformation#doAs.
 * That's why it is important to create SessionState here rather than in the constructor.
 */
public void open(Map<String, String> sessionConfMap) throws HiveSQLException {
  // Build the per-connection session and bind it to the current thread.
  sessionState = new SessionState(hiveConf, username);
  sessionState.setUserIpAddress(ipAddress);
  sessionState.setIsHiveServerQuery(true);
  SessionState.start(sessionState);
  try {
    // Pick up any jars added to the reloadable-aux-jars path since startup.
    sessionState.reloadAuxJars();
  } catch (IOException e) {
    // A failed jar reload aborts session open; preserve the cause for callers.
    String msg = "Failed to load reloadable jar file path: " + e;
    LOG.error(msg, e);
    throw new HiveSQLException(msg, e);
  }
  // Process global init file: .hiverc
  processGlobalInitFile();
  // Apply any per-session configuration overrides supplied by the client.
  if (sessionConfMap != null) {
    configureSession(sessionConfMap);
  }
  // A freshly opened session counts as both just-accessed and just-idle.
  lastAccessTime = System.currentTimeMillis();
  lastIdleTime = lastAccessTime;
}
@Override
/**
 * Opens a new HiveServer2 session for the client connection.
 * Creates a new SessionState object that will be associated with this HiveServer2 session.
 * When the server executes multiple queries in the same session,
 * this SessionState object is reused across multiple queries.
 * Note that if doAs is true, this call goes through a proxy object,
 * which wraps the method logic in a UserGroupInformation#doAs.
 * That's why it is important to create SessionState here rather than in the constructor.
 */
public void open(Map<String, String> sessionConfMap) throws HiveSQLException {
  // Build the per-connection session and bind it to the current thread.
  sessionState = new SessionState(hiveConf, username);
  sessionState.setUserIpAddress(ipAddress);
  sessionState.setIsHiveServerQuery(true);
  SessionState.start(sessionState);
  try {
    // Pick up any jars added to the reloadable-aux-jars path since startup.
    sessionState.reloadAuxJars();
  } catch (IOException e) {
    // A failed jar reload aborts session open; preserve the cause for callers.
    String msg = "Failed to load reloadable jar file path: " + e;
    LOG.error(msg, e);
    throw new HiveSQLException(msg, e);
  }
  // Process global init file: .hiverc
  processGlobalInitFile();
  // Apply any per-session configuration overrides supplied by the client.
  if (sessionConfMap != null) {
    configureSession(sessionConfMap);
  }
  // A freshly opened session counts as both just-accessed and just-idle.
  lastAccessTime = System.currentTimeMillis();
  lastIdleTime = lastAccessTime;
}
// NOTE(review): fragment — statements from a larger session-open routine that
// continues outside this chunk. Builds the per-connection SessionState and
// tags it with client metadata.
sessionState = new SessionState(sessionConf, username);
sessionState.setUserIpAddress(ipAddress);
// Mark that queries in this session originate from HiveServer2.
sessionState.setIsHiveServerQuery(true);
// Record any intermediate proxies the client connection passed through.
sessionState.setForwardedAddresses(SessionManager.getForwardedAddresses());
sessionState.setIsUsingThriftJDBCBinarySerDe(updateIsUsingThriftJDBCBinarySerDe());