/**
 * Builds the test fixture: a metrics-enabled HiveConf, a mocked HiveSession
 * wired to return that conf, a SQLOperation under test, and a reference to
 * the CodahaleMetrics instance registered by MetricsFactory.
 */
@Before
public void setup() throws Exception {
  HiveConf hiveConf = new HiveConf();
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true);
  MetricsFactory.init(hiveConf);

  HiveSession mockSession = mock(HiveSession.class);
  when(mockSession.getHiveConf()).thenReturn(hiveConf);
  when(mockSession.getSessionState()).thenReturn(mock(SessionState.class));
  when(mockSession.getUserName()).thenReturn("userName");

  operation = new SQLOperation(mockSession, "select * from dummy",
      Maps.<String, String>newHashMap(), false, 0L);
  metrics = (CodahaleMetrics) MetricsFactory.getInstance();
}
// NOTE(review): excerpt — this is the tail of a `new Thread(new Runnable() { ... }).start();`
// expression whose opening lies outside this view.
@Override
public void run() {
  try {
    // Attach the session's Hive object to this thread, run a getTables
    // metadata operation, and close its handle.
    Hive.set(session.getSessionHive());
    OperationHandle handle = session.getTables("catalog", "schema", "table", null);
    session.closeOperation(handle);
  } catch (Exception e) {
    throw new RuntimeException(e);
  } finally {
    try {
      // Rendezvous with the coordinating thread; `ready` is presumably a
      // CyclicBarrier — TODO confirm against the enclosing test.
      ready.await();
    } catch (InterruptedException | BrokenBarrierException e) {
      // ignore — best-effort synchronization; note the interrupt flag is not
      // restored here (NOTE(review): acceptable only because this is test code)
    }
  }
}
}).start();
/** * Execute statement asynchronously on the server with a timeout. This is a non-blocking call */ @Override public OperationHandle executeStatementAsync(SessionHandle sessionHandle, String statement, Map<String, String> confOverlay, long queryTimeout) throws HiveSQLException { HiveSession session = sessionManager.getSession(sessionHandle); // need to reset the monitor, as operation handle is not available down stream, Ideally the // monitor should be associated with the operation handle. session.getSessionState().updateProgressMonitor(null); OperationHandle opHandle = session.executeStatementAsync(statement, confOverlay, queryTimeout); LOG.debug(sessionHandle + ": executeStatementAsync()"); return opHandle; }
// NOTE(review): excerpt — the trailing `};` closes an anonymous class (a gauge,
// presumably) whose declaration lies outside this view.
@Override
public Integer getValue() {
  long sum = 0;
  long currentTime = System.currentTimeMillis();
  for (HiveSession s : getSessions()) {
    // Only sessions whose no-operation time is the 0 sentinel contribute their
    // elapsed time since last access — TODO confirm the sentinel's meaning
    // against HiveSession.getNoOperationTime().
    if (s.getNoOperationTime() == 0L) {
      sum += currentTime - s.getLastAccessTime();
    }
  }
  // Overflow guard: if the long total does not survive a round-trip through
  // int, report -1 instead of a truncated value.
  // in case of an overflow return -1
  return (int) sum != sum ? -1 : (int) sum;
}
};
protected void createOperationLog() { if (parentSession.isOperationLogEnabled()) { File operationLogFile = new File(parentSession.getOperationLogSessionDir(), queryState.getQueryId()); isOperationLogEnabled = true; // create OperationLog object with above log file operationLog = new OperationLog(opHandle.toString(), operationLogFile, parentSession.getHiveConf()); } }
// NOTE(review): excerpt — braces are unbalanced here; several error-handling
// paths from session creation appear stitched together (note the duplicate
// `catch (Throwable t)` which would not compile as written), and the closing
// braces fall outside this view.
session.setSessionManager(this);
session.setOperationManager(operationManager);
try {
  session.open(sessionConf);
} catch (Exception e) {
  LOG.warn("Failed to open session", e);
  // Best-effort close of the half-opened session; cleanup failures are logged
  // rather than allowed to mask the original open failure.
  try {
    session.close();
  } catch (Throwable t) {
    LOG.warn("Error closing session", t);
    session.setOperationLogSessionDir(operationLogRootDir);
    LOG.warn("Failed to execute session hooks", e);
    try {
      session.close();
    } catch (Throwable t) {
      LOG.warn("Error closing session", t);
      synchronized (sessionAddLock) {
        // Register the session only while the manager is accepting new ones.
        if (allowSessions) {
          handleToSession.put(session.getSessionHandle(), session);
          isAdded = true;
          LOG.info("Session opened, " + session.getSessionHandle() + ", current sessions:" + getOpenSessionCount());
          return session;
// NOTE(review): excerpt from the idle-session cleanup sweep — the enclosing
// loop, the target of this `break;`, and the closing braces lie outside this
// view.
break;
// Expire a session when its last access is older than sessionTimeout; when
// checkOperation is set, additionally require its no-operation time to exceed
// the timeout (i.e. don't kill sessions with work in flight).
if (sessionTimeout > 0 && session.getLastAccessTime() + sessionTimeout <= current
    && (!checkOperation || session.getNoOperationTime() > sessionTimeout)) {
  SessionHandle handle = session.getSessionHandle();
  LOG.warn("Session " + handle + " is Timed-out (last access : "
      + new Date(session.getLastAccessTime()) + ") and will be closed");
  try {
    closeSession(handle);
    session.closeExpiredOperations();
/**
 * Releases the parent session's captured stdout/stderr print streams,
 * delegating the actual close-and-log to ServiceUtils.cleanup.
 */
private void tearDownSessionIO() {
  ServiceUtils.cleanup(LOG,
      parentSession.getSessionState().out,
      parentSession.getSessionState().err);
}
/** Returns the user name of the wrapped Hive session. */
@Override
public String getSessionUser() {
  return this.hiveSession.getUserName();
}
/** Returns the HiveConf of the wrapped Hive session. */
@Override
public HiveConf getSessionConf() {
  return this.hiveSession.getHiveConf();
}
/**
 * Base constructor: binds the operation to its parent session, allocates an
 * operation handle, records start/access timestamps, reads the idle-operation
 * timeout from the session conf, and builds the per-operation QueryState with
 * a freshly generated query id.
 */
protected Operation(HiveSession parentSession, Map<String, String> confOverlay, OperationType opType) {
  this.parentSession = parentSession;
  // The handle carries the operation type plus the session's protocol version.
  this.opHandle = new OperationHandle(opType, parentSession.getProtocolVersion());
  opTerminateMonitorLatch = new CountDownLatch(1);
  beginTime = System.currentTimeMillis();
  lastAccessTime = beginTime;
  operationTimeout = HiveConf.getTimeVar(parentSession.getHiveConf(), HiveConf.ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT, TimeUnit.MILLISECONDS);
  // NOTE(review): a dedicated single-thread scheduler is created per Operation
  // instance — under load this means one scheduler thread pool per operation;
  // confirm this is intentional and that it is shut down when the operation ends.
  scheduledExecutorService = Executors.newScheduledThreadPool(1);
  currentStateScope = updateOperationStateMetrics(null, MetricsConstant.OPERATION_PREFIX, MetricsConstant.COMPLETED_OPERATION_PREFIX, state);
  queryState = new QueryState.Builder()
      .withConfOverlay(confOverlay)
      .withGenerateNewQueryId(true)
      .withHiveConf(parentSession.getHiveConf())
      .build();
}
/** * Execute statement on the server with a timeout. This is a blocking call. */ @Override public OperationHandle executeStatement(SessionHandle sessionHandle, String statement, Map<String, String> confOverlay, long queryTimeout) throws HiveSQLException { HiveSession session = sessionManager.getSession(sessionHandle); // need to reset the monitor, as operation handle is not available down stream, Ideally the // monitor should be associated with the operation handle. session.getSessionState().updateProgressMonitor(null); OperationHandle opHandle = session.executeStatement(statement, confOverlay, queryTimeout); LOG.debug(sessionHandle + ": executeStatement()"); return opHandle; }
/**
 * Runs a SQLOperation with QueryInfoVerificationHook registered as a query
 * lifetime hook, so the hook can inspect the QueryInfo it receives.
 */
@Test
public void testQueryInfoInHookContext() throws IllegalAccessException, ClassNotFoundException,
    InstantiationException, HiveSQLException {
  HiveConf hiveConf = new HiveConf(TestQueryHooks.class);
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  hiveConf.set(HiveConf.ConfVars.HIVE_QUERY_LIFETIME_HOOKS.varname,
      QueryInfoVerificationHook.class.getName());
  SessionState.start(hiveConf);

  HiveSession session = mock(HiveSession.class);
  when(session.getHiveConf()).thenReturn(hiveConf);
  when(session.getSessionState()).thenReturn(SessionState.get());

  SQLOperation op = new SQLOperation(session, QUERY, ImmutableMap.of(), false, 0);
  op.run();
}
/**
 * Builds a SQLOperation for the given statement. Applies the server-side
 * query timeout cap, reads the operation-log cleanup delay, wires up session
 * I/O, initializes QueryInfo for status reporting, and opens a metrics scope
 * for submitted queries when a metrics backend is configured.
 */
public SQLOperation(HiveSession parentSession, String statement, Map<String, String> confOverlay, boolean runInBackground, long queryTimeout) {
  // TODO: call setRemoteUser in ExecuteStatementOperation or higher.
  super(parentSession, statement, confOverlay, runInBackground);
  this.runAsync = runInBackground;
  this.queryTimeout = queryTimeout;
  // Cap the caller-supplied timeout with the configured server-side value:
  // use the configured one when the caller gave none (<= 0) or asked for more.
  long timeout = HiveConf.getTimeVar(queryState.getConf(), HiveConf.ConfVars.HIVE_QUERY_TIMEOUT_SECONDS, TimeUnit.SECONDS);
  if (timeout > 0 && (queryTimeout <= 0 || timeout < queryTimeout)) {
    this.queryTimeout = timeout;
  }
  this.operationLogCleanupDelayMs = HiveConf.getTimeVar(queryState.getConf(), HiveConf.ConfVars.HIVE_SERVER2_OPERATION_LOG_CLEANUP_DELAY, TimeUnit.MILLISECONDS);
  setupSessionIO(parentSession.getSessionState());
  queryInfo = new QueryInfo(getState().toString(), getParentSession().getUserName(), getExecutionEngine(), getHandle().getHandleIdentifier().toString());
  // Track submitted queries when metrics are enabled (null means no backend).
  // NOTE(review): "QURIES" is the spelling of the project constant, not a typo here.
  Metrics metrics = MetricsFactory.getInstance();
  if (metrics != null) {
    submittedQryScp = metrics.createScope(MetricsConstant.HS2_SUBMITTED_QURIES);
  }
}
// NOTE(review): excerpt of JSP-generated rendering code — the access-check
// `if` is never closed in this view (only `continue;` is visible), and the
// loop's closing brace lies outside it.
for (HiveSession hiveSession: hiveSessions) {
  // Skip sessions the remote user is not permitted to view.
  if (!HttpServer.hasAccess(remoteUser, hiveSession.getUserName(), ctx, request)) {
    continue;
  // Emit one table row per visible session: user, IP, open-operation count,
  // session age (s), and idle time since last access (s).
  out.print( hiveSession.getUserName() );
  out.write("</td>\n <td>");
  out.print( hiveSession.getIpAddress() );
  out.write("</td>\n <td>");
  out.print( hiveSession.getOpenOperationCount() );
  out.write("</td>\n <td>");
  out.print( (currentTime - hiveSession.getCreationTime())/1000 );
  out.write("</td>\n <td>");
  out.print( (currentTime - hiveSession.getLastAccessTime())/1000 );
  out.write("</td>\n </tr>\n");
/**
 * Closes the operation identified by {@code opHandle}, delegating to the
 * parent session that owns it.
 *
 * @param opHandle handle of the operation to close
 * @throws HiveSQLException if the operation cannot be found or closed
 */
@Override
public void closeOperation(OperationHandle opHandle) throws HiveSQLException {
  sessionManager.getOperationManager().getOperation(opHandle)
      .getParentSession().closeOperation(opHandle);
  // Log with trailing "()" for consistency with the other CLIService methods
  // (cancelOperation(), getTables(), executeStatement(), ...).
  LOG.debug(opHandle + ": closeOperation()");
}
/**
 * Starts a getTables metadata operation on the given session and returns the
 * handle of the new operation.
 */
@Override
public OperationHandle getTables(SessionHandle sessionHandle, String catalogName,
    String schemaName, String tableName, List<String> tableTypes) throws HiveSQLException {
  OperationHandle handle = sessionManager.getSession(sessionHandle)
      .getTables(catalogName, schemaName, tableName, tableTypes);
  LOG.debug(sessionHandle + ": getTables()");
  return handle;
}
// NOTE(review): excerpt from a test — the enclosing method signature and
// setup of `sessionHandle`, `service`, and `client` are outside this view.
assertNotNull(sessionHandle);
HiveSession session = service.getSessionManager().getSession(sessionHandle);
// First async statement while the client transport is still open.
OperationHandle op1 = session.executeStatementAsync("show databases", null);
assertNotNull(op1);
client.closeTransport();
// Appears to verify the server-side session still accepts statements after
// the client transport is closed — TODO confirm against the test's intent.
OperationHandle op2 = session.executeStatementAsync("show databases", null);
assertNotNull(op2);
/**
 * Cancels the given delegation token on behalf of the session identified by
 * {@code sessionHandle}.
 */
@Override
public void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
    String tokenStr) throws HiveSQLException {
  sessionManager.getSession(sessionHandle).cancelDelegationToken(authFactory, tokenStr);
  LOG.info(sessionHandle + ": cancelDelegationToken()");
}
/**
 * Cancels the operation identified by {@code opHandle}, delegating to the
 * parent session that owns it.
 */
@Override
public void cancelOperation(OperationHandle opHandle) throws HiveSQLException {
  sessionManager.getOperationManager()
      .getOperation(opHandle)
      .getParentSession()
      .cancelOperation(opHandle);
  LOG.debug(opHandle + ": cancelOperation()");
}