/** Exposes the parent session's {@link HiveConf} as this operation's configuration. */
@Override
public HiveConf getSessionConf() {
  return hiveSession.getHiveConf();
}
/** Reads the query id ({@code ConfVars.HIVEQUERYID}) from the operation's parent session conf. */
private String getQueryId(Operation operation) {
  HiveConf sessionConf = operation.getParentSession().getHiveConf();
  return sessionConf.getVar(ConfVars.HIVEQUERYID);
}
public static ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession, String statement, Map<String, String> confOverlay, boolean runAsync, long queryTimeout) throws HiveSQLException { String cleanStatement = HiveStringUtils.removeComments(statement); String[] tokens = cleanStatement.trim().split("\\s+"); CommandProcessor processor = null; try { processor = CommandProcessorFactory.getForHiveCommand(tokens, parentSession.getHiveConf()); } catch (SQLException e) { throw new HiveSQLException(e.getMessage(), e.getSQLState(), e); } if (processor == null) { // runAsync, queryTimeout makes sense only for a SQLOperation // Pass the original statement to SQLOperation as sql parser can remove comments by itself return new SQLOperation(parentSession, statement, confOverlay, runAsync, queryTimeout); } return new HiveCommandOperation(parentSession, cleanStatement, processor, confOverlay); } }
/**
 * Builds a GET_TABLES metadata operation.
 *
 * <p>Resolves the server's table-type mapping (via
 * {@code HIVE_SERVER2_TABLE_TYPE_MAPPING}), translates the caller-supplied table types into
 * Hive table types, and allocates the result row set.
 */
protected GetTablesOperation(HiveSession parentSession, String catalogName, String schemaName,
    String tableName, List<String> tableTypes) {
  super(parentSession, OperationType.GET_TABLES);
  this.catalogName = catalogName;
  this.schemaName = schemaName;
  this.tableName = tableName;
  String mappingName =
      getParentSession().getHiveConf().getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
  tableTypeMapping = TableTypeMappingFactory.getTableTypeMapping(mappingName);
  if (tableTypes == null) {
    tableTypeList = null;
  } else {
    // Map each client-visible table type to its Hive equivalents.
    tableTypeList = new ArrayList<String>();
    for (String requestedType : tableTypes) {
      tableTypeList.addAll(Arrays.asList(tableTypeMapping.mapToHiveType(requestedType.trim())));
    }
  }
  this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
  LOG.info("Starting GetTablesOperation with the following parameters: "
      + "catalogName={}, schemaName={}, tableName={}, tableTypes={}",
      catalogName, schemaName, tableName,
      tableTypeList != null ? tableTypeList.toString() : "null");
}
protected void createOperationLog() { if (parentSession.isOperationLogEnabled()) { File operationLogFile = new File(parentSession.getOperationLogSessionDir(), queryState.getQueryId()); isOperationLogEnabled = true; // create OperationLog object with above log file operationLog = new OperationLog(opHandle.toString(), operationLogFile, parentSession.getHiveConf()); } }
/**
 * Builds a GET_TABLE_TYPES metadata operation: resolves the configured table-type mapping
 * and allocates the result row set.
 */
protected GetTableTypesOperation(HiveSession parentSession) {
  super(parentSession, OperationType.GET_TABLE_TYPES);
  String mappingName =
      getParentSession().getHiveConf().getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
  tableTypeMapping = TableTypeMappingFactory.getTableTypeMapping(mappingName);
  rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
  LOG.info("Starting GetTableTypesOperation");
}
/**
 * Base constructor for an operation bound to a session.
 *
 * <p>Creates the operation handle, initializes timing/timeout bookkeeping, starts the
 * state-metrics scope, and builds a {@link QueryState} (with a freshly generated query id)
 * from the session conf plus the per-operation overlay.
 *
 * @param parentSession session that owns this operation
 * @param confOverlay   per-operation configuration overrides folded into the QueryState
 * @param opType        the kind of operation (EXECUTE_STATEMENT, GET_TABLES, ...)
 */
protected Operation(HiveSession parentSession, Map<String, String> confOverlay, OperationType opType) { this.parentSession = parentSession; this.opHandle = new OperationHandle(opType, parentSession.getProtocolVersion()); opTerminateMonitorLatch = new CountDownLatch(1); beginTime = System.currentTimeMillis(); lastAccessTime = beginTime; operationTimeout = HiveConf.getTimeVar(parentSession.getHiveConf(), HiveConf.ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT, TimeUnit.MILLISECONDS); scheduledExecutorService = Executors.newScheduledThreadPool(1); currentStateScope = updateOperationStateMetrics(null, MetricsConstant.OPERATION_PREFIX, MetricsConstant.COMPLETED_OPERATION_PREFIX, state); queryState = new QueryState.Builder() .withConfOverlay(confOverlay) .withGenerateNewQueryId(true) .withHiveConf(parentSession.getHiveConf()) .build(); }
/**
 * Thrift server callback invoked when a client transport disconnects.
 *
 * <p>Decrements the open-connections metric (best effort — metric failures are only logged),
 * and, if the disconnected context still has a live session, optionally closes it depending
 * on {@code HIVE_SERVER2_CLOSE_SESSION_ON_DISCONNECT}.
 */
@Override public void deleteContext(ServerContext serverContext, TProtocol input, TProtocol output) { Metrics metrics = MetricsFactory.getInstance(); if (metrics != null) { try { metrics.decrementCounter(MetricsConstant.OPEN_CONNECTIONS); } catch (Exception e) { LOG.warn("Error Reporting JDO operation to Metrics system", e); } } ThriftCLIServerContext context = (ThriftCLIServerContext) serverContext; SessionHandle sessionHandle = context.getSessionHandle(); if (sessionHandle != null) { LOG.info("Session disconnected without closing properly. "); try { boolean close = cliService.getSessionManager().getSession(sessionHandle).getHiveConf() .getBoolVar(ConfVars.HIVE_SERVER2_CLOSE_SESSION_ON_DISCONNECT); LOG.info((close ? "" : "Not ") + "Closing the session: " + sessionHandle); if (close) { cliService.closeSession(sessionHandle); } } catch (HiveSQLException e) { LOG.warn("Failed to close session: " + e, e); } } }
/**
 * Test fixture: enables HS2 metrics, mocks a HiveSession backed by that conf, and creates
 * the SQLOperation under test plus a handle to the Codahale metrics instance.
 */
@Before
public void setup() throws Exception {
  HiveConf hiveConf = new HiveConf();
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true);
  MetricsFactory.init(hiveConf);

  HiveSession mockSession = mock(HiveSession.class);
  when(mockSession.getHiveConf()).thenReturn(hiveConf);
  when(mockSession.getSessionState()).thenReturn(mock(SessionState.class));
  when(mockSession.getUserName()).thenReturn("userName");

  operation =
      new SQLOperation(mockSession, "select * from dummy", Maps.<String, String>newHashMap(), false, 0L);
  metrics = (CodahaleMetrics) MetricsFactory.getInstance();
}
HiveConf conf = operation.getParentSession().getHiveConf(); if (operation.shouldRunAsync()) { long maxTimeout = HiveConf.getTimeVar(conf,
/**
 * Verifies that query info is visible to lifetime hooks: registers
 * {@code QueryInfoVerificationHook} via {@code HIVE_QUERY_LIFETIME_HOOKS} and runs a
 * SQLOperation against a mocked session so the hook performs its checks.
 */
@Test
public void testQueryInfoInHookContext()
    throws IllegalAccessException, ClassNotFoundException, InstantiationException,
    HiveSQLException {
  HiveConf hiveConf = new HiveConf(TestQueryHooks.class);
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  hiveConf.set(HiveConf.ConfVars.HIVE_QUERY_LIFETIME_HOOKS.varname,
      QueryInfoVerificationHook.class.getName());
  SessionState.start(hiveConf);

  HiveSession session = mock(HiveSession.class);
  when(session.getHiveConf()).thenReturn(hiveConf);
  when(session.getSessionState()).thenReturn(SessionState.get());

  SQLOperation op = new SQLOperation(session, QUERY, ImmutableMap.of(), false, 0);
  op.run();
}
/** Returns the configuration of the wrapped HiveSession. */
@Override
public HiveConf getSessionConf() {
  HiveConf conf = hiveSession.getHiveConf();
  return conf;
}
/** Session-level configuration, delegated to the underlying HiveSession. */
@Override
public HiveConf getSessionConf() {
  return hiveSession.getHiveConf();
}
/**
 * Base constructor for an operation bound to a session.
 *
 * <p>Creates the operation handle for the session's protocol version, records the current
 * time as the last-access time, and reads the idle-operation timeout from the session conf.
 *
 * @param parentSession   session that owns this operation
 * @param opType          the kind of operation (EXECUTE_STATEMENT, GET_TABLES, ...)
 * @param runInBackground whether the operation should run asynchronously
 */
protected Operation(HiveSession parentSession, OperationType opType, boolean runInBackground) { this.parentSession = parentSession; this.runAsync = runInBackground; this.opHandle = new OperationHandle(opType, parentSession.getProtocolVersion()); lastAccessTime = System.currentTimeMillis(); operationTimeout = HiveConf.getTimeVar(parentSession.getHiveConf(), HiveConf.ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT, TimeUnit.MILLISECONDS); }
/**
 * Base constructor for an operation bound to a session.
 *
 * <p>Initializes the operation handle, async flag, last-access timestamp, and the
 * idle-operation timeout taken from {@code HIVE_SERVER2_IDLE_OPERATION_TIMEOUT}.
 *
 * @param parentSession   session that owns this operation
 * @param opType          the kind of operation being created
 * @param runInBackground whether the operation should run asynchronously
 */
protected Operation(HiveSession parentSession, OperationType opType, boolean runInBackground) { this.parentSession = parentSession; this.runAsync = runInBackground; this.opHandle = new OperationHandle(opType, parentSession.getProtocolVersion()); lastAccessTime = System.currentTimeMillis(); operationTimeout = HiveConf.getTimeVar(parentSession.getHiveConf(), HiveConf.ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT, TimeUnit.MILLISECONDS); }
protected void createOperationLog() { if (parentSession.isOperationLogEnabled()) { File operationLogFile = new File(parentSession.getOperationLogSessionDir(), queryState.getQueryId()); isOperationLogEnabled = true; // create OperationLog object with above log file operationLog = new OperationLog(opHandle.toString(), operationLogFile, parentSession.getHiveConf()); } }
/**
 * Looks up the query id for the operation identified by the given Thrift handle.
 *
 * <p>Resolves the operation through the session manager's OperationManager and reads
 * {@code ConfVars.HIVEQUERYID} from its parent session's configuration.
 *
 * @param opHandle Thrift handle identifying the operation
 * @return the query id configured for the operation
 * @throws HiveSQLException if the operation cannot be found
 */
@Override
public String getQueryId(TOperationHandle opHandle) throws HiveSQLException {
  Operation operation = sessionManager.getOperationManager().getOperation(
      new OperationHandle(opHandle));
  final String queryId = operation.getParentSession().getHiveConf().getVar(ConfVars.HIVEQUERYID);
  // Parameterized logging: avoids building the message string when DEBUG is disabled,
  // and matches the SLF4J style used elsewhere in this codebase.
  LOG.debug("{}: getQueryId() {}", opHandle, queryId);
  return queryId;
}
/**
 * Builds a GET_TABLE_TYPES metadata operation using the table-type mapping configured by
 * {@code HIVE_SERVER2_TABLE_TYPE_MAPPING}.
 */
protected GetTableTypesOperation(HiveSession parentSession) {
  super(parentSession, OperationType.GET_TABLE_TYPES);
  HiveConf conf = getParentSession().getHiveConf();
  String mappingName = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
  tableTypeMapping = TableTypeMappingFactory.getTableTypeMapping(mappingName);
  rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
}
/**
 * Builds a GET_TABLE_TYPES metadata operation: resolves the configured table-type mapping,
 * then allocates the result row set for this protocol version.
 */
protected GetTableTypesOperation(HiveSession parentSession) {
  super(parentSession, OperationType.GET_TABLE_TYPES);
  String configuredMapping = getParentSession().getHiveConf()
      .getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
  tableTypeMapping = TableTypeMappingFactory.getTableTypeMapping(configuredMapping);
  rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
}
/**
 * Builds a GET_TABLE_TYPES metadata operation. The table-type mapping comes from the
 * session conf ({@code HIVE_SERVER2_TABLE_TYPE_MAPPING}).
 */
protected GetTableTypesOperation(HiveSession parentSession) {
  super(parentSession, OperationType.GET_TABLE_TYPES);
  final String mapping = getParentSession().getHiveConf()
      .getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
  tableTypeMapping = TableTypeMappingFactory.getTableTypeMapping(mapping);
  rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
}