public static BaseSemanticAnalyzer get(QueryState queryState, ASTNode tree) throws SemanticException { BaseSemanticAnalyzer sem = getInternal(queryState, tree); if(queryState.getHiveOperation() == null) { String query = queryState.getQueryString(); if(query != null && query.length() > 30) { query = query.substring(0, 30); } String msg = "Unknown HiveOperation for query='" + query + "' queryId=" + queryState.getQueryId(); //throw new IllegalStateException(msg); LOG.debug(msg); } return sem; }
// NOTE(review): truncated excerpt — the switch body continues beyond this view; do not
// restructure without the full method. A null HiveOperation is defaulted to QUERY so the
// switch itself never NPEs; COMMIT/ROLLBACK without an open transaction raise
// OP_NOT_ALLOWED_WITHOUT_TXN. Explain plans never open an implicit txn.
private boolean startImplicitTxn(HiveTxnManager txnManager) throws LockException { boolean shouldOpenImplicitTxn = !ctx.isExplainPlan(); switch (queryState.getHiveOperation() == null ? HiveOperation.QUERY : queryState.getHiveOperation()) { case COMMIT: case ROLLBACK: if(!txnManager.isTxnOpen()) { throw new LockException(null, ErrorMsg.OP_NOT_ALLOWED_WITHOUT_TXN, queryState.getHiveOperation().getOperationName());
/**
 * Refreshes this object's read/write entities and command type from a
 * freshly run semantic analyzer.
 */
@Override
public void update(BaseSemanticAnalyzer sem) {
  final QueryState analyzerState = sem.getQueryState();
  this.commandType = analyzerState.getHiveOperation();
  this.inputs = sem.getInputs();
  this.outputs = sem.getOutputs();
}
// Re-sync cached inputs, outputs, and the operation type after (re-)analysis.
@Override
public void update(BaseSemanticAnalyzer sem) {
  inputs = sem.getInputs();
  outputs = sem.getOutputs();
  commandType = sem.getQueryState().getHiveOperation();
}
/**
 * Builds the {@link QueryResultsCache.QueryInfo} describing this query for
 * insertion into the results cache, stamped with the session's current
 * query timestamp.
 */
private QueryResultsCache.QueryInfo createCacheQueryInfoForQuery(QueryResultsCache.LookupInfo lookupInfo) {
  final long queryStartMillis = SessionState.get().getQueryCurrentTimestamp().toEpochMilli();
  return new QueryResultsCache.QueryInfo(
      queryStartMillis,
      lookupInfo,
      queryState.getHiveOperation(),
      resultSchema,
      getTableAccessInfo(),
      getColumnAccessInfo(),
      inputs);
}
/** * Some initial checks for a query to see if we can look this query up in the results cache. */ private boolean queryTypeCanUseCache() { if (this instanceof ColumnStatsSemanticAnalyzer) { // Column stats generates "select compute_stats() .." queries. // Disable caching for these. return false; } if (queryState.getHiveOperation() != HiveOperation.QUERY) { return false; } if (qb.getParseInfo().isAnalyzeCommand()) { return false; } if (qb.getParseInfo().hasInsertTables()) { return false; } // HIVE-19096 - disable for explain analyze if (ctx.getExplainAnalyze() != null) { return false; } return true; }
@Test
public void testOutputPlanVectorizationJsonShouldMatch() throws Exception {
  // Stub a query state reporting an EXPLAIN with vectorization enabled.
  QueryState queryState = mock(QueryState.class);
  when(queryState.getHiveOperation()).thenReturn(HiveOperation.EXPLAIN);
  HiveConf conf = new HiveConf();
  conf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true);
  when(queryState.getConf()).thenReturn(conf);
  uut.queryState = queryState;

  // Vectorization enabled: the condition shows up under enabledConditionsMet.
  JsonNode actual = objectMapper.readTree(uut.outputPlanVectorization(null, true).getRight().toString());
  JsonNode expectedWhenEnabled = objectMapper.readTree(
      "{\"enabled\":true,\"enabledConditionsMet\":[\"hive.vectorized.execution.enabled IS true\"]}");
  assertEquals(expectedWhenEnabled, actual);

  // Vectorization disabled: same condition reported under enabledConditionsNotMet.
  conf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, false);
  actual = objectMapper.readTree(uut.outputPlanVectorization(null, true).getRight().toString());
  JsonNode expectedWhenDisabled = objectMapper.readTree(
      "{\"enabled\":false,\"enabledConditionsNotMet\":[\"hive.vectorized.execution.enabled IS false\"]}");
  assertEquals(expectedWhenDisabled, actual);
}
@Test public void testCollectAuthRelatedEntitiesJsonShouldMatch() throws Exception { QueryState qs = mock(QueryState.class); when(qs.getHiveOperation()).thenReturn(HiveOperation.EXPLAIN); uut.queryState = qs; SessionState.start(new HiveConf(ExplainTask.class)); // SessionState.get().setCommandType(HiveOperation.EXPLAIN); HiveAuthenticationProvider authenticationProviderMock = mock(HiveAuthenticationProvider.class); when(authenticationProviderMock.getUserName()).thenReturn("test-user"); SessionState.get().setAuthenticator(authenticationProviderMock); SessionState.get().setAuthorizer(mock(HiveAuthorizationProvider.class)); ExplainWork work = mockExplainWork(); JsonNode result = objectMapper.readTree(uut.collectAuthRelatedEntities(null, work).toString()); JsonNode expected = objectMapper.readTree("{\"CURRENT_USER\":\"test-user\"," + "\"OPERATION\":\"EXPLAIN\",\"INPUTS\":[],\"OUTPUTS\":[]}"); assertEquals(expected, result); }
// NOTE(review): truncated excerpt — the enclosing try begins above this view and the
// finally restores the previously active authorizer. The local 'operation' appears
// unused in the visible span (doAuthorization re-reads the state) — confirm against
// the full method before removing it.
HiveOperation operation = queryState.getHiveOperation(); Driver.doAuthorization(queryState.getHiveOperation(), analyzer, ""); } finally { SessionState.get().setActiveAuthorizer(delegate);
// NOTE(review): truncated excerpt (duplicate of the span above it in this listing) —
// runs authorization for the analyzed statement, then restores the delegate authorizer
// in the finally. 'operation' is not referenced within the visible span — verify.
HiveOperation operation = queryState.getHiveOperation(); Driver.doAuthorization(queryState.getHiveOperation(), analyzer, ""); } finally { SessionState.get().setActiveAuthorizer(delegate);
/**
 * Pre-execution hook entry point: flags the session's FetchConverter when a
 * non-explain QUERY is about to run, then delegates to the entity-based overload.
 */
@Override
public void run(HookContext hookContext) throws Exception {
  assert hookContext.getHookType() == HookType.PRE_EXEC_HOOK;
  SessionState ss = SessionState.get();
  QueryState queryState = hookContext.getQueryState();
  if (ss != null && ss.out instanceof FetchConverter) {
    boolean foundQuery = queryState.getHiveOperation() == HiveOperation.QUERY
        && !hookContext.getQueryPlan().isForExplain();
    ((FetchConverter) ss.out).foundQuery(foundQuery);
  }
  this.run(queryState, hookContext.getInputs(), hookContext.getOutputs(), hookContext.getUgi());
}
// Pre-exec hook: notify the FetchConverter whether a real (non-explain) QUERY
// is running, then hand off to the run(queryState, inputs, outputs, ugi) overload.
@Override
public void run(HookContext hookContext) throws Exception {
  assert (hookContext.getHookType() == HookType.PRE_EXEC_HOOK);
  QueryState queryState = hookContext.getQueryState();
  SessionState sessionState = SessionState.get();
  if (sessionState != null && sessionState.out instanceof FetchConverter) {
    boolean isPlainQuery = queryState.getHiveOperation() == HiveOperation.QUERY
        && !hookContext.getQueryPlan().isForExplain();
    ((FetchConverter) sessionState.out).foundQuery(isPlainQuery);
  }
  Set<ReadEntity> readEntities = hookContext.getInputs();
  Set<WriteEntity> writeEntities = hookContext.getOutputs();
  UserGroupInformation ugi = hookContext.getUgi();
  this.run(queryState, readEntities, writeEntities, ugi);
}
// NOTE(review): truncated excerpt — the try/catch continues past this view.
// 'operation' is presumably captured so the pre-analysis operation can be
// restored or compared after analyzeInternal runs — confirm in the full method.
HiveOperation operation = queryState.getHiveOperation(); try { analyzer.analyzeInternal(createTable);
// NOTE(review): truncated excerpt (same pattern repeats in this listing) —
// captures the current HiveOperation before analyzing a CREATE TABLE tree;
// the surrounding try/catch is outside this view.
HiveOperation operation = queryState.getHiveOperation(); try { analyzer.analyzeInternal(createTable);
// NOTE(review): truncated excerpt — snapshot of the HiveOperation taken ahead of
// analyzeInternal(createTable); how 'operation' is used afterwards is not visible here.
HiveOperation operation = queryState.getHiveOperation(); try { analyzer.analyzeInternal(createTable);
// NOTE(review): this span appears to be several disjoint excerpts fused together —
// the bare "queryState.getHiveOperation(), schema);" is mid-argument-list and the
// trailing if/doAuthorization lines belong to a different site. Do not edit without
// the surrounding file. Visible behavior: the hook context gets the client IP,
// command text, and operation; REPLDUMP additionally records the last repl id;
// KILL_QUERY skips the doAuthorization call.
hookCtx.setIpAddress(SessionState.get().getUserIpAddress()); hookCtx.setCommand(command); hookCtx.setHiveOperation(queryState.getHiveOperation()); if ((queryState.getHiveOperation() != null) && queryState.getHiveOperation().equals(HiveOperation.REPLDUMP)) { setLastReplIdForDump(queryState.getConf()); queryState.getHiveOperation(), schema); if (queryState.getHiveOperation() != HiveOperation.KILL_QUERY) { doAuthorization(queryState.getHiveOperation(), sem, command);
// NOTE(review): truncated excerpt — same capture-then-analyze pattern as the earlier
// fragments; the try block and any use of 'operation' continue past this view.
HiveOperation operation = queryState.getHiveOperation(); try { analyzer.analyzeInternal(createTable);
// NOTE(review): truncated excerpt — guard skips file-sink generation for CREATE VIEW.
// getHiveOperation() can legitimately be null for unknown operations (the factory only
// debug-logs that case), so getHiveOperation().equals(...) risks an NPE; the null-safe
// form HiveOperation.CREATEVIEW != queryState.getHiveOperation() is preferable — TODO
// confirm against the full method and fix there.
qb.getParseInfo().setOuterQueryLimit(limit.intValue()); if (!queryState.getHiveOperation().equals(HiveOperation.CREATEVIEW)) { curr = genFileSinkPlan(dest, qb, curr);
// NOTE(review): truncated excerpt (duplicate of the span above in this listing) —
// sets the outer query limit, then generates the file sink unless this is CREATE VIEW.
// Same NPE caveat: a null HiveOperation would make .equals(...) throw — verify.
qb.getParseInfo().setOuterQueryLimit(limit.intValue()); if (!queryState.getHiveOperation().equals(HiveOperation.CREATEVIEW)) { curr = genFileSinkPlan(dest, qb, curr);
// NOTE(review): single-line excerpt of a larger if/else chain — this branch handles
// a REPL LOAD arriving with no transaction open. The '==' enum comparison is
// null-safe, unlike the .equals(...) usages elsewhere in this listing.
} else if (!queryTxnMgr.isTxnOpen() && queryState.getHiveOperation() == HiveOperation.REPLLOAD) {