/** Returns the {@link HiveConf} carried by the current {@code queryState}. */
@Override
public HiveConf getConf() {
  return queryState.getConf();
}
/** * Resets QueryState to get new queryId on Driver reuse. */ public void resetQueryState() { // Note: Driver cleanup for reuse at this point is not very clear. The assumption here is that // repeated compile/execute calls create new contexts, plan, etc., so we don't need to worry // propagating queryState into those existing fields, or resetting them. releaseResources(); this.queryState = new QueryState(queryState.getConf()); } }
/** Returns the query id recorded in the current {@code queryState}. */
public String getQueryId() {
  return queryState.getQueryId();
}
}
public static BaseSemanticAnalyzer get(QueryState queryState, ASTNode tree) throws SemanticException { BaseSemanticAnalyzer sem = getInternal(queryState, tree); if(queryState.getHiveOperation() == null) { String query = queryState.getQueryString(); if(query != null && query.length() > 30) { query = query.substring(0, 30); } String msg = "Unknown HiveOperation for query='" + query + "' queryId=" + queryState.getQueryId(); //throw new IllegalStateException(msg); LOG.debug(msg); } return sem; }
/**
 * Base constructor: wires the analyzer to the per-query state and metastore handle,
 * and initializes the empty task/entity collections that analysis will populate.
 *
 * @param queryState per-query state; supplies conf and the transaction manager
 * @param db Hive metastore accessor (may be shared by the session)
 * @throws SemanticException wrapping any failure during initialization
 */
public BaseSemanticAnalyzer(QueryState queryState, Hive db) throws SemanticException {
  try {
    this.queryState = queryState;
    this.conf = queryState.getConf();
    this.db = db;
    // Diamond operator: element types are fixed by the field declarations.
    rootTasks = new ArrayList<>();
    LOG = LoggerFactory.getLogger(this.getClass().getName());
    console = new LogHelper(LOG);
    idToTableNameMap = new HashMap<>();
    inputs = new LinkedHashSet<>();
    outputs = new LinkedHashSet<>();
    txnManager = queryState.getTxnManager();
  } catch (Exception e) {
    // Wrap everything so callers only need to handle SemanticException.
    throw new SemanticException(e);
  }
}
final String queryId = Strings.isNullOrEmpty(queryState.getQueryId()) ? QueryPlan.makeQueryId() : queryState.getQueryId(); ((Configurable) queryTxnMgr).setConf(conf); queryState.setTxnManager(queryTxnMgr); hookCtx.setIpAddress(SessionState.get().getUserIpAddress()); hookCtx.setCommand(command); hookCtx.setHiveOperation(queryState.getHiveOperation()); if ((queryState.getHiveOperation() != null) && queryState.getHiveOperation().equals(HiveOperation.REPLDUMP)) { setLastReplIdForDump(queryState.getConf()); queryState.getHiveOperation(), schema); if (queryState.getHiveOperation() != HiveOperation.KILL_QUERY) { doAuthorization(queryState.getHiveOperation(), sem, command);
/**
 * Invoked before runInternal().
 * Set up some preconditions, or configurations.
 */
protected void beforeRun() {
  // Tag the Hadoop shim layer with this query's id, then wire up operation logging.
  final String queryId = queryState.getQueryId();
  ShimLoader.getHadoopShims().setHadoopQueryContext(queryId);
  createOperationLog();
  LogUtils.registerLoggingContext(queryState.getConf());
}
/**
 * Builds a Driver around an externally-created {@link Context}.
 *
 * @param conf configuration used to create a fresh QueryState (no user name)
 * @param ctx pre-built context to reuse instead of creating one internally
 */
public Driver(HiveConf conf, Context ctx) {
  this(new QueryState(conf), null);
  this.ctx = ctx;
}
} else { HiveOperation opType = commandType.get(tree.getType()); queryState.setCommandType(opType); switch (tree.getType()) { case HiveParser.TOK_EXPLAIN: case HiveParser.TOK_ALTERTABLE_ADDCONSTRAINT: case HiveParser.TOK_ALTERTABLE_UPDATECOLUMNS: queryState.setCommandType(commandType.get(child.getType())); return new DDLSemanticAnalyzer(queryState); queryState.setCommandType(opType); return new DDLSemanticAnalyzer(queryState); case HiveParser.TOK_ALTERVIEW_RENAME: opType = commandType.get(child.getType()); queryState.setCommandType(opType); return new DDLSemanticAnalyzer(queryState); queryState.setCommandType(HiveOperation.ALTERVIEW_AS); return new SemanticAnalyzer(queryState); case HiveParser.TOK_ALTER_MATERIALIZED_VIEW_REWRITE: opType = commandType.get(child.getType()); queryState.setCommandType(opType); return new DDLSemanticAnalyzer(queryState); case HiveParser.TOK_ALTER_MATERIALIZED_VIEW_REBUILD: opType = commandType.get(child.getType()); queryState.setCommandType(opType);
private boolean startImplicitTxn(HiveTxnManager txnManager) throws LockException { boolean shouldOpenImplicitTxn = !ctx.isExplainPlan(); switch (queryState.getHiveOperation() == null ? HiveOperation.QUERY : queryState.getHiveOperation()) { case COMMIT: case ROLLBACK: if(!txnManager.isTxnOpen()) { throw new LockException(null, ErrorMsg.OP_NOT_ALLOWED_WITHOUT_TXN, queryState.getHiveOperation().getOperationName());
HiveOperation operation = queryState.getHiveOperation(); try { analyzer.analyzeInternal(createTable); } finally { queryState.setCommandType(operation);
/** Verifies the vectorization section of the JSON plan for both enabled and disabled states. */
@Test
public void testOutputPlanVectorizationJsonShouldMatch() throws Exception {
  HiveConf conf = new HiveConf();
  conf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true);

  QueryState mockedState = mock(QueryState.class);
  when(mockedState.getHiveOperation()).thenReturn(HiveOperation.EXPLAIN);
  when(mockedState.getConf()).thenReturn(conf);
  uut.queryState = mockedState;

  // Vectorization on: the "enabled" flag and the met-conditions list should be reported.
  JsonNode actual =
      objectMapper.readTree(uut.outputPlanVectorization(null, true).getRight().toString());
  JsonNode expected = objectMapper.readTree(
      "{\"enabled\":true,\"enabledConditionsMet\":[\"hive.vectorized.execution.enabled IS true\"]}");
  assertEquals(expected, actual);

  // Vectorization off: the unmet-conditions list should be reported instead.
  conf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, false);
  actual = objectMapper.readTree(uut.outputPlanVectorization(null, true).getRight().toString());
  expected = objectMapper.readTree(
      "{\"enabled\":false,\"enabledConditionsNotMet\":[\"hive.vectorized.execution.enabled IS false\"]}");
  assertEquals(expected, actual);
}
/**
 * Pre-execution hook: prints the query text, command type, and the read/write
 * entity sets to the session console.
 *
 * @param queryState current query state; may be null, in which case only entities print
 * @param inputs read entities of the query
 * @param outputs write entities of the query
 * @param ugi calling user (unused here, part of the hook interface)
 * @throws Exception per the hook contract (nothing is thrown by this implementation)
 */
public void run(QueryState queryState, Set<ReadEntity> inputs, Set<WriteEntity> outputs,
    UserGroupInformation ugi) throws Exception {
  LogHelper console = SessionState.getConsole();
  if (console == null) {
    // No session console (e.g. non-CLI invocation): nothing to print to.
    return;
  }
  if (queryState != null) {
    // Guard against a null query string; the previous code NPE'd on .trim().
    String query = queryState.getQueryString();
    console.printInfo("PREHOOK: query: " + (query == null ? null : query.trim()), false);
    console.printInfo("PREHOOK: type: " + queryState.getCommandType(), false);
  }
  printEntities(console, inputs, "PREHOOK: Input: ");
  printEntities(console, outputs, "PREHOOK: Output: ");
}
String queryId = QueryPlan.makeQueryId(); queryConf.setVar(HiveConf.ConfVars.HIVEQUERYID, queryId); setApplicationTag(queryConf, queryId); QueryState queryState = new QueryState(queryConf); if (lineageState != null) { queryState.setLineageState(lineageState);
boolean analyzeCreateTable(ASTNode child) throws SemanticException { if (ast.getToken().getType() == HiveParser.TOK_CREATETABLE) { // if it is not CTAS, we don't need to go further and just return if ((child = analyzeCreateTable(ast, qb, null)) == null) { return true; } } else { queryState.setCommandType(HiveOperation.QUERY); } return false; }
throw toSQLException("Error while compiling statement", response); if (queryState.getQueryTag() != null && queryState.getQueryId() != null) { parentSession.updateQueryTag(queryState.getQueryId(), queryState.getQueryTag());
if (queryState.getCommandType() .equals(HiveOperation.ALTERTABLE_UPDATETABLESTATS.getOperationName()) || queryState.getCommandType() .equals(HiveOperation.ALTERTABLE_UPDATEPARTSTATS.getOperationName())) { throw new SemanticException("AlterTable UpdateStats " + entry.getKey()
/**
 * Records lineage for the query target directory: either the load destination
 * described by {@code ltd}, or — for CTAS — the default warehouse location of
 * the table being created.
 *
 * @param ltd load-table descriptor, or null when nothing is being loaded
 * @param output terminal operator the directory maps to
 * @throws SemanticException if the CTAS target path cannot be resolved
 */
private void handleLineage(LoadTableDesc ltd, Operator output) throws SemanticException {
  if (ltd != null) {
    queryState.getLineageState().mapDirToOp(ltd.getSourcePath(), output);
    return;
  }
  if (!queryState.getCommandType()
      .equals(HiveOperation.CREATETABLE_AS_SELECT.getOperationName())) {
    // Neither a load nor a CTAS: no directory lineage to record.
    return;
  }
  // CTAS: the table does not exist yet, so derive its default warehouse path.
  String tableName = Utilities.getDbTableName(tableDesc.getTableName())[1];
  Path targetLocation;
  try {
    Warehouse wh = new Warehouse(conf);
    targetLocation = wh.getDefaultTablePath(
        db.getDatabase(tableDesc.getDatabaseName()), tableName, tableDesc.isExternal());
  } catch (MetaException | HiveException e) {
    throw new SemanticException(e);
  }
  queryState.getLineageState().mapDirToOp(targetLocation, output);
}
/**
 * Runs the query on a background thread: installs the parent session's Hive and
 * SessionState into this thread, registers logging context, then prepares (if
 * requested) and executes the query. Ordering of the setup calls is significant
 * and must be preserved. Always unregisters the logging context and closes any
 * thread-local Hive on exit.
 */
@Override
public Object run() throws HiveSQLException {
  // The parent's Hive object must not be closeable by this child thread.
  assert (!parentHive.allowClose());
  Hive.set(parentHive);
  // TODO: can this result in cross-thread reuse of session state?
  SessionState.setCurrentSessionState(parentSessionState);
  PerfLogger.setPerfLogger(SessionState.getPerfLogger());
  // Attach query id / conf to this thread's logging and Hadoop shim context.
  LogUtils.registerLoggingContext(queryState.getConf());
  ShimLoader.getHadoopShims().setHadoopQueryContext(queryState.getQueryId());
  try {
    if (asyncPrepare) {
      // Compilation was deferred to this async thread.
      prepare(queryState);
    }
    runQuery();
  } catch (HiveSQLException e) {
    // TODO: why do we invent our own error path op top of the one from Future.get?
    setOperationException(e);
    LOG.error("Error running hive query: ", e);
  } finally {
    LogUtils.unregisterLoggingContext();
    // If new hive object is created by the child thread, then we need to close it as it might
    // have created a hms connection. Call Hive.closeCurrent() that closes the HMS connection, causes
    // HMS connection leaks otherwise.
    Hive.closeCurrent();
  }
  return null;
}
};
/**
 * Builds a Driver for the given user, creating a fresh QueryState from {@code conf}.
 *
 * @param conf configuration for the new query state
 * @param userName name of the user running the query
 */
public Driver(HiveConf conf, String userName) {
  this(new QueryState(conf), userName);
}