@Override
public void run() {
  try {
    QueryPlan plan = hookContext.getQueryPlan();
    if (plan == null) {
      return;
    }
    String opId = hookContext.getOperationId();
    long queryStartTime = plan.getQueryStartTime();
    String user = hookContext.getUgi().getShortUserName();
    String requestuser = hookContext.getUserName();
    if (hookContext.getUserName() == null) {
      requestuser = hookContext.getUgi().getUserName();
    }
    switch (hookContext.getHookType()) {
      case PRE_EXEC_HOOK:
        ExplainConfiguration config = new ExplainConfiguration();
        // ... (ExplainWork/ExplainTask setup elided)
        String query = plan.getQueryStr();
        JSONObject explainPlan = explain.getJSONPlan(null, work);
        String logID = conf.getLogIdVar(hookContext.getSessionId());
        List<String> tablesRead = getTablesFromEntitySet(hookContext.getInputs());
        List<String> tablesWritten = getTablesFromEntitySet(hookContext.getOutputs());
        String executionMode = getExecutionMode(plan).name();
        String hiveInstanceAddress = hookContext.getHiveInstanceAddress();
        if (hiveInstanceAddress == null) {
          hiveInstanceAddress = InetAddress.getLocalHost().getHostAddress();
        }
        String hiveInstanceType = hookContext.isHiveServerQuery() ? "HS2" : "CLI";
        ApplicationId llapId = determineLlapId(conf, plan);
        // ... (remainder of the pre-execution branch elided)
@Override
public void run(HookContext hookContext) throws Exception {
  assert (hookContext.getHookType() == HookContext.HookType.POST_EXEC_HOOK
      || hookContext.getHookType() == HookContext.HookType.ON_FAILURE_HOOK);
  HiveConf conf = hookContext.getConf();
  if (!"tez".equals(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE))) {
    return;
  }
  LOG.info("Executing post execution hook to print workload manager events summary..");
  SessionState.LogHelper console = SessionState.getConsole();
  QueryPlan plan = hookContext.getQueryPlan();
  if (plan == null) {
    return;
  }
  List<TezTask> rootTasks = Utilities.getTezTasks(plan.getRootTasks());
  for (TezTask tezTask : rootTasks) {
    WmContext wmContext = tezTask.getDriverContext().getCtx().getWmContext();
    if (wmContext != null) {
      wmContext.printJson(console);
      wmContext.shortPrint(console);
    }
  }
}
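A hook like the one above only does anything on the Tez engine and, like other post-execution hooks, is registered through the hive.exec.post.hooks property. A minimal sketch, where com.example.WmEventsSummaryHook is a placeholder class name rather than one of the classes excerpted on this page:

// Hypothetical registration of a post-execution hook on the Tez engine.
// com.example.WmEventsSummaryHook is a placeholder class name.
HiveConf conf = new HiveConf();
conf.set("hive.execution.engine", "tez");
conf.set("hive.exec.post.hooks", "com.example.WmEventsSummaryHook");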
@Override
public void run(HookContext hookContext) throws Exception {
  assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);
  Set<ReadEntity> inputs = hookContext.getInputs();
  Set<WriteEntity> outputs = hookContext.getOutputs();
  LineageInfo linfo = hookContext.getLinfo();
  UserGroupInformation ugi = hookContext.getUgi();
  this.run(hookContext.getQueryState(), inputs, outputs, linfo, ugi);
}
private String getRequestUser(HookContext hookContext) {
  String requestuser = hookContext.getUserName();
  if (requestuser == null) {
    requestuser = hookContext.getUgi().getUserName();
  }
  return requestuser;
}
@Override
public void run(HookContext hookContext) throws Exception {
  if (hookContext.getHookType() == HookType.ON_FAILURE_HOOK) {
    Throwable exception = hookContext.getException();
    if (exception != null) {
      if (exception.getMessage() != null && exception.getMessage().contains("Vertex failed,")) {
        retryPossible = true;
      }
    }
  }
}
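For context, a self-contained sketch of how a failure-detecting hook like the fragment above might be declared; the class name VertexFailureRetryHook and the isRetryPossible() accessor are illustrative additions, not taken from the snippet:

import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext.HookType;

// Illustrative sketch: an on-failure hook that records whether the error looks retryable.
public class VertexFailureRetryHook implements ExecuteWithHookContext {
  private volatile boolean retryPossible = false;

  @Override
  public void run(HookContext hookContext) throws Exception {
    if (hookContext.getHookType() == HookType.ON_FAILURE_HOOK) {
      Throwable exception = hookContext.getException();
      if (exception != null && exception.getMessage() != null
          && exception.getMessage().contains("Vertex failed,")) {
        retryPossible = true;
      }
    }
  }

  // Hypothetical accessor so callers can inspect the flag after the query fails.
  public boolean isRetryPossible() {
    return retryPossible;
  }
}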
@Override
public void run(HookContext hookContext) throws Exception {
  SessionState ss = SessionState.get();
  Set<ReadEntity> inputs = hookContext.getInputs();
  Set<WriteEntity> outputs = hookContext.getOutputs();
  UserGroupInformation ugi = hookContext.getUgi();
  boolean isExplain = hookContext.getQueryPlan().isExplain();
  this.run(ss, inputs, outputs, ugi, isExplain);
}
private HiveHookEventProto getPreHookEvent(HookContext hookContext) {
  QueryPlan plan = hookContext.getQueryPlan();
  LOG.info("Received pre-hook notification for: " + plan.getQueryId());
  HiveConf conf = new HiveConf(hookContext.getConf());
  List<ExecDriver> mrTasks = Utilities.getMRTasks(plan.getRootTasks());
  List<TezTask> tezTasks = Utilities.getTezTasks(plan.getRootTasks());
  HiveHookEventProto.Builder builder = HiveHookEventProto.newBuilder();
  // ... (event type, timestamp, and user fields elided)
  builder.addAllTablesRead(getTablesFromEntitySet(hookContext.getInputs()));
  builder.addAllTablesWritten(getTablesFromEntitySet(hookContext.getOutputs()));
  if (hookContext.getOperationId() != null) {
    builder.setOperationId(hookContext.getOperationId());
  }
  addMapEntry(builder, OtherInfoType.SESSION_ID, hookContext.getSessionId());
  String logID = conf.getLogIdVar(hookContext.getSessionId());
  addMapEntry(builder, OtherInfoType.INVOKER_INFO, logID);
  addMapEntry(builder, OtherInfoType.THREAD_NAME, hookContext.getThreadId());
  addMapEntry(builder, OtherInfoType.VERSION, Integer.toString(VERSION));
  addMapEntry(builder, OtherInfoType.CLIENT_IP_ADDRESS, hookContext.getIpAddress());
  String hiveInstanceAddress = hookContext.getHiveInstanceAddress();
  if (hiveInstanceAddress == null) {
    try {
      hiveInstanceAddress = InetAddress.getLocalHost().getHostAddress();
    } catch (UnknownHostException e) {
      // ... (log and continue)
    }
  }
  String hiveInstanceType = hookContext.isHiveServerQuery() ? "HS2" : "CLI";
  addMapEntry(builder, OtherInfoType.HIVE_INSTANCE_TYPE, hiveInstanceType);
  // ... (remaining fields elided)
  return builder.build();
}
@Override
public void run(HookContext hookContext) {
  assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);
  QueryPlan plan = hookContext.getQueryPlan();
  Index index = hookContext.getIndex();
  SessionState ss = SessionState.get();
  if (ss != null && index != null /* ... further conditions elided */) {
    // ...
    if (queryTime == 0) {
      queryTime = System.currentTimeMillis();
    }
    long duration = System.currentTimeMillis() - queryTime;
    writer.name("user").value(hookContext.getUgi().getUserName());
    writer.name("timestamp").value(queryTime / 1000);
    writer.name("duration").value(duration);
    writer.name("jobIds");
    writer.beginArray();
    List<TaskRunner> tasks = hookContext.getCompleteTaskList();
    if (tasks != null && !tasks.isEmpty()) {
      for (TaskRunner task : tasks) {
        // ... (job id collection elided)
      }
    }
    // ...
    writeEdges(writer, edges, hookContext.getConf());
    writeVertices(writer, vertices);
    writer.endObject();
    // ...
event.setInputs(hookContext.getInputs());
event.setOutputs(hookContext.getOutputs());
event.setHookType(hookContext.getHookType());
final UserGroupInformation ugi = hookContext.getUgi() == null ? Utils.getUGI() : hookContext.getUgi();
event.setUgi(ugi);
event.setUser(getUser(hookContext.getUserName(), hookContext.getUgi()));
event.setOperation(OPERATION_MAP.get(hookContext.getOperationName()));
event.setQueryId(hookContext.getQueryPlan().getQueryId());
event.setQueryStr(hookContext.getQueryPlan().getQueryStr());
event.setQueryStartTime(hookContext.getQueryPlan().getQueryStartTime());
event.setQueryType(hookContext.getQueryPlan().getQueryPlan().getQueryType());
event.setLineageInfo(hookContext.getLinfo());
@Override
public void run(HookContext hookContext) throws Exception {
  SessionState ss = SessionState.get();
  Set<ReadEntity> inputs = hookContext.getInputs();
  Set<WriteEntity> outputs = hookContext.getOutputs();
  UserGroupInformation ugi = hookContext.getUgi();
  this.run(ss, inputs, outputs, ugi);
}
public HiveDAGTransformer(HookContext hookContext) {
  conf = hookContext.getConf();
  tmpDir = AmbroseHiveUtil.getJobTmpDir(conf, false);
  localTmpDir = AmbroseHiveUtil.getJobTmpDir(conf, true);
  queryPlan = hookContext.getQueryPlan();
  allTasks = Utilities.getMRTasks(queryPlan.getRootTasks());
  if (!allTasks.isEmpty()) {
    createNodeIdToDAGNode();
  }
}
if (SessionState.get() != null) {
  if (SessionState.get().getATSDomainId() == null) {
    domainId = ATS_DOMAIN_PREFIX + hookContext.getSessionId();
    String requestuser = hookContext.getUserName();
    if (hookContext.getUserName() == null) {
      requestuser = hookContext.getUgi().getShortUserName();
    }
    boolean addHs2User =
        HiveConf.getBoolVar(hookContext.getConf(), ConfVars.HIVETEZHS2USERACCESS);
    domainReaders = Utilities.getAclStringWithHiveModification(hookContext.getConf(),
        TezConfiguration.TEZ_AM_VIEW_ACLS, addHs2User, requestuser, loginUser);
    domainWriters = Utilities.getAclStringWithHiveModification(hookContext.getConf(),
        TezConfiguration.TEZ_AM_MODIFY_ACLS, addHs2User, requestuser, loginUser);
    // ... (timeline domain creation elided)
    SessionState.get().setATSDomainId(domainId);
    // ...
private boolean ignoreHDFSPathsinProcessQualifiedName() {
  switch (context.getHiveOperation()) {
    case LOAD:
    case IMPORT:
      return hasPartitionEntity(getHiveContext().getOutputs());
    case EXPORT:
      return hasPartitionEntity(getHiveContext().getInputs());
    case QUERY:
      return true;
  }
  return false;
}
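The helper hasPartitionEntity(...) referenced above is not shown in this excerpt; a plausible sketch, assuming it simply checks the entity type of each input or output (uses org.apache.hadoop.hive.ql.hooks.Entity):

// Illustrative sketch only: returns true if any entity in the set refers to a partition.
private static boolean hasPartitionEntity(Set<? extends Entity> entities) {
  for (Entity entity : entities) {
    if (entity.getType() == Entity.Type.PARTITION) {
      return true;
    }
  }
  return false;
}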
@Override
public void run() {
  try {
    QueryPlan plan = hookContext.getQueryPlan();
    if (plan == null) {
      return;
    }
    String opId = hookContext.getOperationId();
    long queryStartTime = plan.getQueryStartTime();
    String user = hookContext.getUgi().getUserName();
    String requestuser = hookContext.getUserName();
    if (hookContext.getUserName() == null) {
      requestuser = hookContext.getUgi().getUserName();
    }
    switch (hookContext.getHookType()) {
      case PRE_EXEC_HOOK:
        ExplainTask explain = new ExplainTask();
        // ... (remainder of the pre-execution branch elided)
hookContext = new HookContext(plan, queryState, ctx.getPathToCS(), ss.getUserFromAuthenticator(),
    ss.getUserIpAddress(), InetAddress.getLocalHost().getHostAddress(), operationId, ss.getSessionId(),
    Thread.currentThread().getName(), ss.isHiveServerQuery(), perfLogger);
hookContext.setHookType(HookContext.HookType.PRE_EXEC_HOOK);
// ... (task launch loop elided)
hookContext.addCompleteTask(tskRun);
queryDisplay.setTaskResult(tskRun.getTask().getId(), tskRun.getTaskResult());
// ...
hookContext.setHookType(HookContext.HookType.POST_EXEC_HOOK);
final long currentTime = System.currentTimeMillis();
final HiveConf conf = new HiveConf(hookContext.getConf());
final QueryState queryState = hookContext.getQueryState();
final String queryId = queryState.getQueryId();
// ...
final Map<String, Long> durations = new HashMap<>(); // collect per-phase timings from the perf logger
for (String key : hookContext.getPerfLogger().getEndTimes().keySet()) {
  durations.put(key, hookContext.getPerfLogger().getDuration(key));
}
HookContext hookContext = new HookContext(queryPlan, null);
hookContext.setInputs(readEntities);
hookContext.setOutputs(writeEntities);
hookContext.setConf(hiveConf);
private HiveHookEventProto getPostHookEvent(HookContext hookContext, boolean success) {
  QueryPlan plan = hookContext.getQueryPlan();
  LOG.info("Received post-hook notification for: " + plan.getQueryId());
  HiveHookEventProto.Builder builder = HiveHookEventProto.newBuilder();
  builder.setEventType(EventType.QUERY_COMPLETED.name());
  builder.setTimestamp(clock.getTime());
  builder.setHiveQueryId(plan.getQueryId());
  builder.setUser(getUser(hookContext));
  builder.setRequestUser(getRequestUser(hookContext));
  if (hookContext.getOperationId() != null) {
    builder.setOperationId(hookContext.getOperationId());
  }
  addMapEntry(builder, OtherInfoType.STATUS, Boolean.toString(success));
  JSONObject perfObj = new JSONObject();
  for (String key : hookContext.getPerfLogger().getEndTimes().keySet()) {
    perfObj.put(key, hookContext.getPerfLogger().getDuration(key));
  }
  addMapEntry(builder, OtherInfoType.PERF, perfObj.toString());
  return builder.build();
}
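The pre- and post-hook builders (getPreHookEvent earlier on this page and getPostHookEvent above) might be dispatched from the hook's run() method roughly as follows; this is a sketch only, and logWriter is a placeholder field not shown in these excerpts:

// Sketch: dispatch on the hook type and hand the built event to some writer.
// logWriter is a hypothetical field; the real event sink is not part of these excerpts.
@Override
public void run(HookContext hookContext) throws Exception {
  switch (hookContext.getHookType()) {
    case PRE_EXEC_HOOK:
      logWriter.write(getPreHookEvent(hookContext));
      break;
    case POST_EXEC_HOOK:
      logWriter.write(getPostHookEvent(hookContext, true));
      break;
    case ON_FAILURE_HOOK:
      logWriter.write(getPostHookEvent(hookContext, false));
      break;
    default:
      break;
  }
}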
@Override
public void run(HookContext hookContext) throws Exception {
  QueryProperties qProps = hookContext.getQueryPlan().getQueryProperties();
  if (null == qProps) {
    return; // it's a DDL query
  }
  if (qProps.usesScript()) {
    throw new HiveAccessControlException(
        "Query with transform clause is disallowed in current configuration.");
  }
}
@Override
public void run(HookContext hookContext) throws Exception {
  try {
    EventLogger logger = EventLogger.getInstance(hookContext.getConf());
    logger.handle(hookContext);
  } catch (Exception e) {
    LOG.error("Got exception while processing event: ", e);
  }
}
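Because this hook simply delegates to EventLogger, the same class can be registered on the pre-execution, post-execution, and failure hook points; a sketch using the standard hook properties, with com.example.ProtoEventHook as a placeholder class name:

// Hypothetical registration; com.example.ProtoEventHook stands in for the hook class.
HiveConf conf = new HiveConf();
conf.set("hive.exec.pre.hooks", "com.example.ProtoEventHook");
conf.set("hive.exec.post.hooks", "com.example.ProtoEventHook");
conf.set("hive.exec.failure.hooks", "com.example.ProtoEventHook");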