/**
 * Hook entry point: forwards the hook event to the singleton {@code EventLogger}.
 *
 * <p>Any exception raised while handling the event is logged and swallowed so
 * that a logging failure never fails the user's query.
 *
 * @param hookContext context for the query that triggered this hook
 */
@Override
public void run(HookContext hookContext) throws Exception {
  try {
    EventLogger logger = EventLogger.getInstance(hookContext.getConf());
    logger.handle(hookContext);
  } catch (Exception e) {
    // Fixed typo in the log message ("exceptoin" -> "exception").
    LOG.error("Got exception while processing event: ", e);
  }
}
}
Configuration conf = hookContext.getConf(); boolean justStarted = conf.getBoolean(SCRIPT_STARTED_PARAM, true); if (justStarted) {
@Override public void run(HookContext hookContext) throws Exception { assert (hookContext.getHookType() == HookContext.HookType.POST_EXEC_HOOK); HiveConf conf = hookContext.getConf();
/**
 * Builds the node-id to DAG-node mapping for the MapReduce tasks found in the
 * query plan carried by the given hook context.
 *
 * @param hookContext context for the query being executed; supplies both the
 *     configuration and the query plan
 */
public HiveDAGTransformer(HookContext hookContext) {
  conf = hookContext.getConf();
  // Resolve both the cluster-side and the local scratch directories for this job.
  tmpDir = AmbroseHiveUtil.getJobTmpDir(conf, false);
  localTmpDir = AmbroseHiveUtil.getJobTmpDir(conf, true);
  queryPlan = hookContext.getQueryPlan();
  allTasks = Utilities.getMRTasks(queryPlan.getRootTasks());
  // Nothing to transform when the plan contains no MapReduce tasks.
  if (allTasks.isEmpty()) {
    return;
  }
  createNodeIdToDAGNode();
}
@Override public void run(HookContext hookContext) throws Exception { assert (hookContext.getHookType() == HookContext.HookType.POST_EXEC_HOOK); HiveConf conf = hookContext.getConf();
writeEdges(writer, edges, hookContext.getConf()); writeVertices(writer, vertices); writer.endObject();
final long currentTime = System.currentTimeMillis(); final HiveConf conf = new HiveConf(hookContext.getConf()); final QueryState queryState = hookContext.getQueryState(); final String queryId = queryState.getQueryId();
HiveConf.getBoolVar(hookContext.getConf(), ConfVars.HIVETEZHS2USERACCESS); domainReaders = Utilities.getAclStringWithHiveModification(hookContext.getConf(), TezConfiguration.TEZ_AM_VIEW_ACLS, addHs2User, requestuser, loginUser); domainWriters = Utilities.getAclStringWithHiveModification(hookContext.getConf(), TezConfiguration.TEZ_AM_MODIFY_ACLS, addHs2User, requestuser, loginUser); SessionState.get().setATSDomainId(domainId);
HiveConf.getBoolVar(hookContext.getConf(), ConfVars.HIVETEZHS2USERACCESS); domainReaders = Utilities.getAclStringWithHiveModification(hookContext.getConf(), TezConfiguration.TEZ_AM_VIEW_ACLS, addHs2User, requestuser, loginUser); domainWriters = Utilities.getAclStringWithHiveModification(hookContext.getConf(), TezConfiguration.TEZ_AM_MODIFY_ACLS, addHs2User, requestuser, loginUser); SessionState.get().setATSDomainId(domainId);
@Override public void run(HookContext hookContext) throws Exception { assert (hookContext.getHookType() == HookContext.HookType.POST_EXEC_HOOK); HiveConf conf = hookContext.getConf(); if (!"tez".equals(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE))) { return;
final long currentTime = System.currentTimeMillis(); final HiveConf conf = new HiveConf(hookContext.getConf()); final QueryState queryState = hookContext.getQueryState(); final String queryId = queryState.getQueryId();
@Override public void run(HookContext hookContext) throws Exception { assert (hookContext.getHookType() == HookContext.HookType.POST_EXEC_HOOK); HiveConf conf = hookContext.getConf(); if (!"tez".equals(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE))) { return;
/**
 * Post-execution / failure hook that prints a workload-manager events summary
 * for every Tez task in the completed query plan. A no-op unless the query
 * ran on the Tez execution engine.
 *
 * @param hookContext context for the query that triggered this hook
 */
@Override
public void run(HookContext hookContext) throws Exception {
  assert (hookContext.getHookType() == HookContext.HookType.POST_EXEC_HOOK
      || hookContext.getHookType() == HookContext.HookType.ON_FAILURE_HOOK);
  HiveConf hiveConf = hookContext.getConf();
  String engine = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);
  // Workload-manager events only exist for Tez executions.
  if (!"tez".equals(engine)) {
    return;
  }
  LOG.info("Executing post execution hook to print workload manager events summary..");
  SessionState.LogHelper console = SessionState.getConsole();
  QueryPlan queryPlan = hookContext.getQueryPlan();
  if (queryPlan == null) {
    return;
  }
  for (TezTask task : Utilities.getTezTasks(queryPlan.getRootTasks())) {
    WmContext ctx = task.getDriverContext().getCtx().getWmContext();
    if (ctx == null) {
      continue;
    }
    ctx.printJson(console);
    ctx.shortPrint(console);
  }
}
@Override public void run(HookContext hookContext) throws Exception { HiveConf conf = hookContext.getConf(); Properties allConfProps = conf.getAllProperties(); String queryId = AmbroseHiveUtil.getHiveQueryId(conf);
@Override public void run(HookContext hookContext) throws Exception { assert (hookContext.getHookType() == HookContext.HookType.POST_EXEC_HOOK); HiveConf conf = hookContext.getConf(); if (!"tez".equals(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE))) { return;
@Override public void run(HookContext hookContext) throws Exception { assert (hookContext.getHookType() == HookContext.HookType.POST_EXEC_HOOK); HiveConf conf = hookContext.getConf(); if (!"tez".equals(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE))) { return;
HiveConf conf = new HiveConf(hookContext.getConf()); List<ExecDriver> mrTasks = Utilities.getMRTasks(plan.getRootTasks()); List<TezTask> tezTasks = Utilities.getTezTasks(plan.getRootTasks());
/**
 * Pre-execution hook that registers the query's MapReduce DAG with the
 * embedded Ambrose progress reporter so job progress can be visualized.
 *
 * @param hookContext context for the query being executed
 */
@Override
public void run(HookContext hookContext) throws Exception {
  String queryId = AmbroseHiveUtil.getHiveQueryId(hookContext.getConf());
  EmbeddedAmbroseHiveProgressReporter reporter = getEmbeddedProgressReporter();
  HiveDAGTransformer transformer = new HiveDAGTransformer(hookContext);
  // Conditional tasks may be filtered out by Hive at runtime; report those
  // already-known nodes as 'complete' so the UI does not wait on them forever.
  // (Original comment was garbled: "We them as 'complete'".)
  Map<String, DAGNode<Job>> nodeIdToDAGNode = reporter.getNodeIdToDAGNode();
  sendFilteredJobsStatus(queryId, reporter, nodeIdToDAGNode);
  // No MR jobs in this query's plan -> nothing further to register.
  if (transformer.getTotalMRJobs() == 0) {
    return;
  }
  // NOTE(review): presumably throttles/paces between consecutive scripts -- confirm
  // against waitBetween's definition elsewhere in this file.
  waitBetween(hookContext, reporter, queryId);
  // Replace the reporter's view with the freshly transformed DAG for this query.
  nodeIdToDAGNode = transformer.getNodeIdToDAGNode();
  reporter.setNodeIdToDAGNode(nodeIdToDAGNode);
  reporter.setTotalMRJobs(transformer.getTotalMRJobs());
  reporter.sendDagNodeNameMap(queryId, nodeIdToDAGNode);
}
HiveConf conf = hookContext.getConf(); QueryPlan plan = hookContext.getQueryPlan(); List<TezTask> rootTasks = Utilities.getTezTasks(plan.getRootTasks());
@Override public void run(HookContext hookContext) throws Exception { HiveConf conf = hookContext.getConf(); Set<ReadEntity> inputs = hookContext.getQueryPlan().getInputs();