// NOTE(review): truncated fragment — the fireAndForget(createPreHookEvent(...)) call below is cut
// off mid-argument-list (no closing parentheses), and the POST_EXEC_HOOK / ON_FAILURE_HOOK case
// labels have no enclosing switch header visible in this chunk. Presumably a diff hunk from the
// hook-type dispatch in ATSHook.run(): it builds a JSON explain plan, gathers the log id,
// tables read/written, execution mode, and Hive instance address, then fires timeline events
// for the pre-exec, post-exec (success=true), and failure (success=false) cases — TODO confirm
// against the full source and restore the missing argument list before this can compile.
JSONObject explainPlan = explain.getJSONPlan(null, work); String logID = conf.getLogIdVar(hookContext.getSessionId()); List<String> tablesRead = getTablesFromEntitySet(hookContext.getInputs()); List<String> tablesWritten = getTablesFromEntitySet(hookContext.getOutputs()); String executionMode = getExecutionMode(plan).name(); String hiveInstanceAddress = hookContext.getHiveInstanceAddress(); if (hiveInstanceAddress == null) { fireAndForget( createPreHookEvent(queryId, query, explainPlan, queryStartTime, user, requestuser, numMrJobs, numTezJobs, opId, hookContext.getIpAddress(), hiveInstanceAddress, hiveInstanceType, break; case POST_EXEC_HOOK: fireAndForget(createPostHookEvent(queryId, currentTime, user, requestuser, true, opId, durations, domainId)); break; case ON_FAILURE_HOOK: fireAndForget(createPostHookEvent(queryId, currentTime, user, requestuser , false, opId, durations, domainId)); break; default:
// NOTE(review): truncated fragment — sets up the ATS executor, resolves (or creates) the timeline
// domain id for this hook context, and submits an anonymous Runnable whose body is cut off right
// after the @Override annotation. The Runnable body is not visible here; do not assume its
// contents — restore it from the full source before compiling.
setupAtsExecutor(conf); final String domainId = createOrGetDomain(hookContext); executor.submit(new Runnable() { @Override
// NOTE(review): truncated fragment — same hook-type dispatch shape as the other fragment in this
// file, but with an older call signature: getJSONPlan takes explicit rootTasks/fetchTask/flags,
// and fireAndForget/createPreHookEvent/createPostHookEvent take fewer arguments (no ip address,
// instance address/type, durations, or domainId). Presumably the pre-refactor side of a diff —
// TODO confirm which version is current; the two variants are mutually inconsistent as written.
// The case labels here also lack a visible enclosing switch header.
JSONObject explainPlan = explain.getJSONPlan(null, null, rootTasks, plan.getFetchTask(), true, false, false); fireAndForget(conf, createPreHookEvent(queryId, query, explainPlan, queryStartTime, user, requestuser, numMrJobs, numTezJobs, opId)); break; case POST_EXEC_HOOK: fireAndForget(conf, createPostHookEvent(queryId, currentTime, user, requestuser, true, opId)); break; case ON_FAILURE_HOOK: fireAndForget(conf, createPostHookEvent(queryId, currentTime, user, requestuser , false, opId)); break; default:
/**
 * Verifies that createPreHookEvent serializes the query text and the (empty) query
 * plan into the timeline entity's QUERY other-info field as the expected JSON
 * document {"queryText": "...", "queryPlan": {}}.
 *
 * <p>Fixed typo in the method name ("Shhould" -> "Should"); JUnit discovers tests
 * via the @Test annotation, so the rename is safe.
 */
@Test
public void testCreatePreHookEventJsonShouldMatch() throws Exception {
  TimelineEntity timelineEntity = uut.createPreHookEvent(
      "test-query-id", "test-query", new org.json.JSONObject(), 0L, "test-user",
      "test-request-user", 0, 0, "test-opid", "client-ip-address",
      "hive-instance-address", "hive-instance-type", "session-id", "log-id",
      "thread-id", "execution-mode", Collections.<String>emptyList(),
      Collections.<String>emptyList(), new HiveConf(), null, "domain-id");

  // The query info is stored as a JSON string keyed by OtherInfoTypes.QUERY.
  String resultStr = (String) timelineEntity.getOtherInfo()
      .get(ATSHook.OtherInfoTypes.QUERY.name());

  // Compare as parsed JSON trees so field ordering and whitespace don't matter.
  JsonNode result = objectMapper.readTree(resultStr);
  JsonNode expected = objectMapper.readTree(
      "{\"queryText\":\"test-query\",\"queryPlan\":{}}");
  assertEquals(expected, result);
}
}
// NOTE(review): this line is a byte-for-byte duplicate of an identical truncated fragment that
// appears earlier in this file — likely the same diff hunk pasted twice; one copy should be
// removed once the file is reconstructed. As with its twin, the fireAndForget(createPreHookEvent(...))
// call is cut off mid-argument-list and the case labels have no visible enclosing switch, so this
// cannot compile as-is. TODO: deduplicate and restore the missing arguments from the full source.
JSONObject explainPlan = explain.getJSONPlan(null, work); String logID = conf.getLogIdVar(hookContext.getSessionId()); List<String> tablesRead = getTablesFromEntitySet(hookContext.getInputs()); List<String> tablesWritten = getTablesFromEntitySet(hookContext.getOutputs()); String executionMode = getExecutionMode(plan).name(); String hiveInstanceAddress = hookContext.getHiveInstanceAddress(); if (hiveInstanceAddress == null) { fireAndForget( createPreHookEvent(queryId, query, explainPlan, queryStartTime, user, requestuser, numMrJobs, numTezJobs, opId, hookContext.getIpAddress(), hiveInstanceAddress, hiveInstanceType, break; case POST_EXEC_HOOK: fireAndForget(createPostHookEvent(queryId, currentTime, user, requestuser, true, opId, durations, domainId)); break; case ON_FAILURE_HOOK: fireAndForget(createPostHookEvent(queryId, currentTime, user, requestuser , false, opId, durations, domainId)); break; default:
// NOTE(review): byte-for-byte duplicate of an identical fragment earlier in this file — likely the
// same diff hunk pasted twice; deduplicate when reconstructing. Sets up the ATS executor, resolves
// the timeline domain id, and submits an anonymous Runnable whose body is cut off after @Override.
setupAtsExecutor(conf); final String domainId = createOrGetDomain(hookContext); executor.submit(new Runnable() { @Override