/**
 * Builds the Timeline (ATS) URL from which the LLAP log for the given task attempt can be
 * fetched.
 *
 * @param attemptID         attempt whose DAG id names the log file
 * @param containerIdString container that produced the log
 * @param isDone            whether the log is finalized (appends the ".done" suffix)
 * @param nmAddress         node manager address, carried in the "nm.id" query parameter
 * @return the fully assembled log URL
 */
private String constructLlapLogUrl(final TezTaskAttemptID attemptID, final String containerIdString,
    final boolean isDone, final String nmAddress) {
  final String dagId = attemptID.getTaskID().getVertexID().getDAGId().toString();
  // Finalized logs carry a ".done" marker; the nm.id query parameter rides along in the name.
  final String doneSuffix = isDone ? ".done" : "";
  final String filename =
      JOINER.join(currentHiveQueryId, "-", dagId, ".log", doneSuffix, "?nm.id=", nmAddress);
  return PATH_JOINER.join(timelineServerUri, "ws", "v1", "applicationhistory", "containers",
      containerIdString, "logs", filename);
}
/**
 * Returns the vertex index (the vertex's ordinal within its DAG) that the given task's
 * attempt belongs to, by walking attempt -> task -> vertex id.
 */
private static int vertexNum(TaskInfo taskInfo) { return taskInfo.getAttemptId().getTaskID().getVertexID().getId(); // Sigh... }
taskInfo.attemptId.getTaskID().getVertexID().getDAGId()); Set<Integer> vertexDepInfo = null; if (depInfo != null) {
/**
 * Accepts an allocation request for a task that may run on any of the given hosts/racks.
 * Publishes the DAG id to metrics on the first request of a DAG, records the request in the
 * per-DAG stats under the write lock, then queues the task and kicks the scheduler.
 */
@Override
public void allocateTask(Object task, Resource capability, String[] hosts, String[] racks,
    Priority priority, Object containerSignature, Object clientCookie) {
  TezTaskAttemptID attemptId = getTaskAttemptId(task);
  LOG.info("Received allocateRequest. task={}, priority={}, capability={}, hosts={}",
      task, priority, capability, Arrays.toString(hosts));
  TaskInfo pendingTask = new TaskInfo(localityDelayConf, clock, task, clientCookie, priority,
      capability, hosts, racks, clock.getTime(), attemptId);
  writeLock.lock();
  try {
    // First request of a new DAG: publish its id to the metrics system exactly once.
    if (!dagRunning && metrics != null && attemptId != null) {
      metrics.setDagId(attemptId.getTaskID().getVertexID().getDAGId().toString());
    }
    dagRunning = true;
    dagStats.registerTaskRequest(hosts, racks);
  } finally {
    writeLock.unlock();
  }
  addPendingTask(pendingTask);
  trySchedulingPendingTasks();
}
@Override public void allocateTask(Object task, Resource capability, ContainerId containerId, Priority priority, Object containerSignature, Object clientCookie) { // Container affinity can be implemented as Host affinity for LLAP. Not required until // 1:1 edges are used in Hive. TezTaskAttemptID id = getTaskAttemptId(task); TaskInfo taskInfo = new TaskInfo(localityDelayConf, clock, task, clientCookie, priority, capability, null, null, clock.getTime(), id); LOG.info("Received allocateRequest. task={}, priority={}, capability={}, containerId={}", task, priority, capability, containerId); writeLock.lock(); try { if (!dagRunning && metrics != null && id != null) { metrics.setDagId(id.getTaskID().getVertexID().getDAGId().toString()); } dagRunning = true; dagStats.registerTaskRequest(null, null); } finally { writeLock.unlock(); } addPendingTask(taskInfo); trySchedulingPendingTasks(); }
TerminateFragmentRequestProto.newBuilder().setQueryIdentifier( constructQueryIdentifierProto( taskAttemptId.getTaskID().getVertexID().getDAGId().getId())) .setFragmentIdentifierString(taskAttemptId.toString()).build(); communicator.sendTerminateFragment(request, nodeId.getHostname(), nodeId.getPort(),
super.registerRunningTaskAttempt(containerId, taskSpec, additionalResources, credentials, credentialsChanged, priority); int dagId = taskSpec.getTaskAttemptID().getTaskID().getVertexID().getDAGId().getId(); if (currentQueryIdentifierProto == null || (dagId != currentQueryIdentifierProto.getDagIndex())) {
final String dagId = attemptId.getTaskID().getVertexID().getDAGId().toString(); final String queryId = vertex.getHiveQueryId(); final String fragmentId = LlapTezUtils.stripAttemptPrefix(fragmentIdString); int dagIdentifier = taskAttemptId.getTaskID().getVertexID().getDAGId().getId();
.setIsGuaranteed(newState).setFragmentIdentifierString(attemptId.toString()) .setQueryIdentifier(constructQueryIdentifierProto( attemptId.getTaskID().getVertexID().getDAGId().getId())).build();
builder.setQueryIdentifier(queryIdentifierProto); builder.setHiveQueryId(hiveQueryIdString); builder.setVertexIndex(tId.getTaskID().getVertexID().getId()); builder.setDagName(taskSpec.getDAGName()); builder.setVertexName(taskSpec.getVertexName());
private SubmitWorkRequestProto constructSubmitWorkRequest(ContainerId containerId, TaskSpec taskSpec, FragmentRuntimeInfo fragmentRuntimeInfo, String hiveQueryId) throws IOException { SubmitWorkRequestProto.Builder builder = SubmitWorkRequestProto.newBuilder(); builder.setFragmentNumber(taskSpec.getTaskAttemptID().getTaskID().getId()); builder.setAttemptNumber(taskSpec.getTaskAttemptID().getId()); builder.setContainerIdString(containerId.toString()); builder.setAmHost(getAmHostString()); builder.setAmPort(getAddress().getPort()); Preconditions.checkState(currentQueryIdentifierProto.getDagIndex() == taskSpec.getTaskAttemptID().getTaskID().getVertexID().getDAGId().getId()); builder.setCredentialsBinary( getCredentials(getContext().getCurrentDagInfo().getCredentials())); builder.setWorkSpec(VertexOrBinary.newBuilder().setVertex(Converters.constructSignableVertexSpec( taskSpec, currentQueryIdentifierProto, getTokenIdentifier(), user, hiveQueryId)).build()); // Don't call builder.setWorkSpecSignature() - Tez doesn't sign fragments builder.setFragmentRuntimeInfo(fragmentRuntimeInfo); if (scheduler != null) { // May be null in tests // TODO: see javadoc builder.setIsGuaranteed(scheduler.isInitialGuaranteed(taskSpec.getTaskAttemptID())); } return builder.build(); }
/**
 * Creates the event notifying a vertex that one of its tasks reached a terminal state.
 *
 * @param taskID    id of the finished task; its vertex id routes the event
 * @param taskState terminal state the task finished in
 */
public VertexEventTaskCompleted(TezTaskID taskID, TaskState taskState) { super(taskID.getVertexID(), VertexEventType.V_TASK_COMPLETED); this.taskID = taskID; this.taskState = taskState; }
/**
 * Creates a speculator status-update event for a task attempt, routed via the attempt's
 * vertex id.
 *
 * @param taId        attempt the update is about
 * @param state       current state of the attempt
 * @param timestamp   time the status was observed
 * @param justStarted true if this is the attempt's first status update
 */
public SpeculatorEventTaskAttemptStatusUpdate(TezTaskAttemptID taId, TaskAttemptState state, long timestamp, boolean justStarted) { super(SpeculatorEventType.S_TASK_ATTEMPT_STATUS_UPDATE, taId.getTaskID().getVertexID()); this.id = taId; this.state = state; this.timestamp = timestamp; this.justStarted = justStarted; }
/**
 * Creates the event notifying a vertex that one of its task attempts has completed.
 *
 * @param taskAttemptId completed attempt; its vertex id routes the event
 * @param state         internal terminal state of the attempt
 */
// NOTE(review): field name "attempState" looks like a typo for "attemptState"; left as-is
// since the field may be read elsewhere in the class.
public VertexEventTaskAttemptCompleted(TezTaskAttemptID taskAttemptId, TaskAttemptStateInternal state) { super(taskAttemptId.getTaskID().getVertexID(), VertexEventType.V_TASK_ATTEMPT_COMPLETED); this.attemptId = taskAttemptId; this.attempState = state; }
/**
 * Builds the Timeline (ATS) URL from which the LLAP log for the given task attempt can be
 * fetched. The file name is "<queryId>-<dagId>.log" (plus ".done" once finalized), with the
 * node manager address appended as the "nm.id" query parameter.
 *
 * @param attemptID         attempt whose DAG id names the log file
 * @param containerIdString container that produced the log
 * @param isDone            whether the log is finalized (appends the ".done" suffix)
 * @param nmAddress         node manager address, carried in the "nm.id" query parameter
 * @return the fully assembled log URL
 */
private String constructLlapLogUrl(final TezTaskAttemptID attemptID, final String containerIdString, final boolean isDone, final String nmAddress) { String dagId = attemptID.getTaskID().getVertexID().getDAGId().toString(); String filename = JOINER.join(currentHiveQueryId, "-", dagId, ".log", (isDone ? ".done" : ""), "?nm.id=", nmAddress); String url = PATH_JOINER.join(timelineServerUri, "ws", "v1", "applicationhistory", "containers", containerIdString, "logs", filename); return url; }
/**
 * Reports an uncaught exception from the task's event handling to the owning DAG: first a
 * diagnostics update describing the failing event and task, then an INTERNAL_ERROR event.
 * The diagnostics event is sent first so the error text is recorded before the DAG reacts
 * to the internal error.
 *
 * @param type the task event type whose handling threw
 * @param e    the uncaught exception
 */
protected void internalErrorUncaughtException(TaskEventType type, Exception e) { eventHandler.handle(new DAGEventDiagnosticsUpdate( this.taskId.getVertexID().getDAGId(), "Uncaught exception when handling event " + type + " on Task " + this.taskId + ", error=" + e.getMessage())); eventHandler.handle(new DAGEvent(this.taskId.getVertexID().getDAGId(), DAGEventType.INTERNAL_ERROR)); }
/**
 * Converts attempt ids into TaskAttemptIdentifier objects, resolving each attempt's vertex
 * name through the DAG (the id itself only carries numeric indices).
 *
 * @param dag   DAG the attempts belong to; used to look up vertex names
 * @param taIds attempt ids to convert
 * @return identifiers in the same order as {@code taIds}
 */
private static List<TaskAttemptIdentifier> getTaskAttemptIdentifiers(DAG dag,
    List<TezTaskAttemptID> taIds) {
  final String dagName = dag.getName();
  final List<TaskAttemptIdentifier> result = new ArrayList<TaskAttemptIdentifier>(taIds.size());
  for (TezTaskAttemptID attemptId : taIds) {
    String vertexName = dag.getVertex(attemptId.getTaskID().getVertexID()).getName();
    result.add(getTaskAttemptIdentifier(dagName, vertexName, attemptId));
  }
  return result;
}
/**
 * Schedules the attempt carried by the event, passing along the per-vertex info for the
 * attempt's vertex when the vertexInfo map is present (it may be null).
 */
public void scheduleTask(DAGEventSchedulerUpdate event) {
  final VertexInfo info = (vertexInfo == null) ? null
      : vertexInfo.get(event.getAttempt().getID().getTaskID().getVertexID());
  scheduleTaskWithLimit(event, info);
}
/**
 * Stamps the task's finish time and emits a TaskFinishedEvent for an unsuccessful task into
 * the DAG history stream.
 *
 * @param finalState terminal state to record for the task
 */
protected void logJobHistoryTaskFailedEvent(TaskState finalState) {
  this.finishTime = clock.getTime();
  // No successful attempt to report, hence the null attempt id.
  String diagnostics = StringUtils.join(getDiagnostics(), LINE_SEPARATOR);
  TaskFinishedEvent event = new TaskFinishedEvent(taskId, getVertex().getName(),
      getLaunchTime(), this.finishTime, null, finalState, diagnostics, getCounters(),
      failedAttempts);
  this.appContext.getHistoryHandler().handle(
      new DAGHistoryEvent(taskId.getVertexID().getDAGId(), event));
}
protected void logJobHistoryTaskFinishedEvent() { // FIXME need to handle getting finish time as this function // is called from within a transition this.finishTime = clock.getTime(); TaskFinishedEvent finishEvt = new TaskFinishedEvent(taskId, getVertex().getName(), getLaunchTime(), this.finishTime, successfulAttempt, TaskState.SUCCEEDED, "", getCounters(), failedAttempts); this.appContext.getHistoryHandler().handle( new DAGHistoryEvent(taskId.getVertexID().getDAGId(), finishEvt)); }