private void run() throws HyracksDataException { // Start by getting the partition number from the manager LOGGER.info("Starting ingestion for partition:" + ctx.getTaskAttemptId().getTaskId().getPartition()); try { doRun(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw HyracksDataException.create(e); } catch (Exception e) { LOGGER.log(Level.WARN, "Unhandled Exception", e); throw HyracksDataException.create(e); } }
public static TaskAttemptId parse(String str) {
    // Expected format: "TAID:<task-id>:<attempt>", where <task-id> itself may contain ':'.
    final String prefix = "TAID:";
    if (!str.startsWith(prefix)) {
        throw new IllegalArgumentException("Unable to parse: " + str);
    }
    final String body = str.substring(prefix.length());
    // The attempt number follows the LAST colon; everything before it is the TaskId.
    final int sep = body.lastIndexOf(':');
    final TaskId taskId = TaskId.parse(body.substring(0, sep));
    final int attempt = Integer.parseInt(body.substring(sep + 1));
    return new TaskAttemptId(taskId, attempt);
}
private static void writeTaskAttemptId(DataOutputStream dos, TaskAttemptId taId) throws IOException {
    // Flatten the id hierarchy (operator -> activity -> task partition -> attempt)
    // into four consecutive ints; readTaskAttemptId reads them back in this order.
    final TaskId taskId = taId.getTaskId();
    final ActivityId activityId = taskId.getActivityId();
    dos.writeInt(activityId.getOperatorDescriptorId().getId());
    dos.writeInt(activityId.getLocalId());
    dos.writeInt(taskId.getPartition());
    dos.writeInt(taId.getAttempt());
}
public static TaskAttemptId create(DataInput dis) throws IOException {
    // Deserialization factory: allocate an empty id, then populate it from the stream.
    final TaskAttemptId id = new TaskAttemptId();
    id.readFields(dis);
    return id;
}
TaskId tid = ts.getTaskId(); TaskAttempt taskAttempt = new TaskAttempt(tcAttempt, new TaskAttemptId(new TaskId(tid.getActivityId(), tid.getPartition()), attempts), ts); taskAttempt.setStatus(TaskAttempt.TaskStatus.INITIALIZED, null); locationMap.put(tid, for (TaskAttemptDescriptor tad : tads) { TaskAttemptId taid = tad.getTaskAttemptId(); int attempt = taid.getAttempt(); TaskId tid = taid.getTaskId(); ActivityId aid = tid.getActivityId(); List<IConnectorDescriptor> inConnectors = acg.getActivityInputs(aid);
public void notifyTaskComplete(TaskAttempt ta) {
    try {
        final TaskAttemptId attemptId = ta.getTaskAttemptId();
        final TaskCluster cluster = ta.getTask().getTaskCluster();
        final TaskClusterAttempt last = findLastTaskClusterAttempt(cluster);
        // Only the most recent cluster attempt may complete tasks; drop stale
        // notifications that arrive from superseded attempts.
        if (last == null || attemptId.getAttempt() != last.getAttempt()) {
            LOGGER.warn(() -> "Ignoring task complete notification: " + attemptId
                    + " -- Current last attempt = " + last);
            return;
        }
        final TaskAttempt.TaskStatus status = ta.getStatus();
        // A completion is only valid for a task that is currently RUNNING.
        if (status != TaskAttempt.TaskStatus.RUNNING) {
            LOGGER.warn(() -> "Spurious task complete notification: " + attemptId
                    + " Current state = " + status);
            return;
        }
        ta.setStatus(TaskAttempt.TaskStatus.COMPLETED, null);
        ta.setEndTime(System.currentTimeMillis());
        // When the last pending task of this cluster attempt finishes, the whole
        // attempt completes and downstream activity clusters may become runnable.
        if (last.decrementPendingTasksCounter() == 0) {
            last.setStatus(TaskClusterAttempt.TaskClusterStatus.COMPLETED);
            last.setEndTime(System.currentTimeMillis());
            inProgressTaskClusters.remove(cluster);
            startRunnableActivityClusters();
        }
    } catch (Exception e) {
        // Any unexpected failure here leaves scheduling state inconsistent; abort the job.
        LOGGER.error(() -> "Unexpected failure. Aborting job " + jobRun.getJobId(), e);
        abortJob(Collections.singletonList(e), NoOpCallback.INSTANCE);
    }
}
@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);
    nodeId = input.readUTF();
    // Rebuild the per-task profile map: an entry count followed by
    // (TaskAttemptId, TaskProfile) pairs in serialization order.
    final int count = input.readInt();
    taskProfiles = new HashMap<>();
    for (int i = 0; i < count; i++) {
        taskProfiles.put(TaskAttemptId.create(input), TaskProfile.create(input));
    }
}
/**
 * Wraps a frame writer with per-connector profiling counters.
 *
 * @param ctx           task context supplying the attempt id and counter registry
 * @param writer        the downstream writer being instrumented
 * @param cdId          connector this sender belongs to
 * @param senderIndex   sender-side partition index
 * @param receiverIndex receiver-side partition index
 */
public ConnectorSenderProfilingFrameWriter(IHyracksTaskContext ctx, IFrameWriter writer, ConnectorDescriptorId cdId,
        int senderIndex, int receiverIndex) {
    this.writer = writer;
    int attempt = ctx.getTaskAttemptId().getAttempt();
    // All three counters share the "<cdId>.sender.<attempt>.<sender>.<receiver>." prefix;
    // build it once instead of repeating the concatenation three times.
    String prefix = cdId + ".sender." + attempt + "." + senderIndex + "." + receiverIndex + ".";
    this.openCounter = ctx.getCounterContext().getCounter(prefix + "open", true);
    this.closeCounter = ctx.getCounterContext().getCounter(prefix + "close", true);
    this.frameCounter = ctx.getCounterContext().getCounter(prefix + "nextFrame", true);
}
public static Object deserialize(ByteBuffer buffer, int length) throws Exception {
    // View the buffer's backing array as a data stream positioned at the payload.
    final DataInputStream in =
            new DataInputStream(new ByteArrayInputStream(buffer.array(), buffer.position(), length));
    // Field order must match the serializer: jobId, nodeId, taskAttemptId, profile.
    final JobId jobId = JobId.create(in);
    final String nodeId = in.readUTF();
    final TaskAttemptId taskId = TaskAttemptId.create(in);
    final TaskProfile statistics = TaskProfile.create(in);
    return new NotifyTaskCompleteFunction(jobId, taskId, nodeId, statistics);
}
/**
 * Creates an evaluator that emits a node-locally unique binary id.
 * Format: |TypeTag | PayloadLength | Payload |
 * TypeTag: 1 byte
 * PayloadLength: 1 byte
 * Payload: 12 bytes: |partition-id (4 bytes) | local-id (8 bytes) |
 */
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
    byte[] uidBytes = new byte[BINARY_LENGTH];
    // Writes the type tag.
    uidBytes[0] = ATypeTag.SERIALIZED_BINARY_TYPE_TAG;
    // Writes the payload size.
    uidBytes[1] = BINARY_LENGTH - PAYLOAD_START;
    // Writes the 4 byte partition id.
    IntegerPointable.setInteger(uidBytes, PAYLOAD_START, ctx.getTaskAttemptId().getTaskId().getPartition());
    return new IScalarEvaluator() {
        @Override
        public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
            // Increments the Unique ID value.
            // The payload bytes act as a big-endian counter: bump the last byte
            // and carry into the next-higher byte whenever a byte wraps to 0.
            // NOTE(review): if ALL payload bytes wrap, the counter silently rolls
            // over to its initial value — presumably acceptable for 2^64 ids.
            for (int i = BINARY_LENGTH - 1; i >= PAYLOAD_START; i--) {
                if (++uidBytes[i] != 0) {
                    break;
                }
            }
            // result points directly at the shared uidBytes buffer (no copy), so the
            // returned value is only stable until the next evaluate() call.
            result.set(uidBytes, 0, BINARY_LENGTH);
        }
    };
}
};
ObjectNode json = om.createObjectNode(); json.put("activity-id", taskAttemptId.getTaskId().getActivityId().toString()); json.put("partition", taskAttemptId.getTaskId().getPartition()); json.put("attempt", taskAttemptId.getAttempt()); if (partitionSendProfile != null) { ArrayNode pspArray = om.createArrayNode();
private static TaskAttemptId readTaskAttemptId(DataInputStream dis) throws IOException {
    // Four consecutive ints: operator id, local activity id, partition, attempt —
    // the inverse of writeTaskAttemptId's serialization order.
    final OperatorDescriptorId opId = new OperatorDescriptorId(dis.readInt());
    final ActivityId activityId = new ActivityId(opId, dis.readInt());
    final TaskId taskId = new TaskId(activityId, dis.readInt());
    return new TaskAttemptId(taskId, dis.readInt());
}
/**
 * Finalizes the materialized partition: closes the backing file handle and, on
 * success, registers the partition as COMMITTED with the partition manager.
 */
@Override
public void close() throws HyracksDataException {
    if (LOGGER.isInfoEnabled()) {
        // Fixed: the message previously opened "close(" without the closing ")".
        LOGGER.info("close(" + pid + " by " + taId + ")");
    }
    // Release the file handle, if one was ever opened for this partition.
    if (handle != null) {
        ctx.getIoManager().close(handle);
    }
    if (!failed) {
        // NOTE(review): the trailing boolean presumably flags a re-attempted task
        // (attempt > 0) — confirm against registerPartition's signature.
        manager.registerPartition(pid, ctx.getJobletContext().getJobId().getCcId(), taId,
                new MaterializedPartition(ctx, fRef, executor, ctx.getIoManager()), PartitionState.COMMITTED,
                taId.getAttempt() != 0);
    }
}
@Override public void readFields(DataInput input) throws IOException { taId = TaskAttemptId.create(input); nPartitions = input.readInt();
NetworkAddress networkAddress = inputAddresses[i][j]; PartitionId pid = new PartitionId(jobId, inputs.get(i).getConnectorId(), j, td.getTaskAttemptId().getTaskId().getPartition()); PartitionChannel channel = new PartitionChannel(pid, new NetworkInputChannel(ncs.getNetworkManager(),
@Override
public final void runWork() {
    // Resolve the TaskAttempt addressed by taId, bailing out early whenever any
    // link in the lookup chain is missing or out of range.
    IJobManager jobManager = ccs.getJobManager();
    JobRun run = jobManager.get(jobId);
    if (run == null) {
        return;
    }
    TaskId tid = taId.getTaskId();
    Map<ActivityId, ActivityCluster> activityClusterMap = run.getActivityClusterGraph().getActivityMap();
    ActivityCluster ac = activityClusterMap.get(tid.getActivityId());
    if (ac == null) {
        return;
    }
    Map<ActivityId, ActivityPlan> taskStateMap =
            run.getActivityClusterPlanMap().get(ac.getId()).getActivityPlanMap();
    Task[] taskStates = taskStateMap.get(tid.getActivityId()).getTasks();
    if (taskStates == null || taskStates.length <= tid.getPartition()) {
        return;
    }
    TaskCluster tc = taskStates[tid.getPartition()].getTaskCluster();
    List<TaskClusterAttempt> taskClusterAttempts = tc.getAttempts();
    if (taskClusterAttempts == null || taskClusterAttempts.size() <= taId.getAttempt()) {
        return;
    }
    TaskAttempt ta = taskClusterAttempts.get(taId.getAttempt()).getTaskAttempts().get(tid);
    if (ta != null) {
        performEvent(ta);
    }
}
public static IHyracksTaskContext create(int frameSize, IOManager ioManager) {
    // Builds a throwaway task context for tests: job 0, operator 0, activity 0,
    // partition 0, attempt 0.
    try {
        INCServiceContext serviceCtx = new TestNCServiceContext(ioManager, null);
        TestJobletContext jobletCtx = new TestJobletContext(frameSize, serviceCtx, new JobId(0));
        ActivityId activityId = new ActivityId(new OperatorDescriptorId(0), 0);
        TaskAttemptId attemptId = new TaskAttemptId(new TaskId(activityId, 0), 0);
        return new TestTaskContext(jobletCtx, attemptId);
    } catch (HyracksException e) {
        throw new RuntimeException(e);
    }
}
TaskCluster tc = ta.getTask().getTaskCluster(); TaskClusterAttempt lastAttempt = findLastTaskClusterAttempt(tc); if (lastAttempt != null && taId.getAttempt() == lastAttempt.getAttempt()) { LOGGER.trace(() -> "Marking TaskAttempt " + ta.getTaskAttemptId() + " as failed"); ta.setStatus(TaskAttempt.TaskStatus.FAILED, exceptions);
@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);
    taskAttemptId = TaskAttemptId.create(input);
    // Rebuild the partition-send profile map: entry count, then
    // (PartitionId, PartitionProfile) pairs in serialization order.
    final int count = input.readInt();
    partitionSendProfile = new HashMap<>();
    for (int i = 0; i < count; i++) {
        partitionSendProfile.put(PartitionId.create(input), PartitionProfile.create(input));
    }
    statsCollector = StatsCollector.create(input);
}
private void doRun() throws HyracksDataException, InterruptedException { while (true) { try { // Start the adapter adapter.start(ctx.getTaskAttemptId().getTaskId().getPartition(), writer); // Adapter has completed execution return; } catch (InterruptedException e) { throw e; } catch (Exception e) { LOGGER.log(Level.WARN, "Exception during feed ingestion ", e); throw HyracksDataException.create(e); } } }