/**
 * Serializes a {@link TaskAttemptId} to the stream as four consecutive ints:
 * operator-descriptor id, activity local id, task partition, and attempt number.
 *
 * @param dos  destination stream
 * @param taId the task attempt id to serialize
 * @throws IOException if the underlying stream write fails
 */
private static void writeTaskAttemptId(DataOutputStream dos, TaskAttemptId taId) throws IOException {
    TaskId taskId = taId.getTaskId();
    ActivityId activityId = taskId.getActivityId();
    // Write order matters: readers must consume the four ints in this exact sequence.
    dos.writeInt(activityId.getOperatorDescriptorId().getId());
    dos.writeInt(activityId.getLocalId());
    dos.writeInt(taskId.getPartition());
    dos.writeInt(taId.getAttempt());
}
private void run() throws HyracksDataException { // Start by getting the partition number from the manager LOGGER.info("Starting ingestion for partition:" + ctx.getTaskAttemptId().getTaskId().getPartition()); try { doRun(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw HyracksDataException.create(e); } catch (Exception e) { LOGGER.log(Level.WARN, "Unhandled Exception", e); throw HyracksDataException.create(e); } }
/**
 * Creates an evaluator that produces a monotonically increasing unique-id binary value.
 * The buffer layout is: | TypeTag (1 byte) | PayloadLength (1 byte) | Payload |,
 * where the payload is 12 bytes: | partition-id (4 bytes) | local-id (8 bytes) |.
 * The partition id is fixed at creation time; the trailing bytes are incremented
 * on every call, so each evaluation yields a distinct value within this partition.
 * NOTE(review): the single uidBytes buffer is captured and mutated by the returned
 * evaluator — presumably one evaluator per task, so no synchronization; confirm callers
 * never share an evaluator across threads.
 */
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
    // Format: |TypeTag | PayloadLength | Payload |
    // TypeTag: 1 byte
    // PayloadLength: 1 byte
    // Payload: 12 bytes: |partition-id (4 bytes) | local-id (8 bytes) |
    byte[] uidBytes = new byte[BINARY_LENGTH];
    // Writes the type tag.
    uidBytes[0] = ATypeTag.SERIALIZED_BINARY_TYPE_TAG;
    // Writes the payload size (constant expression, so the narrowing to byte is implicit).
    uidBytes[1] = BINARY_LENGTH - PAYLOAD_START;
    // Writes the 4 byte partition id.
    IntegerPointable.setInteger(uidBytes, PAYLOAD_START, ctx.getTaskAttemptId().getTaskId().getPartition());
    return new IScalarEvaluator() {
        @Override
        public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
            // Increments the Unique ID value: ripple-carry from the last payload byte
            // toward PAYLOAD_START, stopping at the first byte that does not wrap to 0.
            for (int i = BINARY_LENGTH - 1; i >= PAYLOAD_START; i--) {
                if (++uidBytes[i] != 0) {
                    break;
                }
            }
            result.set(uidBytes, 0, BINARY_LENGTH);
        }
    };
}
};
// Fragment (incomplete in this view — the NetworkInputChannel constructor call continues
// beyond the visible span). Wires up the input channel for sender partition j of input i.
NetworkAddress networkAddress = inputAddresses[i][j];
// PartitionId identifies the data being consumed: (job, connector, sender partition j,
// receiving partition = this task's partition).
PartitionId pid = new PartitionId(jobId, inputs.get(i).getConnectorId(), j,
        td.getTaskAttemptId().getTaskId().getPartition());
PartitionChannel channel = new PartitionChannel(pid, new NetworkInputChannel(ncs.getNetworkManager(),
private void doRun() throws HyracksDataException, InterruptedException { while (true) { try { // Start the adapter adapter.start(ctx.getTaskAttemptId().getTaskId().getPartition(), writer); // Adapter has completed execution return; } catch (InterruptedException e) { throw e; } catch (Exception e) { LOGGER.log(Level.WARN, "Exception during feed ingestion ", e); throw HyracksDataException.create(e); } } }
/**
 * Builds the push runtime for this commit operator: a single {@link CommitRuntime}
 * bound to the transaction id obtained from the joblet's event listener factory
 * and to the storage partition mapped from this task's partition.
 */
@Override
public IPushRuntime[] createPushRuntime(IHyracksTaskContext ctx) throws HyracksDataException {
    // Map the task partition to the dataset's storage partition.
    int storagePartition = datasetPartitions[ctx.getTaskAttemptId().getTaskId().getPartition()];
    IJobletEventListenerFactory listenerFactory = ctx.getJobletContext().getJobletEventListenerFactory();
    CommitRuntime commitRuntime =
            new CommitRuntime(ctx, ((IJobEventListenerFactory) listenerFactory).getTxnId(datasetId), datasetId,
                    primaryKeyFields, isWriteTransaction, storagePartition, isSink);
    return new IPushRuntime[] { commitRuntime };
}
}
// Fragment (the for-loop body continues beyond the visible span).
// Serializes each task attempt of this task-cluster attempt into a JSON object.
for (TaskAttempt ta : tca.getTaskAttempts().values()) {
    ObjectNode taskAttempt = om.createObjectNode();
    taskAttempt.putPOJO("task-id", ta.getTaskAttemptId().getTaskId());
    taskAttempt.putPOJO("task-attempt-id", ta.getTaskAttemptId());
    taskAttempt.putPOJO("status", ta.getStatus());
/**
 * Locates the {@link TaskAttempt} addressed by {@code taId} inside the job's
 * activity-cluster plan and fires {@link #performEvent} on it. Silently returns
 * if any link in the lookup chain (job run, activity cluster, task array,
 * cluster attempt, or task attempt) is missing or out of range.
 */
@Override
public final void runWork() {
    IJobManager jobManager = ccs.getJobManager();
    JobRun run = jobManager.get(jobId);
    if (run == null) {
        return;
    }
    TaskId tid = taId.getTaskId();
    Map<ActivityId, ActivityCluster> activityClusterMap = run.getActivityClusterGraph().getActivityMap();
    ActivityCluster ac = activityClusterMap.get(tid.getActivityId());
    if (ac == null) {
        return;
    }
    Map<ActivityId, ActivityPlan> taskStateMap =
            run.getActivityClusterPlanMap().get(ac.getId()).getActivityPlanMap();
    Task[] taskStates = taskStateMap.get(tid.getActivityId()).getTasks();
    // Guard against a partition index beyond the planned task array.
    if (taskStates == null || taskStates.length <= tid.getPartition()) {
        return;
    }
    TaskCluster tc = taskStates[tid.getPartition()].getTaskCluster();
    List<TaskClusterAttempt> taskClusterAttempts = tc.getAttempts();
    // Guard against an attempt index that was never recorded.
    if (taskClusterAttempts == null || taskClusterAttempts.size() <= taId.getAttempt()) {
        return;
    }
    TaskAttempt ta = taskClusterAttempts.get(taId.getAttempt()).getTaskAttempts().get(tid);
    if (ta != null) {
        performEvent(ta);
    }
}
// Fragment: resolves the descriptor for this task index down to its activity cluster
// (attempt id -> task id -> activity id -> cluster in the activity-cluster graph).
TaskAttemptDescriptor td = taskDescriptors.get(taskIndex);
TaskAttemptId taId = td.getTaskAttemptId();
TaskId tid = taId.getTaskId();
ActivityId aid = tid.getActivityId();
ActivityCluster ac = acg.getActivityMap().get(aid);
// Fragment: per-task deserialization streams and id-provider setup.
final ByteBufferInputStream bbis = new ByteBufferInputStream();
final DataInputStream di = new DataInputStream(bbis);
// The task partition seeds the tree-node id provider. NOTE(review): narrowed to
// short — assumes fewer than 32768 partitions; confirm against deployment limits.
final int partition = ctx.getTaskAttemptId().getTaskId().getPartition();
final ITreeNodeIdProvider nodeIdProvider = new TreeNodeIdProvider((short) partition);
final String nodeId = ctx.getJobletContext().getApplicationContext().getNodeId();
// Fragment: per-task deserialization streams and id-provider setup.
final ByteBufferInputStream bbis = new ByteBufferInputStream();
final DataInputStream di = new DataInputStream(bbis);
// The task partition seeds the tree-node id provider. NOTE(review): narrowed to
// short — assumes fewer than 32768 partitions; confirm against deployment limits.
final int partition = ctx.getTaskAttemptId().getTaskId().getPartition();
final ITreeNodeIdProvider nodeIdProvider = new TreeNodeIdProvider((short) partition);
final String nodeId = ctx.getJobletContext().getApplicationContext().getNodeId();
// Fragment: unpacks the attempt descriptor and fetches the input connectors
// feeding this task's activity.
TaskAttemptId taid = tad.getTaskAttemptId();
int attempt = taid.getAttempt();
TaskId tid = taid.getTaskId();
ActivityId aid = tid.getActivityId();
List<IConnectorDescriptor> inConnectors = acg.getActivityInputs(aid);
/**
 * Builds the push runtime for this commit operator: a single {@link CommitRuntime}
 * whose transaction id is derived from the current Hyracks job id and whose
 * partition is this task's partition.
 */
@Override
public IPushRuntime[] createPushRuntime(IHyracksTaskContext ctx) throws HyracksDataException {
    int taskPartition = ctx.getTaskAttemptId().getTaskId().getPartition();
    CommitRuntime commitRuntime = new CommitRuntime(ctx, new TxnId(ctx.getJobletContext().getJobId().getId()),
            getDatasetId(), primaryKeyFieldPermutation, true, taskPartition, true);
    return new IPushRuntime[] { commitRuntime };
}
};
// Fragment (begins mid-argument-list of a constructor call started before this view,
// and the second pushable below is presumably assigned on a line not visible here).
// Builds insert/delete pushables for the primary and secondary indexes; both use this
// task's partition.
storageComponentProvider.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
LSMInsertDeleteOperatorNodePushable insertOp = new LSMInsertDeleteOperatorNodePushable(ctx,
        ctx.getTaskAttemptId().getTaskId().getPartition(), primaryIndexInfo.primaryIndexInsertFieldsPermutations,
        recordDesc, op, true, indexHelperFactory, modOpCallbackFactory, null, null);
new LSMInsertDeleteOperatorNodePushable(ctx, ctx.getTaskAttemptId().getTaskId().getPartition(),
        secondaryIndexInfo.insertFieldsPermutations, secondaryIndexInfo.rDesc, op, false,
        secondaryIndexHelperFactory, secondaryModCallbackFactory, null, null);
// Fragment (begins mid-argument-list of a constructor call started before this view).
// Builds the primary-index upsert pushable and wires its output into the commit operator.
storageComponentProvider.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
LSMPrimaryUpsertOperatorNodePushable insertOp = new LSMPrimaryUpsertOperatorNodePushable(ctx,
        ctx.getTaskAttemptId().getTaskId().getPartition(), indexHelperFactory,
        primaryIndexInfo.primaryIndexInsertFieldsPermutations,
        // Input record descriptor is looked up via a synthetic (operator 0, activity 0) id.
        recordDescProvider.getInputRecordDescriptor(new ActivityId(new OperatorDescriptorId(0), 0), 0), true,
        ctx.getTaskAttemptId().getTaskId().getPartition(), true);
insertOp.setOutputFrameWriter(0, commitOp, upsertOutRecDesc);
commitOp.setInputRecordDescriptor(0, upsertOutRecDesc);
/**
 * Assembles a test pipeline that full-scans a dataset's primary BTree index:
 * empty-tuple source -> BTree search -> the caller-supplied {@code countOp} sink.
 *
 * @param countOp  frame writer that consumes (e.g. counts) the scanned tuples
 * @param ctx      task context the runtimes are created against
 * @param dataset  dataset whose primary index is scanned
 * @return the head of the pipeline (the empty-tuple source); pushing into it drives the scan
 * @throws HyracksDataException on runtime-creation failure
 * @throws AlgebricksException  on metadata/plan construction failure
 */
public IPushRuntime getFullScanPipeline(IFrameWriter countOp, IHyracksTaskContext ctx, Dataset dataset,
        IAType[] primaryKeyTypes, ARecordType recordType, ARecordType metaType,
        NoMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyProperties, int[] filterFields,
        int[] primaryKeyIndexes, List<Integer> primaryKeyIndicators,
        StorageComponentProvider storageComponentProvider) throws HyracksDataException, AlgebricksException {
    IPushRuntime emptyTupleOp = new EmptyTupleSourceRuntimeFactory().createPushRuntime(ctx)[0];
    JobSpecification spec = new JobSpecification();
    // Describes the primary index (types, merge policy, filters) for the target dataset.
    PrimaryIndexInfo primaryIndexInfo = new PrimaryIndexInfo(dataset, primaryKeyTypes, recordType, metaType,
            mergePolicyFactory, mergePolicyProperties, filterFields, primaryKeyIndexes, primaryKeyIndicators);
    IIndexDataflowHelperFactory indexDataflowHelperFactory = new IndexDataflowHelperFactory(
            storageComponentProvider.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
    // null low/high key fields + lowInclusive/highInclusive=true => unbounded range, i.e. full scan.
    BTreeSearchOperatorDescriptor searchOpDesc = new BTreeSearchOperatorDescriptor(spec, primaryIndexInfo.rDesc,
            null, null, true, true, indexDataflowHelperFactory, false, false, null,
            NoOpOperationCallbackFactory.INSTANCE, filterFields, filterFields, false);
    BTreeSearchOperatorNodePushable searchOp = searchOpDesc.createPushRuntime(ctx,
            primaryIndexInfo.getSearchRecordDescriptorProvider(), ctx.getTaskAttemptId().getTaskId().getPartition(),
            1);
    // Wire: source -> search -> countOp.
    emptyTupleOp.setOutputFrameWriter(0, searchOp,
            primaryIndexInfo.getSearchRecordDescriptorProvider().getInputRecordDescriptor(null, 0));
    searchOp.setOutputFrameWriter(0, countOp, primaryIndexInfo.rDesc);
    return emptyTupleOp;
}
// Fragment: per-task frame, appender, and id-provider setup.
final IFrame frame = new VSizeFrame(ctx);
final IFrameFieldAppender appender = new FrameFixedFieldTupleAppender(fieldOutputCount);
// NOTE(review): partition narrowed to short — assumes fewer than 32768 partitions.
final short partitionId = (short) ctx.getTaskAttemptId().getTaskId().getPartition();
final ITreeNodeIdProvider nodeIdProvider = new TreeNodeIdProvider(partitionId, dataSourceId, totalDataSources);
final String nodeId = ctx.getJobletContext().getApplicationContext().getNodeId();