private void run() throws HyracksDataException { // Start by getting the partition number from the manager LOGGER.info("Starting ingestion for partition:" + ctx.getTaskAttemptId().getTaskId().getPartition()); try { doRun(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw HyracksDataException.create(e); } catch (Exception e) { LOGGER.log(Level.WARN, "Unhandled Exception", e); throw HyracksDataException.create(e); } }
// Tag the current thread's name with the job id, the task attempt id, and the operator
// class so thread dumps and log lines can be correlated with a specific task execution.
Thread.currentThread()
        .setName(Thread.currentThread().getName() + ":" + ctx.getJobletContext().getJobId() + ":"
                + ctx.getTaskAttemptId() + ":" + SuperActivityOperatorNodePushable.class.getSimpleName());
// Run the supplied action against the operator.
// NOTE(review): fragment — 'action' and 'op' are declared outside this view; confirm
// their contract (e.g. whether run() may block) against the enclosing method.
action.run(op);
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
    // Builds a per-partition unique-id generator. Serialized layout:
    // Format: |TypeTag | PayloadLength | Payload |
    // TypeTag: 1 byte
    // PayloadLength: 1 byte
    // Payload: 12 bytes: |partition-id (4 bytes) | local-id (8 bytes) |
    byte[] uidBytes = new byte[BINARY_LENGTH];
    // Writes the type tag.
    uidBytes[0] = ATypeTag.SERIALIZED_BINARY_TYPE_TAG;
    // Writes the payload size.
    uidBytes[1] = BINARY_LENGTH - PAYLOAD_START;
    // Writes the 4 byte partition id.
    IntegerPointable.setInteger(uidBytes, PAYLOAD_START, ctx.getTaskAttemptId().getTaskId().getPartition());
    return new IScalarEvaluator() {
        @Override
        public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
            // Increments the Unique ID value: ripple-carry from the least-significant
            // byte upward, stopping at the first byte that does not wrap to zero.
            // NOTE(review): the loop bound is PAYLOAD_START, so a full wrap of the
            // 8-byte local-id would carry into the partition-id bytes — presumably
            // unreachable in practice; confirm the local-id space suffices.
            for (int i = BINARY_LENGTH - 1; i >= PAYLOAD_START; i--) {
                if (++uidBytes[i] != 0) {
                    break;
                }
            }
            // The same backing array is reused and mutated on every call; callers must
            // consume the pointable's bytes before the next evaluate().
            result.set(uidBytes, 0, BINARY_LENGTH);
        }
    };
}
};
private void doRun() throws HyracksDataException, InterruptedException { while (true) { try { // Start the adapter adapter.start(ctx.getTaskAttemptId().getTaskId().getPartition(), writer); // Adapter has completed execution return; } catch (InterruptedException e) { throw e; } catch (Exception e) { LOGGER.log(Level.WARN, "Exception during feed ingestion ", e); throw HyracksDataException.create(e); } } }
@Override
public IPushRuntime[] createPushRuntime(IHyracksTaskContext ctx) throws HyracksDataException {
    // The commit runtime needs the transaction id that the job-level event listener
    // factory registered for this dataset.
    IJobEventListenerFactory fact =
            (IJobEventListenerFactory) ctx.getJobletContext().getJobletEventListenerFactory();
    int partition = ctx.getTaskAttemptId().getTaskId().getPartition();
    return new IPushRuntime[] { new CommitRuntime(ctx, fact.getTxnId(datasetId), datasetId, primaryKeyFields,
            isWriteTransaction, datasetPartitions[partition], isSink) };
}
}
/**
 * Wraps a frame writer with profiling counters for one sender/receiver pair of a
 * connector. Counter names are of the form
 * {@code <connectorId>.sender.<attempt>.<senderIndex>.<receiverIndex>.<event>}.
 */
public ConnectorSenderProfilingFrameWriter(IHyracksTaskContext ctx, IFrameWriter writer,
        ConnectorDescriptorId cdId, int senderIndex, int receiverIndex) {
    this.writer = writer;
    int attempt = ctx.getTaskAttemptId().getAttempt();
    // All three counters share the same name prefix; build it once instead of
    // repeating the concatenation three times (resulting names are unchanged).
    String prefix = cdId + ".sender." + attempt + "." + senderIndex + "." + receiverIndex + ".";
    this.openCounter = ctx.getCounterContext().getCounter(prefix + "open", true);
    this.closeCounter = ctx.getCounterContext().getCounter(prefix + "close", true);
    this.frameCounter = ctx.getCounterContext().getCounter(prefix + "nextFrame", true);
}
/**
 * Builds a partly-mocked task context for integration tests: a real 32KB-frame context
 * backed by the first NC's IO manager, spied so that joblet-context, IO-manager, and
 * task-attempt-id lookups return canned values for the given job/partition.
 */
public IHyracksTaskContext createTestContext(JobId jobId, int partition, boolean withMessaging)
        throws HyracksDataException {
    IHyracksTaskContext ctx = TestUtils.create(KB32, ExecutionTestUtil.integrationUtil.ncs[0].getIoManager());
    if (withMessaging) {
        // Pre-register a message frame so message-aware operators find one.
        TaskUtil.put(HyracksConstants.KEY_MESSAGE, new VSizeFrame(ctx), ctx);
    }
    // Mock joblet context: event-listener factory keyed to this job's txn id, plus the
    // real NC service context and the caller's job id.
    IHyracksJobletContext jobletCtx = Mockito.mock(IHyracksJobletContext.class);
    JobEventListenerFactory factory = new JobEventListenerFactory(new TxnId(jobId.getId()), true);
    Mockito.when(jobletCtx.getJobletEventListenerFactory()).thenReturn(factory);
    Mockito.when(jobletCtx.getServiceContext()).thenReturn(ExecutionTestUtil.integrationUtil.ncs[0].getContext());
    Mockito.when(jobletCtx.getJobId()).thenReturn(jobId);
    // Spy AFTER the message frame was registered so the spy observes it.
    // NOTE(review): Mockito.when(...) on a spy calls the real getter while stubbing;
    // harmless for these getters, but doReturn(...).when(ctx)... would avoid it.
    ctx = Mockito.spy(ctx);
    Mockito.when(ctx.getJobletContext()).thenReturn(jobletCtx);
    Mockito.when(ctx.getIoManager()).thenReturn(ExecutionTestUtil.integrationUtil.ncs[0].getIoManager());
    // Fixed operator/activity (0, 0) and attempt 0 — only the partition varies.
    TaskAttemptId taskId =
            new TaskAttemptId(new TaskId(new ActivityId(new OperatorDescriptorId(0), 0), partition), 0);
    Mockito.when(ctx.getTaskAttemptId()).thenReturn(taskId);
    return ctx;
}
// Deserialization plumbing: a reusable byte-buffer stream wrapped in a DataInputStream.
final ByteBufferInputStream bbis = new ByteBufferInputStream();
final DataInputStream di = new DataInputStream(bbis);
// This task's partition number seeds the tree-node-id provider.
// NOTE(review): the narrowing cast assumes the partition fits in a short — confirm.
final int partition = ctx.getTaskAttemptId().getTaskId().getPartition();
final ITreeNodeIdProvider nodeIdProvider = new TreeNodeIdProvider((short) partition);
final String nodeId = ctx.getJobletContext().getApplicationContext().getNodeId();
// Deserialization plumbing: a reusable byte-buffer stream wrapped in a DataInputStream.
final ByteBufferInputStream bbis = new ByteBufferInputStream();
final DataInputStream di = new DataInputStream(bbis);
// This task's partition number seeds the tree-node-id provider.
// NOTE(review): the narrowing cast assumes the partition fits in a short — confirm.
final int partition = ctx.getTaskAttemptId().getTaskId().getPartition();
final ITreeNodeIdProvider nodeIdProvider = new TreeNodeIdProvider((short) partition);
final String nodeId = ctx.getJobletContext().getApplicationContext().getNodeId();
@Override
public IPushRuntime[] createPushRuntime(IHyracksTaskContext ctx) throws HyracksDataException {
    // Derive the transaction id directly from the job id and commit on this task's
    // partition; 'true' flags mirror the original call site.
    TxnId txnId = new TxnId(ctx.getJobletContext().getJobId().getId());
    int partition = ctx.getTaskAttemptId().getTaskId().getPartition();
    CommitRuntime commit =
            new CommitRuntime(ctx, txnId, getDatasetId(), primaryKeyFieldPermutation, true, partition, true);
    return new IPushRuntime[] { commit };
}
};
storageComponentProvider.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
// Insert/delete pushable for the primary index, bound to this task's partition.
LSMInsertDeleteOperatorNodePushable insertOp = new LSMInsertDeleteOperatorNodePushable(ctx,
        ctx.getTaskAttemptId().getTaskId().getPartition(), primaryIndexInfo.primaryIndexInsertFieldsPermutations,
        recordDesc, op, true, indexHelperFactory, modOpCallbackFactory, null, null);
// NOTE(review): fragment — if this second pushable has an assignment target it lies
// outside this view. The boolean argument differs (true above vs false here);
// presumably it distinguishes primary vs secondary index — confirm against the
// pushable's constructor.
new LSMInsertDeleteOperatorNodePushable(ctx, ctx.getTaskAttemptId().getTaskId().getPartition(),
        secondaryIndexInfo.insertFieldsPermutations, secondaryIndexInfo.rDesc, op, false,
        secondaryIndexHelperFactory, secondaryModCallbackFactory, null, null);
storageComponentProvider.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
// Upsert pushable wired to this task's partition. The partition value is passed twice;
// NOTE(review): confirm both constructor parameters indeed expect the same number.
LSMPrimaryUpsertOperatorNodePushable insertOp = new LSMPrimaryUpsertOperatorNodePushable(ctx,
        ctx.getTaskAttemptId().getTaskId().getPartition(), indexHelperFactory,
        primaryIndexInfo.primaryIndexInsertFieldsPermutations,
        recordDescProvider.getInputRecordDescriptor(new ActivityId(new OperatorDescriptorId(0), 0), 0), true,
        ctx.getTaskAttemptId().getTaskId().getPartition(), true);
// Chain upsert -> commit, with both sides agreeing on the upsert output descriptor.
insertOp.setOutputFrameWriter(0, commitOp, upsertOutRecDesc);
commitOp.setInputRecordDescriptor(0, upsertOutRecDesc);
/**
 * Assembles a full-scan pipeline over the dataset's primary index:
 * empty-tuple source -> B-tree search -> caller-supplied sink ({@code countOp}).
 * Returns the source runtime; opening it drives the whole pipeline.
 */
public IPushRuntime getFullScanPipeline(IFrameWriter countOp, IHyracksTaskContext ctx, Dataset dataset,
        IAType[] primaryKeyTypes, ARecordType recordType, ARecordType metaType,
        NoMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyProperties, int[] filterFields,
        int[] primaryKeyIndexes, List<Integer> primaryKeyIndicators,
        StorageComponentProvider storageComponentProvider) throws HyracksDataException, AlgebricksException {
    // Source of the pipeline: a single empty tuple that triggers the scan.
    IPushRuntime sourceOp = new EmptyTupleSourceRuntimeFactory().createPushRuntime(ctx)[0];
    JobSpecification spec = new JobSpecification();
    PrimaryIndexInfo indexInfo = new PrimaryIndexInfo(dataset, primaryKeyTypes, recordType, metaType,
            mergePolicyFactory, mergePolicyProperties, filterFields, primaryKeyIndexes, primaryKeyIndicators);
    IIndexDataflowHelperFactory helperFactory = new IndexDataflowHelperFactory(
            storageComponentProvider.getStorageManager(), indexInfo.getFileSplitProvider());
    // null/null with the two 'true' flags presumably means an unbounded, inclusive
    // search range, i.e. a full index scan — confirm against the descriptor's ctor.
    BTreeSearchOperatorDescriptor scanDesc = new BTreeSearchOperatorDescriptor(spec, indexInfo.rDesc, null, null,
            true, true, helperFactory, false, false, null, NoOpOperationCallbackFactory.INSTANCE, filterFields,
            filterFields, false);
    BTreeSearchOperatorNodePushable scanOp = scanDesc.createPushRuntime(ctx,
            indexInfo.getSearchRecordDescriptorProvider(), ctx.getTaskAttemptId().getTaskId().getPartition(), 1);
    // Wire the stages together with their record descriptors.
    sourceOp.setOutputFrameWriter(0, scanOp,
            indexInfo.getSearchRecordDescriptorProvider().getInputRecordDescriptor(null, 0));
    scanOp.setOutputFrameWriter(0, countOp, indexInfo.rDesc);
    return sourceOp;
}
// Output frame and a fixed-field appender sized for the declared output field count.
final IFrame frame = new VSizeFrame(ctx);
final IFrameFieldAppender appender = new FrameFixedFieldTupleAppender(fieldOutputCount);
// The partition number seeds the tree-node-id provider together with the data-source
// coordinates. NOTE(review): the cast assumes the partition fits in a short — confirm.
final short partitionId = (short) ctx.getTaskAttemptId().getTaskId().getPartition();
final ITreeNodeIdProvider nodeIdProvider = new TreeNodeIdProvider(partitionId, dataSourceId, totalDataSources);
final String nodeId = ctx.getJobletContext().getApplicationContext().getNodeId();