@Override
protected RunFileWriter getRunFileWriter() throws HyracksDataException {
    // Allocate a managed workspace file named after this run generator; the joblet
    // context tracks the file so it is cleaned up together with the joblet.
    final FileReference runFile =
            ctx.getJobletContext().createManagedWorkspaceFile(HybridTopKSortRunGenerator.class.getSimpleName());
    return new RunFileWriter(runFile, ctx.getIoManager());
}
@Override
public IPushRuntime[] createPushRuntime(IHyracksTaskContext ctx) throws HyracksDataException {
    // The joblet event listener factory carries the transaction id registered for this job.
    final IJobletEventListenerFactory fact = ctx.getJobletContext().getJobletEventListenerFactory();
    // Map this task's partition to the storage partition it commits into.
    final int storagePartition = datasetPartitions[ctx.getTaskAttemptId().getTaskId().getPartition()];
    final CommitRuntime commitRuntime = new CommitRuntime(ctx, ((IJobEventListenerFactory) fact).getTxnId(datasetId),
            datasetId, primaryKeyFields, isWriteTransaction, storagePartition, isSink);
    return new IPushRuntime[] { commitRuntime };
}
}
protected void calculateNumElementPerPage() { int frameSize = ctx.getInitialFrameSize(); // The count of Minframe, and the count of tuples in a frame should be deducted. frameSize = frameSize - FixedSizeFrameTupleAppender.MINFRAME_COUNT_SIZE - FixedSizeFrameTupleAppender.TUPLE_COUNT_SIZE; numPossibleElementPerPage = (int) Math.floor((double) frameSize / (invListElementSize + ELEMENT_COUNT_SIZE)); }
/**
 * Gets the shared object of a task as a {@code Map<String, Object>}.
 *
 * @param ctx
 *            the task context
 * @param create
 *            whether to create and register a new map when none exists yet
 * @return the task shared map, or {@code null} when no map exists and {@code create} is {@code false}
 */
@SuppressWarnings("unchecked")
public static Map<String, Object> getSharedMap(IHyracksTaskContext ctx, boolean create) {
    final Object shared = ctx.getSharedObject();
    if (shared != null) {
        return (Map<String, Object>) shared;
    }
    if (!create) {
        return null;
    }
    final Map<String, Object> taskMap = new HashMap<>();
    ctx.setSharedObject(taskMap);
    return taskMap;
}
@Override
protected RunFileWriter prepareIntermediateMergeRunFile() throws HyracksDataException {
    // Intermediate merge output goes into a fresh managed workspace file so the
    // runtime can reclaim it when the task/joblet finishes.
    final FileReference mergeRun = ctx.createManagedWorkspaceFile(ExternalSortRunMerger.class.getSimpleName());
    return new RunFileWriter(mergeRun, ctx.getIoManager());
}
public ConnectorSenderProfilingFrameWriter(IHyracksTaskContext ctx, IFrameWriter writer, ConnectorDescriptorId cdId,
        int senderIndex, int receiverIndex) {
    this.writer = writer;
    // All three counters share the "<cdId>.sender.<attempt>.<sender>.<receiver>." name prefix;
    // build it once instead of repeating the concatenation per counter.
    final String prefix =
            cdId + ".sender." + ctx.getTaskAttemptId().getAttempt() + "." + senderIndex + "." + receiverIndex + ".";
    this.openCounter = ctx.getCounterContext().getCounter(prefix + "open", true);
    this.closeCounter = ctx.getCounterContext().getCounter(prefix + "close", true);
    this.frameCounter = ctx.getCounterContext().getCounter(prefix + "nextFrame", true);
}
protected AbstractTypeScalarEvaluator(IScalarEvaluator[] args, IHyracksTaskContext ctx) {
    super(args);
    // Mark the evaluator as not yet invoked and prepare a reusable integer pointable.
    first = true;
    ip = (IntegerPointable) IntegerPointable.FACTORY.createPointable();
    // The dynamic context is stashed as the joblet-wide global job data.
    dCtx = (DynamicContext) ctx.getJobletContext().getGlobalJobData();
}
public IHyracksTaskContext createTestContext(JobId jobId, int partition, boolean withMessaging)
        throws HyracksDataException {
    // Base task context backed by the first NC's IO manager.
    IHyracksTaskContext taskCtx = TestUtils.create(KB32, ExecutionTestUtil.integrationUtil.ncs[0].getIoManager());
    if (withMessaging) {
        // Seed the message key before spying so the frame is created against the real context.
        TaskUtil.put(HyracksConstants.KEY_MESSAGE, new VSizeFrame(taskCtx), taskCtx);
    }
    // Mock a joblet context wired to a job event listener carrying the txn id for this job.
    IHyracksJobletContext mockJoblet = Mockito.mock(IHyracksJobletContext.class);
    JobEventListenerFactory listenerFactory = new JobEventListenerFactory(new TxnId(jobId.getId()), true);
    Mockito.when(mockJoblet.getJobletEventListenerFactory()).thenReturn(listenerFactory);
    Mockito.when(mockJoblet.getServiceContext()).thenReturn(ExecutionTestUtil.integrationUtil.ncs[0].getContext());
    Mockito.when(mockJoblet.getJobId()).thenReturn(jobId);
    // Spy on the real context so only the stubbed accessors are overridden.
    taskCtx = Mockito.spy(taskCtx);
    Mockito.when(taskCtx.getJobletContext()).thenReturn(mockJoblet);
    Mockito.when(taskCtx.getIoManager()).thenReturn(ExecutionTestUtil.integrationUtil.ncs[0].getIoManager());
    TaskAttemptId attemptId =
            new TaskAttemptId(new TaskId(new ActivityId(new OperatorDescriptorId(0), 0), partition), 0);
    Mockito.when(taskCtx.getTaskAttemptId()).thenReturn(attemptId);
    return taskCtx;
}
new VariableFrameMemoryManager(new VariableFramePool(ctx, ctx.getInitialFrameSize() * (memSize - 2)), FrameFreeSlotPolicyFactory.createFreeSlotPolicy(EnumFreeSlotPolicy.LAST_FIT, memSize - 2)); ctx.getJobletContext().createManagedWorkspaceFile(this.getClass().getSimpleName() + this.toString()); runFileWriter = new RunFileWriter(file, ctx.getIoManager()); runFileWriter.open();
try { for (final IOperatorNodePushable op : operatorNodePushablesBFSOrder) { tasks.add(ctx.getExecutorService().submit(() -> { startSemaphore.release(); try { Thread.currentThread() .setName(Thread.currentThread().getName() + ":" + ctx.getJobletContext().getJobId() + ":" + ctx.getTaskAttemptId() + ":" + SuperActivityOperatorNodePushable.class.getSimpleName()); action.run(op);
lsmIndex = (AbstractLSMIndex) indexHelper.getIndexInstance(); try { if (isPrimary && ctx.getSharedObject() != null) { PrimaryIndexLogMarkerCallback callback = new PrimaryIndexLogMarkerCallback(lsmIndex); TaskUtil.put(ILogMarkerCallback.KEY_MARKER_CALLBACK, callback, ctx); (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext(); LSMIndexUtil.checkAndSetFirstLSN(lsmIndex, runtimeCtx.getTransactionSubsystem().getLogManager()); } catch (Throwable th) {
public MaterializingPipelinedPartition(IHyracksTaskContext ctx, PartitionManager manager, PartitionId pid,
        TaskAttemptId taId, Executor executor) {
    // Keep the task context and derive the IO manager from it once.
    this.ctx = ctx;
    this.ioManager = ctx.getIoManager();
    this.manager = manager;
    this.pid = pid;
    this.taId = taId;
    this.executor = executor;
}
@Override
public void open() throws HyracksDataException {
    super.open();
    // Register a per-partition deleted-tuple counter as task state, keyed by this
    // job's id, so other activities of the job can look it up later.
    deletedTupleCounter = new DeletedTupleCounter(ctx.getJobletContext().getJobId(), partition);
    ctx.setStateObject(deletedTupleCounter);
    try {
        // Builder/appender pair used to emit output tuples matching the record descriptor.
        tb = new ArrayTupleBuilder(recordDesc.getFieldCount());
        dos = tb.getDataOutput();
        appender = new FrameTupleAppender(new VSizeFrame(ctx), true);
    } catch (Exception e) {
        // Normalize any setup failure (e.g. frame allocation) into HyracksDataException.
        throw HyracksDataException.create(e);
    }
}
@Override
public final void deinitialize() throws HyracksDataException {
    // Remove this runtime from the active manager before notifying the CC.
    activeManager.deregisterRuntime(runtimeId);
    try {
        final ActivePartitionMessage message = new ActivePartitionMessage(runtimeId,
                ctx.getJobletContext().getJobId(), Event.RUNTIME_DEREGISTERED, null);
        ctx.sendApplicationMessageToCC(message, null);
    } catch (Exception e) {
        // Log, then surface the failure as a HyracksDataException.
        LOGGER.log(Level.INFO, "deinitialize() failed on ActiveSourceOperatorNodePushable", e);
        throw HyracksDataException.create(e);
    } finally {
        LOGGER.log(Level.INFO, "deinitialize() returning on ActiveSourceOperatorNodePushable");
    }
}
final IHyracksTaskContext ctx = nc.createTestContext(jobId, 0, true); final ConnectorDescriptorId codId = new ConnectorDescriptorId(1); final PartitionId pid = new PartitionId(ctx.getJobletContext().getJobId(), codId, 1, 1); final ChannelControlBlock ccb = ncs.getNetworkManager() .connect(NetworkingUtil.getSocketAddress(ncs.getNetworkManager().getLocalNetworkAddress())); mpp.open(); final ByteBuffer frame = ctx.allocateFrame(); while (frame.hasRemaining()) { frame.put((byte) 0);
private void run() throws HyracksDataException { // Start by getting the partition number from the manager LOGGER.info("Starting ingestion for partition:" + ctx.getTaskAttemptId().getTaskId().getPartition()); try { doRun(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw HyracksDataException.create(e); } catch (Exception e) { LOGGER.log(Level.WARN, "Unhandled Exception", e); throw HyracksDataException.create(e); } }
public ResultPartitionWriter(IHyracksTaskContext ctx, IResultPartitionManager manager, JobId jobId, ResultSetId rsId,
        boolean asyncMode, boolean orderedResult, int partition, int nPartitions,
        ResultMemoryManager resultMemoryManager, IWorkspaceFileFactory fileFactory, long maxReads) {
    this.manager = manager;
    this.jobId = jobId;
    this.resultSetId = rsId;
    this.partition = partition;
    this.nPartitions = nPartitions;
    this.orderedResult = orderedResult;
    this.resultMemoryManager = resultMemoryManager;
    // The (job, result-set, partition) triple uniquely identifies this writer's output.
    resultSetPartitionId = new ResultSetPartitionId(jobId, rsId, partition);
    // Result state owns the materialized result, backed by the task's IO manager
    // and workspace file factory, and capped at maxReads concurrent readers.
    resultState = new ResultState(resultSetPartitionId, asyncMode, ctx.getIoManager(), fileFactory,
            ctx.getInitialFrameSize(), maxReads);
}
IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions) throws HyracksDataException { final IResultPartitionManager resultPartitionManager = ctx.getResultPartitionManager(); final FrameOutputStream frameOutputStream = new FrameOutputStream(ctx.getInitialFrameSize()); frameOutputStream.reset(frame, true); PrintStream printStream = new PrintStream(frameOutputStream);
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    // Deep-copy the incoming frame so the joiner may retain it after this call returns
    // (the input buffer is owned by the caller and will be reused).
    final ByteBuffer cached = ctx.allocateFrame(buffer.capacity());
    FrameUtils.copyAndFlip(buffer, cached);
    state.joiner.cache(cached);
}
public TreeIndexDiskOrderScanOperatorNodePushable(IHyracksTaskContext ctx, int partition,
        IIndexDataflowHelperFactory indexHelperFactory, ISearchOperationCallbackFactory searchCallbackFactory)
        throws HyracksDataException {
    this.ctx = ctx;
    this.searchCallbackFactory = searchCallbackFactory;
    // Bind the index dataflow helper to this task's service context and storage partition.
    this.treeIndexHelper = indexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
}