private void compressCompletedBuckets(Task task, OperationResult result) throws SchemaException, ObjectAlreadyExistsException, ObjectNotFoundException { List<WorkBucketType> buckets = new ArrayList<>(getWorkState(task).getBucket()); TaskWorkStateTypeUtil.sortBucketsBySequentialNumber(buckets); List<WorkBucketType> completeBuckets = buckets.stream() .filter(b -> b.getState() == WorkBucketStateType.COMPLETE) .collect(Collectors.toList()); if (completeBuckets.size() <= 1) { LOGGER.trace("Compression of completed buckets: # of complete buckets is too small ({}) in {}, exiting", completeBuckets.size(), task); return; } List<ItemDelta<?, ?>> deleteItemDeltas = new ArrayList<>(); for (int i = 0; i < completeBuckets.size() - 1; i++) { deleteItemDeltas.addAll(bucketDeleteDeltas(completeBuckets.get(i))); } LOGGER.trace("Compression of completed buckets: deleting {} buckets before last completed one in {}", deleteItemDeltas.size(), task); // these buckets should not be touched by anyone (as they are already completed); so we can execute without preconditions if (!deleteItemDeltas.isEmpty()) { repositoryService.modifyObject(TaskType.class, task.getOid(), deleteItemDeltas, null, result); } }
public NumericWorkSegmentationStrategy(@NotNull TaskWorkManagementType configuration, PrismContext prismContext) { super(configuration, prismContext); this.configuration = configuration; this.bucketsConfiguration = (NumericWorkSegmentationType) TaskWorkStateTypeUtil.getWorkSegmentationConfiguration(configuration); }
/**
 * Returns the first worker task whose work state contains a bucket with the given
 * sequential number, or null if no worker holds such a bucket.
 *
 * @param workers worker tasks to search
 * @param sequentialNumber bucket number to look for
 */
public static Task findWorkerByBucketNumber(List<Task> workers, int sequentialNumber) {
    return workers.stream()
            .filter(worker -> worker.getWorkState() != null)
            .filter(worker -> TaskWorkStateTypeUtil
                    .findBucketByNumber(worker.getWorkState().getBucket(), sequentialNumber) != null)
            .findFirst()
            .orElse(null);
}
/**
 * Creates content for up to one batch of new buckets, continuing after the last
 * existing bucket (if any). Creation stops early when {@code createAdditionalBucket}
 * signals (by returning null) that no further content can be produced.
 *
 * @param workState current work state whose bucket list is continued
 * @return contents for the newly created buckets, possibly empty, never null
 */
@NotNull
protected List<? extends AbstractWorkBucketContentType> createAdditionalBuckets(TaskWorkStateType workState)
        throws SchemaException {
    WorkBucketType lastBucket = TaskWorkStateTypeUtil.getLastBucket(workState.getBucket());
    AbstractWorkBucketContentType previousContent = lastBucket != null ? lastBucket.getContent() : null;
    Integer previousNumber = lastBucket != null ? lastBucket.getSequentialNumber() : null;
    int batchSize = getBucketCreationBatch();
    List<AbstractWorkBucketContentType> createdContent = new ArrayList<>(batchSize);
    while (createdContent.size() < batchSize) {
        AbstractWorkBucketContentType next = createAdditionalBucket(previousContent, previousNumber);
        if (next == null) {
            break;      // segmentation strategy has no more buckets to offer
        }
        createdContent.add(next);
        previousContent = next;
        // First bucket ever created gets number 1; otherwise numbering continues.
        previousNumber = previousNumber != null ? previousNumber + 1 : 1;
    }
    return createdContent;
}
@Nullable private Long computeExpectedTotalIfApplicable(Class<? extends ObjectType> type, ObjectQuery query, Collection<SelectorOptions<GetOperationOptions>> queryOptions, boolean useRepository, WorkBucketType workBucket, Task localCoordinatorTask, OperationResult opResult) throws SchemaException, ObjectNotFoundException, CommunicationException, ConfigurationException, SecurityViolationException, ExpressionEvaluationException { if (!countObjectsOnStart) { return null; } else if (TaskWorkStateTypeUtil.hasLimitations(workBucket)) { // We avoid computing expected total if we are processing a bucket -- actually we could but we should // not display it as 'task expected total' return null; } else { Long expectedTotal; if (!useRepository) { Integer expectedTotalInt = countObjects(type, query, queryOptions, localCoordinatorTask, opResult); if (expectedTotalInt != null) { expectedTotal = (long) expectedTotalInt; // conversion would fail on null } else { expectedTotal = null; } } else { expectedTotal = (long) repositoryService.countObjects(type, query, queryOptions, opResult); } LOGGER.trace("{}: expecting {} objects to be processed", taskName, expectedTotal); return expectedTotal; } }
private ModificationPrecondition<TaskType> bucketUnchangedPrecondition(WorkBucketType originalBucket) { return taskObject -> { WorkBucketType currentBucket = findBucketByNumber(getWorkStateOrNew(taskObject).getBucket(), originalBucket.getSequentialNumber()); // performance is not optimal but OK for precondition checking return currentBucket != null && cloneNoId(currentBucket).equals(cloneNoId(originalBucket)); }; }
if (!newBucketsContent.isEmpty()) { List<WorkBucketType> newBuckets = new ArrayList<>(newBucketsContent.size()); WorkBucketType lastBucket = TaskWorkStateTypeUtil.getLastBucket(workState.getBucket()); int sequentialNumber = lastBucket != null ? lastBucket.getSequentialNumber() + 1 : 1; for (AbstractWorkBucketContentType newBucketContent : newBucketsContent) {
/**
 * Finds the lowest-numbered READY bucket already present in the worker task's own
 * work state, or null when the work state is missing/empty or holds no READY bucket.
 *
 * @param ctx context providing the worker task
 */
private WorkBucketType findSelfAllocatedBucket(Context ctx) {
    TaskWorkStateType workState = ctx.workerTask.getTaskType().getWorkState();
    if (workState == null || workState.getBucket().isEmpty()) {
        return null;
    }
    // Sort a copy so we pick the bucket with the smallest sequential number.
    List<WorkBucketType> sorted = new ArrayList<>(workState.getBucket());
    TaskWorkStateTypeUtil.sortBucketsBySequentialNumber(sorted);
    return sorted.stream()
            .filter(bucket -> bucket.getState() == WorkBucketStateType.READY)
            .findFirst()
            .orElse(null);
}
/**
 * Releases a DELEGATED work bucket in multi-node mode: the coordinator's copy of the
 * bucket is switched back to READY (guarded by a precondition verifying the bucket was
 * not changed concurrently), and then the worker's copy of the bucket is deleted.
 * The order matters: the coordinator is updated first so the bucket becomes available
 * again before the worker forgets about it.
 *
 * @param ctx context holding the coordinator and worker tasks
 * @param sequentialNumber number of the bucket being released
 * @param result operation result for the repository modifications
 * @throws IllegalStateException if the bucket is missing, not DELEGATED, or (unexpectedly)
 *         modified concurrently — delegated buckets should not be touched by anyone else
 */
private void releaseWorkBucketMultiNode(Context ctx, int sequentialNumber, OperationResult result)
        throws SchemaException, ObjectAlreadyExistsException, ObjectNotFoundException {
    TaskWorkStateType workState = getWorkState(ctx.coordinatorTask);
    WorkBucketType bucket = TaskWorkStateTypeUtil.findBucketByNumber(workState.getBucket(), sequentialNumber);
    if (bucket == null) {
        throw new IllegalStateException("No work bucket with sequential number of " + sequentialNumber + " in " + ctx.coordinatorTask);
    }
    if (bucket.getState() != WorkBucketStateType.DELEGATED) {
        throw new IllegalStateException("Work bucket " + sequentialNumber + " in " + ctx.coordinatorTask
                + " cannot be released, as it is not delegated; its state = " + bucket.getState());
    }
    try {
        // Flip the coordinator's bucket back to READY; the precondition guards against a
        // concurrent modification between our read above and this write.
        repositoryService.modifyObject(TaskType.class, ctx.coordinatorTask.getOid(),
                bucketStateChangeDeltas(bucket, WorkBucketStateType.READY),
                bucketUnchangedPrecondition(bucket), null, result);
    } catch (PreconditionViolationException e) {
        // just for sure
        throw new IllegalStateException("Unexpected concurrent modification of work bucket " + bucket + " in " + ctx.coordinatorTask, e);
    }
    // Now remove the worker's copy of the same bucket.
    TaskWorkStateType workerWorkState = getWorkState(ctx.workerTask);
    WorkBucketType workerBucket = TaskWorkStateTypeUtil.findBucketByNumber(workerWorkState.getBucket(), sequentialNumber);
    if (workerBucket == null) {
        //LOGGER.warn("No work bucket with sequential number of " + sequentialNumber + " in worker task " + ctx.workerTask);
        //return;
        // just during testing
        throw new IllegalStateException("No work bucket with sequential number of " + sequentialNumber + " in worker task " + ctx.workerTask);
    }
    repositoryService.modifyObject(TaskType.class, ctx.workerTask.getOid(), bucketDeleteDeltas(workerBucket), result);
}
public ExplicitWorkSegmentationStrategy(@NotNull TaskWorkManagementType configuration, PrismContext prismContext) { super(configuration, prismContext); this.bucketsConfiguration = (ExplicitWorkSegmentationType) TaskWorkStateTypeUtil.getWorkSegmentationConfiguration(configuration); }
sortBucketsBySequentialNumber(buckets); assertEquals(1, buckets.size()); assertNumericBucket(buckets.get(0), WorkBucketStateType.READY, 1, 0, 100); sortBucketsBySequentialNumber(buckets); assertEquals(2, buckets.size()); assertNumericBucket(buckets.get(0), WorkBucketStateType.COMPLETE, 1, 0, 100); sortBucketsBySequentialNumber(buckets); assertEquals(2, buckets.size()); assertNumericBucket(buckets.get(0), WorkBucketStateType.COMPLETE, 2, 100, 200);
/**
 * Marks a DELEGATED bucket as COMPLETE in multi-node mode. The sequence is:
 * (1) the coordinator's copy is switched to COMPLETE (guarded by an unchanged-bucket
 * precondition), (2) the same deltas are applied to the in-memory coordinator task so
 * it stays in sync with the repository, (3) older completed buckets are compressed
 * away, and (4) the worker's copy of the bucket is deleted.
 *
 * @param ctx context holding the coordinator and worker tasks
 * @param sequentialNumber number of the bucket being completed
 * @param result operation result for the repository modifications
 * @throws IllegalStateException if the bucket is missing, not DELEGATED, or (unexpectedly)
 *         modified concurrently — delegated buckets should not be touched by anyone else
 */
private void completeWorkBucketMultiNode(Context ctx, int sequentialNumber, OperationResult result)
        throws SchemaException, ObjectAlreadyExistsException, ObjectNotFoundException {
    TaskWorkStateType workState = getWorkState(ctx.coordinatorTask);
    WorkBucketType bucket = TaskWorkStateTypeUtil.findBucketByNumber(workState.getBucket(), sequentialNumber);
    if (bucket == null) {
        throw new IllegalStateException("No work bucket with sequential number of " + sequentialNumber + " in " + ctx.coordinatorTask);
    }
    if (bucket.getState() != WorkBucketStateType.DELEGATED) {
        throw new IllegalStateException("Work bucket " + sequentialNumber + " in " + ctx.coordinatorTask
                + " cannot be marked as complete, as it is not delegated; its state = " + bucket.getState());
    }
    Collection<ItemDelta<?, ?>> modifications = bucketStateChangeDeltas(bucket, WorkBucketStateType.COMPLETE);
    try {
        // The precondition guards against a concurrent modification of the bucket
        // between our read above and this write.
        repositoryService.modifyObject(TaskType.class, ctx.coordinatorTask.getOid(),
                modifications, bucketUnchangedPrecondition(bucket), null, result);
    } catch (PreconditionViolationException e) {
        throw new IllegalStateException("Unexpected concurrent modification of work bucket " + bucket + " in " + ctx.coordinatorTask, e);
    }
    // Keep the in-memory coordinator task consistent with the repository change.
    ItemDeltaCollectionsUtil.applyTo(modifications, ctx.coordinatorTask.getTaskPrismObject());
    compressCompletedBuckets(ctx.coordinatorTask, result);
    // Finally, remove the worker's copy of the completed bucket.
    TaskWorkStateType workerWorkState = getWorkState(ctx.workerTask);
    WorkBucketType workerBucket = TaskWorkStateTypeUtil.findBucketByNumber(workerWorkState.getBucket(), sequentialNumber);
    if (workerBucket == null) {
        //LOGGER.warn("No work bucket with sequential number of " + sequentialNumber + " in worker task " + ctx.workerTask);
        //return;
        // just during testing
        throw new IllegalStateException("No work bucket with sequential number of " + sequentialNumber + " in worker task " + ctx.workerTask);
    }
    repositoryService.modifyObject(TaskType.class, ctx.workerTask.getOid(), bucketDeleteDeltas(workerBucket), result);
}
/**
 * Creates work state management strategy based on provided configuration.
 *
 * @param configuration work management configuration; may carry no segmentation at all
 * @return a strategy instance, never null
 * @throws IllegalStateException if the segmentation type has no registered strategy class
 * @throws SystemException if the strategy class cannot be instantiated reflectively
 */
@NotNull
public WorkSegmentationStrategy createStrategy(TaskWorkManagementType configuration) {
    AbstractWorkSegmentationType segmentationConfig = TaskWorkStateTypeUtil.getWorkSegmentationConfiguration(configuration);
    if (segmentationConfig == null) {
        // No segmentation configured: treat all the work as a single (null) bucket.
        return new SingleNullWorkSegmentationStrategy(configuration, prismContext);
    }
    Class<? extends WorkSegmentationStrategy> strategyClass = strategyClassMap.get(segmentationConfig.getClass());
    if (strategyClass == null) {
        throw new IllegalStateException("Unknown or unsupported work state management configuration: " + configuration);
    }
    try {
        // Every strategy class is expected to expose a (concrete-configuration, PrismContext) constructor.
        return strategyClass
                .getConstructor(configuration.getClass(), PrismContext.class)
                .newInstance(configuration, prismContext);
    } catch (NoSuchMethodException | IllegalAccessException | InstantiationException | InvocationTargetException e) {
        throw new SystemException("Couldn't instantiate work bucket segmentation strategy " + strategyClass + " for " + configuration, e);
    }
}
assertNumericBucket(bucket4a, null, 4, 3, 4); List<WorkBucketType> buckets = new ArrayList<>(coordinator.getTaskType().getWorkState().getBucket()); sortBucketsBySequentialNumber(buckets); assertEquals(5, buckets.size()); assertNumericBucket(buckets.get(0), WorkBucketStateType.DELEGATED, 1, 0, 1); sortBucketsBySequentialNumber(buckets); sortBucketsBySequentialNumber(buckets); sortBucketsBySequentialNumber(buckets); sortBucketsBySequentialNumber(buckets); assertEquals(3, buckets.size()); assertNumericBucket(buckets.get(0), WorkBucketStateType.COMPLETE, 3, 2, 3);
/**
 * Marks a bucket as COMPLETE in standalone (single-node) mode: the bucket lives
 * directly in the worker task, so only that task is updated. The state-change deltas
 * are also applied to the in-memory worker task, and older completed buckets are
 * then compressed away.
 *
 * @param ctx context providing the worker task
 * @param sequentialNumber number of the bucket being completed
 * @param result operation result for the repository modification
 * @throws IllegalStateException if the bucket is missing or not in a ready state
 */
private void completeWorkBucketStandalone(Context ctx, int sequentialNumber, OperationResult result)
        throws SchemaException, ObjectAlreadyExistsException, ObjectNotFoundException {
    TaskWorkStateType workState = getWorkState(ctx.workerTask);
    WorkBucketType bucket = TaskWorkStateTypeUtil.findBucketByNumber(workState.getBucket(), sequentialNumber);
    if (bucket == null) {
        throw new IllegalStateException("No work bucket with sequential number of " + sequentialNumber + " in " + ctx.workerTask);
    }
    // A null state is treated the same as READY (the implicit default).
    if (bucket.getState() != WorkBucketStateType.READY && bucket.getState() != null) {
        // Fix: the message previously referenced ctx.coordinatorTask (copy-paste from the
        // multi-node variant), but in standalone mode the bucket belongs to the worker task.
        throw new IllegalStateException("Work bucket " + sequentialNumber + " in " + ctx.workerTask
                + " cannot be marked as complete, as it is not ready; its state = " + bucket.getState());
    }
    Collection<ItemDelta<?, ?>> modifications = bucketStateChangeDeltas(bucket, WorkBucketStateType.COMPLETE);
    repositoryService.modifyObject(TaskType.class, ctx.workerTask.getOid(), modifications, null, result);
    // Keep the in-memory worker task consistent with the repository change.
    ItemDeltaCollectionsUtil.applyTo(modifications, ctx.workerTask.getTaskPrismObject());
    compressCompletedBuckets(ctx.workerTask, result);
}
public StringWorkSegmentationStrategy(@NotNull TaskWorkManagementType configuration, PrismContext prismContext) { super(configuration, prismContext); this.bucketsConfiguration = (StringWorkSegmentationType) TaskWorkStateTypeUtil.getWorkSegmentationConfiguration(configuration); this.marking = defaultIfNull(bucketsConfiguration.getComparisonMethod(), INTERVAL); this.boundaries = processBoundaries(); }
public ObjectQuery narrowQueryForWorkBucket(Task workerTask, ObjectQuery query, Class<? extends ObjectType> type, Function<ItemPath, ItemDefinition<?>> itemDefinitionProvider, WorkBucketType workBucket, OperationResult result) throws SchemaException, ObjectNotFoundException { Context ctx = createContext(workerTask.getOid(), () -> true, result); TaskWorkManagementType config = ctx.getWorkStateConfiguration(); AbstractWorkSegmentationType bucketsConfig = TaskWorkStateTypeUtil.getWorkSegmentationConfiguration(config); WorkBucketContentHandler handler = handlerFactory.getHandler(workBucket.getContent()); List<ObjectFilter> conjunctionMembers = new ArrayList<>( handler.createSpecificFilters(workBucket, bucketsConfig, type, itemDefinitionProvider)); if (conjunctionMembers.isEmpty()) { return query; } ObjectFilter existingFilter = query != null ? query.getFilter() : null; if (existingFilter != null) { conjunctionMembers.add(existingFilter); } ObjectFilter updatedFilter; if (conjunctionMembers.isEmpty()) { updatedFilter = null; } else if (conjunctionMembers.size() == 1) { updatedFilter = conjunctionMembers.get(0); } else { updatedFilter = prismContext.queryFactory().createAnd(conjunctionMembers); } ObjectQuery updatedQuery = query != null ? query.clone() : prismContext.queryFactory().createQuery(); updatedQuery.setFilter(updatedFilter); // TODO update sorting criteria return updatedQuery; }