/**
 * Adds a work to the current plan. The entityClass of the work must be of the
 * type managed by this.
 *
 * @param work the {@code Work} instance to add to the plan
 */
public void addWork(Work work) {
	WorkType workType = work.getType();
	if ( WorkType.PURGE_ALL == workType ) {
		// A purge-all supersedes everything queued so far: drop per-entity work
		// and pending deletion queries, then record the purge flag.
		entityById.clear();
		this.deletionQueries.clear();
		purgeAll = true;
		return;
	}
	if ( WorkType.DELETE_BY_QUERY == workType ) {
		// Deletion-by-query works are collected separately from per-entity work.
		DeleteByQueryWork queryWork = (DeleteByQueryWork) work;
		this.deletionQueries.add( queryWork.getDeleteByQuery() );
		return;
	}
	// Any other work type is grouped by entity id so that multiple operations
	// on the same entity can be coalesced by PerEntityWork.
	Serializable id = extractProperId( work );
	PerEntityWork entityWork = entityById.get( id );
	if ( entityWork == null ) {
		entityWork = new PerEntityWork( work );
		entityById.put( id, entityWork );
	}
	entityWork.addWork( work );
}
// NOTE(review): this snippet is truncated/garbled — it contains TWO `default:` labels
// (a Java switch statement allows at most one; as written this cannot compile) and it
// ends dangling on the trailing `default:`. The surviving logic dispatches on the work
// type to apply an EntityIndexingInterceptor decision: ADD consults interceptor.onAdd,
// SKIP nulls the result and logs, UPDATE rebuilds the Work as an UPDATE and logs,
// REMOVE logs, and the (first) default throws AssertionFailure for unknown types.
// TODO: recover the original source and remove the duplicated `default:` label.
switch ( work.getType() ) { case ADD: operation = interceptor.onAdd( work.getEntity() ); break; default: throw new AssertionFailure( "Unknown work type: " + work.getType() ); case SKIP: result = null; log.forceSkipIndexOperationViaInterception( entityType, work.getType() ); break; case UPDATE: result = new Work( work.getTenantIdentifier(), work.getEntity(), work.getId(), WorkType.UPDATE ); log.forceUpdateOnIndexOperationViaInterception( entityType, work.getType() ); break; case REMOVE: log.forceRemoveOnIndexOperationViaInterception( entityType, work.getType() ); break; default:
/**
 * Adds a work to the current plan. The entityClass of the work must be of the
 * type managed by this.
 *
 * @param work the {@code Work} instance to add to the plan
 */
public void addWork(Work work) {
	WorkType workType = work.getType();
	if ( WorkType.PURGE_ALL == workType ) {
		// A purge-all supersedes everything queued so far: drop per-entity work
		// and pending deletion queries, then record the purge flag.
		entityById.clear();
		this.deletionQueries.clear();
		purgeAll = true;
		return;
	}
	if ( WorkType.DELETE_BY_QUERY == workType ) {
		// Deletion-by-query works are collected separately from per-entity work.
		DeleteByQueryWork queryWork = (DeleteByQueryWork) work;
		this.deletionQueries.add( queryWork.getDeleteByQuery() );
		return;
	}
	// Any other work type is grouped by entity id so that multiple operations
	// on the same entity can be coalesced by PerEntityWork.
	Serializable id = extractProperId( work );
	PerEntityWork entityWork = entityById.get( id );
	if ( entityWork == null ) {
		entityWork = new PerEntityWork( work );
		entityById.put( id, entityWork );
	}
	entityWork.addWork( work );
}
// NOTE(review): this snippet is truncated/garbled — it contains TWO `default:` labels
// (a Java switch statement allows at most one; as written this cannot compile) and it
// ends dangling on the trailing `default:`. The surviving logic dispatches on the work
// type to apply an EntityIndexingInterceptor decision: ADD consults interceptor.onAdd,
// SKIP nulls the result and logs, UPDATE rebuilds the Work as an UPDATE and logs,
// REMOVE logs, and the (first) default throws AssertionFailure for unknown types.
// TODO: recover the original source and remove the duplicated `default:` label.
switch ( work.getType() ) { case ADD: operation = interceptor.onAdd( work.getEntity() ); break; default: throw new AssertionFailure( "Unknown work type: " + work.getType() ); case SKIP: result = null; log.forceSkipIndexOperationViaInterception( entityType, work.getType() ); break; case UPDATE: result = new Work( work.getTenantIdentifier(), work.getEntity(), work.getId(), WorkType.UPDATE ); log.forceUpdateOnIndexOperationViaInterception( entityType, work.getType() ); break; case REMOVE: log.forceRemoveOnIndexOperationViaInterception( entityType, work.getType() ); break; default:
// NOTE(review): truncated fragment — the switch opens on the work type and is cut off
// immediately after `case INDEX:`; the case bodies and closing brace are not visible
// here, so no behavior can be stated beyond "dispatch on WorkType".
WorkType type = work.getType(); switch ( type ) { case INDEX:
// NOTE(review): truncated fragment — the switch opens on the work type and is cut off
// immediately after `case INDEX:`; the case bodies and closing brace are not visible
// here, so no behavior can be stated beyond "dispatch on WorkType".
WorkType type = work.getType(); switch ( type ) { case INDEX:
/**
 * Records an applied {@code Work} in the work log; when it is an ADD,
 * also remembers its id as the last added document id.
 *
 * @param work the work that was just applied
 */
public void workApplied(Work work) {
	workLog.add( work );
	WorkType appliedType = work.getType();
	// Keep the equals() call on the type (not on the ADD constant) so a null
	// type fails the same way the original code did.
	if ( appliedType.equals( ADD ) ) {
		lastAddedDocumentId = (Integer) work.getId();
	}
}
// NOTE(review): fragment of a larger method/constructor — unpacks the Work into
// fields/locals declared outside this view (`entity`, `tenantId`); surrounding
// context is not visible, so their declarations must be confirmed elsewhere.
entity = work.getEntity(); tenantId = work.getTenantIdentifier(); WorkType type = work.getType();
// NOTE(review): fragment of a larger method/constructor — unpacks the Work into
// fields/locals declared outside this view (`entity`, `tenantId`); surrounding
// context is not visible, so their declarations must be confirmed elsewhere.
entity = work.getEntity(); tenantId = work.getTenantIdentifier(); WorkType type = work.getType();
/**
 * Calculates the expected number of documents in the index by replaying
 * the work log, taking deletes, adds and updates into account.
 *
 * @return the expected index size
 */
public int calculateIndexSize() {
	Set<Serializable> liveIds = new HashSet<>();
	for ( Work entry : workLog ) {
		boolean isDelete = entry.getType().equals( DELETE );
		if ( isDelete ) {
			liveIds.remove( entry.getId() );
		}
		else {
			// ADD and UPDATE both leave exactly one live document per id
			liveIds.add( entry.getId() );
		}
	}
	return liveIds.size();
}
}