/**
 * Hook invoked once all indexing subthreads have completed their work:
 * optionally optimizes the affected indexes, then flushes pending changes.
 *
 * @param backend the batch backend coordinating the index operations
 */
private void afterBatch(BatchBackend backend) {
	IndexedTypeSet affectedTypes = extendedIntegrator.getIndexedTypesPolymorphic( rootEntities );
	if ( this.optimizeAtEnd ) {
		backend.optimize( affectedTypes );
	}
	backend.flush( affectedTypes );
}
/** * Optional operations to do before the multiple-threads start indexing * @param backend */ private void beforeBatch(BatchBackend backend) { if ( this.purgeAtStart ) { //purgeAll for affected entities IndexedTypeSet targetedClasses = extendedIntegrator.getIndexedTypesPolymorphic( rootEntities ); for ( IndexedTypeIdentifier type : targetedClasses ) { //needs do be in-sync work to make sure we wait for the end of it. backend.doWorkInSync( new PurgeAllLuceneWork( tenantId, type ) ); } if ( this.optimizeAfterPurge ) { backend.optimize( targetedClasses ); } } }
/**
 * Builds an update work for the given entity and queues it asynchronously
 * on the batch backend.
 *
 * @param id the identifier of the entity to index
 * @param entity the entity instance to index
 * @throws InterruptedException if the thread is interrupted while enqueueing
 */
private void indexAsStream(Serializable id, Object entity) throws InterruptedException {
	LuceneWork updateWork = createUpdateWork( id, entity );
	sfHolder.getBatchBackend().enqueueAsyncWork( updateWork );
}
/**
 * Hook invoked when batch indexing was interrupted: flushes so that all index
 * updates performed before the interruption are applied.
 *
 * @param backend the batch backend coordinating the index operations
 */
private void afterBatchOnInterruption(BatchBackend backend) {
	IndexedTypeSet affectedTypes = extendedIntegrator.getIndexedTypesPolymorphic( rootEntities );
	backend.flush( affectedTypes );
}
private void index(Object entity, Session session, InstanceInitializer sessionInitializer, ConversionContext conversionContext) throws InterruptedException { Class<?> clazz = HibernateHelper.getClass( entity ); EntityIndexBinding entityIndexBinding = entityIndexBindings.get( clazz ); // it might be possible to receive not-indexes subclasses of the currently indexed type; // being not-indexed, we skip them. // FIXME for improved performance: avoid loading them in an early phase. if ( entityIndexBinding != null ) { EntityIndexingInterceptor<?> interceptor = entityIndexBinding.getEntityIndexingInterceptor(); if ( isNotSkippable( interceptor, entity ) ) { Serializable id = session.getIdentifier( entity ); AddLuceneWork addWork = createAddLuceneWork( tenantId, entity, sessionInitializer, conversionContext, id, entityIndexBinding ); backend.enqueueAsyncWork( addWork ); } } }
/**
 * Flushes pending index changes for the index mapped to the given entity type.
 *
 * @param clazz the indexed entity type whose index should be flushed
 */
private void flush(Class<?> clazz) {
	IndexedTypeSet typeSet = IndexedTypeSets.fromClass( clazz );
	sfHolder.getBatchBackend().flush( typeSet );
}
/**
 * Hook invoked once all indexing subthreads have completed their work:
 * optionally optimizes the targeted indexes, then flushes pending changes.
 *
 * @param backend the batch backend coordinating the index operations
 */
private void afterBatch(BatchBackend backend) {
	IndexedTypeSet typesToFinalize = searchFactoryImplementor.getIndexedTypesPolymorphic( rootIndexedTypes );
	if ( this.optimizeAtEnd ) {
		backend.optimize( typesToFinalize );
	}
	backend.flush( typesToFinalize );
}
/** * Optional operations to do before the multiple-threads start indexing * * @param backend */ private void beforeBatch(BatchBackend backend) { if ( this.purgeAtStart ) { // purgeAll for affected entities IndexedTypeSet targetedTypes = searchFactoryImplementor.getIndexedTypesPolymorphic( rootIndexedTypes ); for ( IndexedTypeIdentifier type : targetedTypes ) { // needs do be in-sync work to make sure we wait for the end of it. backend.doWorkInSync( new PurgeAllLuceneWork( tenantId, type ) ); } if ( this.optimizeAfterPurge ) { backend.optimize( targetedTypes ); } } }
conversionContext ); backend.enqueueAsyncWork( addWork );