@Override
public void addPurgeAllLuceneWork(String entityClassName) {
    final IndexedTypeIdentifier fromName = typesRegistry.keyFromName( entityClassName );
    results.add( new PurgeAllLuceneWork( fromName ) );
}
@Override
public String toString() {
    String tenant = getTenantId() == null ? "" : " [" + getTenantId() + "] ";
    return "PurgeAllLuceneWork" + tenant + ": " + this.getEntityType().getName();
}
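For orientation, a minimal sketch of the strings this toString() produces. The Book entity, the bookType identifier, and the tenant value are assumptions made up for the example; only the constructors and the output format come from the snippets on this page (packages as of Hibernate Search 5.x).

    // Book is a hypothetical indexed entity; see
    // org.hibernate.search.backend.PurgeAllLuceneWork and
    // org.hibernate.search.spi.impl.PojoIndexedTypeIdentifier.
    IndexedTypeIdentifier bookType = new PojoIndexedTypeIdentifier( Book.class );

    System.out.println( new PurgeAllLuceneWork( "tenant1", bookType ) );
    // -> PurgeAllLuceneWork [tenant1] : com.acme.Book

    System.out.println( new PurgeAllLuceneWork( bookType ) );
    // -> PurgeAllLuceneWork: com.acme.Book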
@Override
public BackendRequest<?> visitPurgeAllWork(PurgeAllLuceneWork work, Boolean refresh) {
    // TODO This requires the delete-by-query plug-in on ES 2.0 and beyond; alternatively
    // the type mappings could be deleted. Think about implications for concurrent access.
    String query = work.getTenantId() != null
            ? String.format( Locale.ENGLISH, DELETE_ALL_FOR_TENANT_QUERY, work.getTenantId() )
            : DELETE_ALL_QUERY;

    DeleteByQuery.Builder builder = new DeleteByQuery.Builder( query )
            .refresh( refresh )
            .addIndex( indexName );

    Set<Class<?>> typesToDelete = searchIntegrator.getIndexedTypesPolymorphic( new Class<?>[] { work.getEntityClass() } );
    for ( Class<?> typeToDelete : typesToDelete ) {
        builder.addType( typeToDelete.getName() );
    }

    return new BackendRequest<>( builder.build(), work );
}
private static void assertPurgeAll(PurgeAllLuceneWork work, PurgeAllLuceneWork copy) {
    assertThat( work.getEntityType() ).as( "PurgeAllLuceneWork.getEntityType is not copied" )
            .isEqualTo( copy.getEntityType() );
}
/**
 * Enqueues all work needed to be performed according to current state into
 * the LuceneWork queue.
 *
 * @param luceneQueue work will be appended to this list
 */
public void enqueueLuceneWork(List<LuceneWork> luceneQueue) {
    final Set<Entry<Serializable, PerEntityWork>> entityInstances = entityById.entrySet();
    final ConversionContext conversionContext = new ContextualExceptionBridgeHelper();
    if ( purgeAll ) {
        luceneQueue.add( new PurgeAllLuceneWork( tenantId, typeIdentifier ) );
    }
    for ( DeletionQuery delQuery : this.deletionQueries ) {
        luceneQueue.add( new DeleteByQueryLuceneWork( tenantId, typeIdentifier, delQuery ) );
    }
    for ( Entry<Serializable, PerEntityWork> entry : entityInstances ) {
        Serializable indexingId = entry.getKey();
        PerEntityWork perEntityWork = entry.getValue();
        String tenantIdentifier = perEntityWork.getTenantIdentifier();
        perEntityWork.enqueueLuceneWork( tenantIdentifier, typeIdentifier, indexingId, documentBuilder, luceneQueue, conversionContext );
    }
}
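A hedged usage sketch of enqueueLuceneWork(): workPlan stands in for an instance of the enclosing per-type work-plan class, whose name is not shown in the snippet; the ordering noted in the comments follows directly from the code above.

    List<LuceneWork> queue = new ArrayList<>();
    workPlan.enqueueLuceneWork( queue );
    // Per the method above, the queue now holds, in order:
    // 1. a PurgeAllLuceneWork, if purgeAll was requested;
    // 2. one DeleteByQueryLuceneWork per registered DeletionQuery;
    // 3. the per-entity works, one batch per pending entity id.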
@Override
public ElasticsearchWork<?> visitPurgeAllWork(PurgeAllLuceneWork work, IndexingMonitor monitor) {
    JsonObject payload = createDeleteByQueryPayload(
            JsonBuilder.object().add( "match_all", new JsonObject() ).build(),
            work.getTenantId()
    );
    DeleteByQueryWorkBuilder builder = workFactory.deleteByQuery( indexName, payload )
            .luceneWork( work )
            .markIndexDirty( refreshAfterWrite );

    /*
     * Deleting only the given type.
     * Inheritance trees are handled at a higher level by creating multiple purge works.
     */
    builder.type( URLEncodedString.fromString( work.getEntityType().getName() ) );

    return builder.build();
}
public void purge(IndexedTypeSet entityTypes) {
    performShardAwareOperation( entityTypes, (im, type) -> {
        im.performStreamOperation( new PurgeAllLuceneWork( type ), progressMonitor, false );
    } );
    flush( entityTypes );
}
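A usage sketch for purge(): indexHolder stands in for an instance of the enclosing class, Book is a hypothetical indexed entity, and IndexedTypeSets.fromClass is assumed to be available (it is in Hibernate Search 5.7+).

    // Purge every indexed document of the hypothetical Book entity; per the
    // method above, purge() streams one PurgeAllLuceneWork to each shard-aware
    // index manager and then flushes so the deletions become visible.
    IndexedTypeSet bookTypes = IndexedTypeSets.fromClass( Book.class );
    indexHolder.purge( bookTypes );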
/**
 * Optional operations to do before the multiple threads start indexing.
 *
 * @param backend the batch backend to send work to
 */
private void beforeBatch(BatchBackend backend) {
    if ( this.purgeAtStart ) {
        // purgeAll for affected entities
        IndexedTypeSet targetedTypes = searchFactoryImplementor.getIndexedTypesPolymorphic( rootIndexedTypes );
        for ( IndexedTypeIdentifier type : targetedTypes ) {
            // needs to be in-sync work to make sure we wait for the end of it
            backend.doWorkInSync( new PurgeAllLuceneWork( tenantId, type ) );
        }
        if ( this.optimizeAfterPurge ) {
            backend.optimize( targetedTypes );
        }
    }
}
/**
 * Optional operations to do before the multiple threads start indexing.
 *
 * @param backend the batch backend to send work to
 */
private void beforeBatch(BatchBackend backend) {
    if ( this.purgeAtStart ) {
        // purgeAll for affected entities
        IndexedTypeSet targetedClasses = extendedIntegrator.getIndexedTypesPolymorphic( rootEntities );
        for ( IndexedTypeIdentifier type : targetedClasses ) {
            // needs to be in-sync work to make sure we wait for the end of it
            backend.doWorkInSync( new PurgeAllLuceneWork( tenantId, type ) );
        }
        if ( this.optimizeAfterPurge ) {
            backend.optimize( targetedClasses );
        }
    }
}
private List<LuceneWork> buildLuceneWorkList() throws Exception {
    List<LuceneWork> works = new ArrayList<>();
    works.add( OptimizeLuceneWork.INSTANCE );
    works.add( OptimizeLuceneWork.INSTANCE );
    IndexedTypeIdentifier type = new PojoIndexedTypeIdentifier( RemoteEntity.class );
    works.add( new OptimizeLuceneWork( type ) ); // won't be sent over
    works.add( new PurgeAllLuceneWork( type ) );
    works.add( new PurgeAllLuceneWork( type ) );
    works.add( new DeleteLuceneWork( 123L, "123", type ) );
    works.add( new DeleteLuceneWork( "Foo", "Bar", type ) );
    works.add( new AddLuceneWork( 125, "125", type, new Document() ) );
    return works;
}
private List<LuceneWork> buildLuceneWorkList() throws Exception {
    List<LuceneWork> works = new ArrayList<LuceneWork>();
    works.add( OptimizeLuceneWork.INSTANCE );
    works.add( OptimizeLuceneWork.INSTANCE );
    works.add( new OptimizeLuceneWork( remoteTypeId ) ); // won't be sent over
    works.add( new PurgeAllLuceneWork( remoteTypeId ) );
    works.add( new PurgeAllLuceneWork( remoteTypeId ) );
    works.add( new DeleteByQueryLuceneWork( remoteTypeId, new SingularTermDeletionQuery( "key", "value" ) ) );
    works.add( new DeleteLuceneWork( 123L, "123", remoteTypeId ) );
    works.add( new DeleteLuceneWork( "Sissi", "Sissi", remoteTypeId ) );
    works.add( new DeleteLuceneWork( new URL( "http://emmanuelbernard.com" ), "http://emmanuelbernard.com", remoteTypeId ) );

    Document doc = buildDocumentWithNumericFields();
    Map<String, String> analyzers = new HashMap<String, String>();
    analyzers.put( "godo", "ngram" );
    works.add( new AddLuceneWork( 123, "123", remoteTypeId, doc, analyzers ) );

    doc = buildDocumentWithMultipleMixedTypeFields();
    works.add( new UpdateLuceneWork( 1234, "1234", remoteTypeId, doc ) );
    works.add( new AddLuceneWork( 125, "125", remoteTypeId, new Document() ) );

    return works;
}
works.add( OptimizeLuceneWork.INSTANCE );
works.add( new OptimizeLuceneWork( remoteTypeId ) ); // class won't be sent over
works.add( new PurgeAllLuceneWork( remoteTypeId ) );
works.add( new PurgeAllLuceneWork( remoteTypeId ) );
works.add( new DeleteLuceneWork( 123L, "123", remoteTypeId ) );
works.add( new DeleteLuceneWork( "Sissi", "Sissi", remoteTypeId ) );