@Override public void doWorkInSync(LuceneWork work) { //FIXME I need a "Force sync" actually for when using PurgeAll before the indexing starts transactionalDispatcher.dispatch( work, progressMonitor ); }
@Override public void doWorkInSync(LuceneWork work) { //FIXME I need a "Force sync" actually for when using PurgeAll before the indexing starts transactionalDispatcher.dispatch( work, progressMonitor ); }
@Override
public void enqueueAsyncWork(LuceneWork work) throws InterruptedException {
	// Asynchronous path: hand the work unit to the streaming dispatcher.
	streamingDispatcher.dispatch( work, progressMonitor );
}
@Override
public void enqueueAsyncWork(LuceneWork work) throws InterruptedException {
	// Queue the work on the streaming (async) dispatcher; progress is reported
	// through the shared monitor.
	streamingDispatcher.dispatch( work, progressMonitor );
}
/**
 * Applies a list of Lucene works received from a slave node on this node.
 * An empty or missing queue is logged and ignored.
 *
 * @param queue the Lucene works extracted from the incoming message (may be null or empty)
 * @param message the originating cluster message, used here only to log the sender
 */
private void applyLuceneWorkLocally(List<LuceneWork> queue, Message message) {
	// Guard clause: nothing to apply when the peer sent no work.
	if ( queue == null || queue.isEmpty() ) {
		log.receivedEmptyLuceneWorksInMessage();
		return;
	}
	if ( log.isDebugEnabled() ) {
		log.debugf(
				"There are %d Lucene docs received from slave node %s to be processed if this node is the master",
				(Integer) queue.size(),
				message.getSrc()
		);
	}
	// No per-operation monitor is attached on this path.
	getOperationDispatcher().dispatch( queue, null );
}
// NOTE(review): bare statement fragments — `dispatcher` and `queue` are declared in
// enclosing scopes not visible in this chunk. Each call forwards the queued works
// with a null IndexingMonitor (presumably progress tracking is not needed on these
// paths — confirm at the actual call sites).
dispatcher.dispatch( queue, null );
dispatcher.dispatch( queue, null );
dispatcher.dispatch( queue, null );
dispatcher.dispatch( queue, null );
@Benchmark @GroupThreads(3 * AbstractBookEntity.TYPE_COUNT) public void write(StreamWriteEngineHolder eh, StreamAddEntityGenerator generator, StreamWriteCounters counters, Blackhole blackhole) { SearchIntegrator si = eh.getSearchIntegrator(); OperationDispatcher streamingDispatcher = new StreamingOperationDispatcher( si, true /* forceAsync */ ); IndexedTypeIdentifier typeId = generator.getTypeId(); DocumentBuilderIndexedEntity docBuilder = si.getIndexBinding( typeId ).getDocumentBuilder(); InstanceInitializer initializer = SimpleInitializer.INSTANCE; ConversionContext conversionContext = new ContextualExceptionBridgeHelper(); IndexingMonitor monitor = blackhole::consume; generator.stream().forEach( book -> { Long id = book.getId(); AddLuceneWork addWork = docBuilder.createAddWork( null, docBuilder.getTypeIdentifier(), book, id, docBuilder.getIdBridge().objectToString( id ), initializer, conversionContext ); streamingDispatcher.dispatch( addWork, monitor ); ++counters.add; } ); // Ensure that we'll block until all works have been performed SearchIntegratorHelper.flush( si, typeId ); }