private boolean reindexUserDataRegion(Integer bucketId, PartitionedRegion userRegion, PartitionedRegion fileRegion, BucketRegion dataBucket, IndexRepository repo) throws IOException { Set<IndexRepository> affectedRepos = new HashSet<IndexRepository>(); for (Object key : dataBucket.keySet()) { Object value = getValue(userRegion.getEntry(key)); if (value != null) { repo.update(key, value); } else { repo.delete(key); } affectedRepos.add(repo); } for (IndexRepository affectedRepo : affectedRepos) { affectedRepo.commit(); } // fileRegion ops (get/put) need bucketId as a callbackArg for PartitionResolver fileRegion.put(APACHE_GEODE_INDEX_COMPLETE, APACHE_GEODE_INDEX_COMPLETE, bucketId); return true; }
/**
 * Delegates repository creation to the superclass, then wraps the result in a
 * Mockito spy whose update/create/delete first invoke the {@code beforeWrite}
 * test hook (with the entry key) before calling through to the real method.
 * Returns null when the superclass produced no repository, and returns the
 * repository unchanged when it is already a spy.
 */
@Override
public IndexRepository computeIndexRepository(final Integer bucketId, LuceneSerializer serializer,
    InternalLuceneIndex index, PartitionedRegion userRegion, IndexRepository oldRepository,
    PartitionedRepositoryManager partitionedRepositoryManager) throws IOException {
  final IndexRepository delegate = super.computeIndexRepository(bucketId, serializer, index,
      userRegion, oldRepository, partitionedRepositoryManager);
  if (delegate == null) {
    return null;
  }
  // A reused repository may already be wrapped — never double-spy it.
  if (mockingDetails(delegate).isSpy()) {
    return delegate;
  }
  final IndexRepository spiedRepo = Mockito.spy(delegate);
  // Run the test hook on the key argument, then fall through to the real write.
  Answer hookThenRealMethod = invocation -> {
    beforeWrite.accept(invocation.getArgument(0));
    return invocation.callRealMethod();
  };
  doAnswer(hookThenRealMethod).when(spiedRepo).update(any(), any());
  doAnswer(hookThenRealMethod).when(spiedRepo).create(any(), any());
  doAnswer(hookThenRealMethod).when(spiedRepo).delete(any());
  return spiedRepo;
}
/**
 * Returns the index repository for the given bucket, computing a fresh one when
 * none is cached or the cached repository has been closed.
 *
 * @param bucketId the user-region bucket id
 * @throws BucketNotFoundException when no repository can be computed because this
 *         member is no longer primary for the bucket
 */
@Override
protected IndexRepository getRepository(Integer bucketId) throws BucketNotFoundException {
  IndexRepository repo = indexRepositories.get(bucketId);
  if (repo != null && !repo.isClosed()) {
    return repo;
  }
  repo = computeRepository(bucketId);
  // Consistency fix: the sibling implementation of this method translates a null
  // result into the declared BucketNotFoundException instead of returning null,
  // which callers would otherwise dereference.
  if (repo == null) {
    throw new BucketNotFoundException(
        "Unable to find lucene index because no longer primary for bucket " + bucketId);
  }
  return repo;
}
/**
 * Builds the mock object graph used by these tests:
 * context -> data region -> cache -> lucene service -> index -> repository
 * manager -> repository, whose IndexWriter exposes a RegionDirectory backed by
 * the mocked FileSystem. The repository's region reports {@code bucketName} as
 * its full path.
 */
@Before
public void createMocks() throws BucketNotFoundException {
  GemFireCacheImpl cache = Fakes.cache();
  context = mock(RegionFunctionContext.class);
  ResultSender resultSender = mock(ResultSender.class);
  Region dataRegion = mock(Region.class);
  InternalLuceneService luceneService = mock(InternalLuceneService.class);
  InternalLuceneIndex luceneIndex = mock(InternalLuceneIndex.class);
  RepositoryManager manager = mock(RepositoryManager.class);
  IndexRepository repository = mock(IndexRepository.class);
  IndexWriter indexWriter = mock(IndexWriter.class);
  RegionDirectory regionDirectory = mock(RegionDirectory.class);
  fileSystem = mock(FileSystem.class);
  Region bucketRegion = mock(Region.class);

  // Function-context wiring.
  when(context.getArguments()).thenReturn(new String[] {directoryName, indexName});
  when(context.getResultSender()).thenReturn(resultSender);
  when(context.getDataSet()).thenReturn(dataRegion);
  when(dataRegion.getCache()).thenReturn(cache);

  // Lucene service / index wiring.
  when(cache.getService(any())).thenReturn(luceneService);
  when(luceneService.getIndex(eq(indexName), any())).thenReturn(luceneIndex);
  when(luceneIndex.getName()).thenReturn(indexName);
  when(luceneIndex.getRepositoryManager()).thenReturn(manager);
  when(manager.getRepositories(eq(context))).thenReturn(Collections.singleton(repository));

  // Repository -> writer -> directory -> file system chain.
  when(bucketRegion.getFullPath()).thenReturn(bucketName);
  when(regionDirectory.getFileSystem()).thenReturn(fileSystem);
  when(indexWriter.getDirectory()).thenReturn(regionDirectory);
  when(repository.getWriter()).thenReturn(indexWriter);
  when(repository.getRegion()).thenReturn(bucketRegion);
}
/**
 * An IOException thrown by the repository during query() must surface from
 * LuceneQueryFunction.execute as a FunctionException.
 */
@Test(expected = FunctionException.class)
public void testIndexRepoQueryFails() throws Exception {
  when(mockContext.getDataSet()).thenReturn(mockRegion);
  when(mockContext.getArguments()).thenReturn(searchArgs);
  when(mockContext.getResultSender()).thenReturn(mockResultSender);
  when(mockRepoManager.getRepositories(eq(mockContext), eq(false))).thenReturn(repos);
  // Make the first repository fail its query.
  doThrow(IOException.class).when(mockRepository1).query(eq(query),
      eq(LuceneQueryFactory.DEFAULT_LIMIT), any(IndexResultCollector.class));

  new LuceneQueryFunction().execute(mockContext);
}
/**
 * Creates a fresh IndexRepository for one bucket of a raw Lucene index. Any
 * previous repository for the bucket is cleaned up first. Persistent indexes
 * get an NIOFSDirectory under {@code <indexName>/<uniqueIndexName_bucketId>};
 * non-persistent indexes use an in-memory RAMDirectory.
 *
 * @return a new IndexRepositoryImpl bound to the bucket's matching BucketRegion
 * @throws IOException if the on-disk directory cannot be created or the
 *         IndexWriter cannot be opened
 */
@Override
public IndexRepository computeIndexRepository(final Integer bucketId, LuceneSerializer serializer,
    InternalLuceneIndex index, PartitionedRegion userRegion, IndexRepository oldRepository,
    PartitionedRepositoryManager partitionedRepositoryManager) throws IOException {
  // (Removed an unused local "final IndexRepository repo;" from the original.)
  if (oldRepository != null) {
    oldRepository.cleanup();
  }
  LuceneRawIndex indexForRaw = (LuceneRawIndex) index;
  BucketRegion dataBucket = getMatchingBucket(userRegion, bucketId);
  Directory dir;
  if (indexForRaw.withPersistence()) {
    String bucketLocation = LuceneServiceImpl.getUniqueIndexName(index.getName(),
        index.getRegionPath() + "_" + bucketId);
    File location = new File(index.getName(), bucketLocation);
    if (!location.exists()) {
      // Fail fast on directory-creation failure; previously a failed mkdirs()
      // was silently ignored and NIOFSDirectory would fail later with a less
      // helpful error. The isDirectory() re-check tolerates a concurrent create.
      if (!location.mkdirs() && !location.isDirectory()) {
        throw new IOException("Unable to create index directory " + location.getAbsolutePath());
      }
    }
    dir = new NIOFSDirectory(location.toPath());
  } else {
    dir = new RAMDirectory();
  }
  IndexWriterConfig config = new IndexWriterConfig(indexForRaw.getAnalyzer());
  IndexWriter writer = new IndexWriter(dir, config);
  return new IndexRepositoryImpl(null, writer, serializer, indexForRaw.getIndexStats(), dataBucket,
      null, "", indexForRaw);
}
}
// NOTE(review): fragment — this span starts and ends inside an enclosing method
// (the lambda opened here is closed outside this view).
// For each repository of the index, derive the on-disk directory its bucket's
// files will be exported to.
final Collection<IndexRepository> repositories = repoManager.getRepositories(ctx);
repositories.stream().forEach(repo -> {
  final IndexWriter writer = repo.getWriter();
  // The writer's directory is region-backed; unwrap it to reach the FileSystem.
  RegionDirectory directory = (RegionDirectory) writer.getDirectory();
  FileSystem fs = directory.getFileSystem();
  // Directory name is <indexName>_<bucketFullPath> with '/' flattened to '_'
  // so it is a single path component.
  String bucketName = index.getName() + "_" + repo.getRegion().getFullPath();
  bucketName = bucketName.replace("/", "_");
  File bucketDirectory = new File(exportLocation, bucketName);
/**
 * Verifies that a user-supplied CollectorManager is honored end to end: the
 * function asks it for a collector for "repo2", feeds the repository's hit into
 * that collector, passes exactly that one collector to reduce(), and sends the
 * reduced TopEntriesCollector as the last result.
 */
@Test
public void injectCustomCollectorManager() throws Exception {
  final CollectorManager mockManager = mock(CollectorManager.class);
  searchArgs =
      new LuceneFunctionContext<IndexResultCollector>(queryProvider, "indexName", mockManager);
  when(mockContext.getDataSet()).thenReturn(mockRegion);
  when(mockContext.getArguments()).thenReturn(searchArgs);
  when(mockContext.getResultSender()).thenReturn(mockResultSender);
  // Drop the first repository so only repo2 participates.
  repos.remove(0);
  when(mockRepoManager.getRepositories(eq(mockContext), eq(false))).thenReturn(repos);
  when(mockManager.newCollector(eq("repo2"))).thenReturn(mockCollector);
  when(mockManager.reduce(any(Collection.class))).thenAnswer(invocation -> {
    // reduce() must receive exactly the one collector we handed out above.
    Collection<IndexResultCollector> collected = invocation.getArgument(0);
    assertEquals(1, collected.size());
    assertEquals(mockCollector, collected.iterator().next());
    return new TopEntriesCollector(null);
  });
  doAnswer(invocation -> {
    // Simulate repo2 producing a single hit into the provided collector.
    IndexResultCollector resultCollector = invocation.getArgument(2);
    resultCollector.collect(r2_1.getKey(), r2_1.getScore());
    return null;
  }).when(mockRepository2).query(eq(query), eq(LuceneQueryFactory.DEFAULT_LIMIT),
      any(IndexResultCollector.class));

  new LuceneQueryFunction().execute(mockContext);

  verify(mockCollector).collect(eq("key-2-1"), eq(.45f));
  verify(mockResultSender).lastResult(any(TopEntriesCollector.class));
}
/**
 * Computes (or recomputes) the repository for a bucket, blocking until the data
 * region is ready. The computation runs atomically inside
 * {@code indexRepositories.compute} so concurrent callers for the same bucket
 * serialize on the map entry.
 *
 * @param bucketId the bucket to build a repository for
 * @return the repository stored in the map (may be null if the factory returned null)
 * @throws InternalGemFireError if interrupted while waiting or on I/O failure
 * @throws LuceneIndexDestroyedException if the index has been closed
 */
protected IndexRepository computeRepository(Integer bucketId) {
  try {
    isDataRegionReady.await();
  } catch (InterruptedException e) {
    // Restore the interrupt flag so callers further up the stack still see it.
    Thread.currentThread().interrupt();
    throw new InternalGemFireError("Unable to create index repository", e);
  }
  return indexRepositories.compute(bucketId, (key, oldRepository) -> {
    try {
      if (closed) {
        // Index is being destroyed: release the stale repository and abort.
        if (oldRepository != null) {
          oldRepository.cleanup();
        }
        throw new LuceneIndexDestroyedException(index.getName(), index.getRegionPath());
      }
      return computeRepository(bucketId, serializer, index, userRegion, oldRepository);
    } catch (IOException e) {
      throw new InternalGemFireError("Unable to create index repository", e);
    }
  });
}
// NOTE(review): fragment appears garbled — two doAnswer-style stub bodies are
// fused here without their opening "doAnswer(invocation -> {" lines (and the
// "collector" declarations they would contain). Verify against the original
// test source; as written this span is not syntactically complete.
collector.collect(r1_3.getKey(), r1_3.getScore());
return null;
}).when(mockRepository1).query(eq(query), eq(LuceneQueryFactory.DEFAULT_LIMIT),
    any(IndexResultCollector.class));
collector.collect(r2_2.getKey(), r2_2.getScore());
return null;
}).when(mockRepository2).query(eq(query), eq(LuceneQueryFactory.DEFAULT_LIMIT),
    any(IndexResultCollector.class));
/**
 * Returns the index repository backing the given user bucket. A cached, open
 * repository is returned as-is; otherwise a new one is computed.
 *
 * @param bucketId the user-region bucket id
 * @throws BucketNotFoundException when no repository can be computed because this
 *         member is no longer primary for the bucket
 */
protected IndexRepository getRepository(Integer bucketId) throws BucketNotFoundException {
  final IndexRepository cached = indexRepositories.get(bucketId);
  if (cached != null && !cached.isClosed()) {
    return cached;
  }
  final IndexRepository fresh = computeRepository(bucketId);
  if (fresh == null) {
    throw new BucketNotFoundException(
        "Unable to find lucene index because no longer primary for bucket " + bucketId);
  }
  return fresh;
}
// NOTE(review): fragment — part of a loop over repositories in the enclosing method.
// Use parameterized logging so the message string is only built when the debug
// level is enabled, instead of eagerly concatenating repo.toString().
logger.debug("Executing search on repo: {}", repo);
// Run the query against this repository, accumulating hits in the collector.
repo.query(query, resultLimit, collector);
results.add(collector);