/**
 * Builds a Lucene processing chain for class-based entities, carrying the
 * query's named parameters.
 *
 * @param namedParameters the named parameters to expose to the processing chain
 * @param fullTextSession the session whose search factory backs the chain
 * @return a processing chain configured for class-based entities
 */
private LuceneProcessingChain createProcessingChain(Map<String, Object> namedParameters, FullTextSession fullTextSession) {
	// Unwrap the integrator backing this session's search factory.
	SearchIntegrator integrator = fullTextSession.getSearchFactory().unwrap( SearchIntegrator.class );
	LuceneProcessingChain.Builder chainBuilder = new LuceneProcessingChain.Builder( integrator, entityNamesResolver );
	chainBuilder.namedParameters( namedParameters );
	return chainBuilder.buildProcessingChainForClassBasedEntities();
}
/** * Verifies this test is being run on the expected deployment and the expected * backend Implementation * @throws IOException */ public void assertConfiguration(String testLabel, String expectedDeploymentName, String expectedBackendImplementation) throws IOException { log.debug( testLabel + " / " + expectedDeploymentName ); // Check the deployment ClassLoader classLoader = this.getClass().getClassLoader(); Properties p = new Properties(); try ( InputStream inputStream = classLoader.getResourceAsStream( DeploymentJmsMasterSlave.CONFIGURATION_PROPERTIES_RESOURCENAME ) ) { p.load( inputStream ); } String actualDeployment = p.getProperty( "deploymentName" ); if ( actualDeployment == null ) { throw new IllegalStateException( "Deployment Name not found in properties" ); } if ( ! expectedDeploymentName.equals( actualDeployment ) ) { throw new IllegalStateException( "Was expecting to run on deployment " + expectedDeploymentName + " but is running on " + actualDeployment + ". Defined by looking into classloader: " + classLoader ); } // Check the running backend type ExtendedSearchIntegrator searchIntegrator = Search.getFullTextEntityManager( em ).getSearchFactory().unwrap( ExtendedSearchIntegrator.class ); BackendQueueProcessor backendQueueProcessor = searchIntegrator.getIndexManagerHolder().getBackendQueueProcessor( "membersIndex" ); final String backendName = backendQueueProcessor.getClass().getName(); if ( ! backendName.equals( expectedBackendImplementation ) ) { throw new IllegalStateException( "Not running the expected backend '" + expectedBackendImplementation + "' but running '" + backendName + "'" ); } }
/**
 * Returns the single directory-based index manager backing the given entity type,
 * asserting that exactly one index manager is bound to it.
 *
 * @param clazz the indexed entity type
 * @return the sole index manager for that entity
 */
private DirectoryBasedIndexManager getSingleIndexManager(Class<?> clazz) {
	SearchIntegrator integrator = getSearchFactory().unwrap( SearchIntegrator.class );
	EntityIndexBinding binding = integrator.getIndexBindings().get( clazz );
	// The entity is expected to be mapped to exactly one index.
	Set<IndexManager> managers = binding.getIndexManagerSelector().all();
	assertEquals( 1, managers.size() );
	return (DirectoryBasedIndexManager) managers.iterator().next();
}
/**
 * Asserts that the SnowStorm entity's single index is backed by a {@code Directory}
 * of the given implementation class.
 *
 * @param factory the session factory to open a session on
 * @param className expected fully qualified class name of the Lucene Directory
 */
private void assertCorrectDirectoryType(SessionFactory factory, String className) {
	Session session = factory.openSession();
	try {
		FullTextSession fullTextSession = Search.getFullTextSession( session );
		SearchIntegrator integrator = fullTextSession.getSearchFactory().unwrap( SearchIntegrator.class );
		EntityIndexBinding snowIndexBinder = integrator.getIndexBindings().get( SnowStorm.class );
		Set<IndexManager> indexManagers = snowIndexBinder.getIndexManagerSelector().all();
		assertTrue( "Wrong number of directory providers", indexManagers.size() == 1 );
		DirectoryBasedIndexManager indexManager = (DirectoryBasedIndexManager) indexManagers.iterator().next();
		Directory directory = indexManager.getDirectoryProvider().getDirectory();
		assertEquals( "Wrong directory provider type", className, directory.getClass().getName() );
	}
	finally {
		// Close the session even when an assertion fails, so the factory can shut down cleanly.
		session.close();
	}
}
/**
 * Counts the number of nodes in the cluster as seen from the given node.
 *
 * @param node the FullTextSessionBuilder representing the current node
 * @param entityType the indexed type whose Infinispan-backed index is inspected
 * @return the number of cluster members visible to the current node
 */
public static int clusterSize(FullTextSessionBuilder node, IndexedTypeIdentifier entityType) {
	SearchIntegrator integrator = node.getSearchFactory().unwrap( SearchIntegrator.class );
	EntityIndexBinding binding = integrator.getIndexBinding( entityType );
	DirectoryBasedIndexManager indexManager =
			(DirectoryBasedIndexManager) binding.getIndexManagerSelector().all().iterator().next();
	InfinispanDirectoryProvider directoryProvider =
			(InfinispanDirectoryProvider) indexManager.getDirectoryProvider();
	// Cluster membership is read from the Infinispan cache manager backing the index.
	EmbeddedCacheManager cacheManager = directoryProvider.getCacheManager();
	return cacheManager.getMembers().size();
}
/**
 * Boots a session factory using the close-tracking directory provider, verifies the
 * provider lifecycle while the factory is alive, and asserts it is stopped on close.
 */
private void testOnce() {
	FullTextSessionBuilder builder = new FullTextSessionBuilder()
			.setProperty( "hibernate.search.default.directory_provider",
					org.hibernate.search.test.directoryProvider.CloseCheckingDirectoryProvider.class.getName() )
			.addAnnotatedClass( SnowStorm.class )
			.build();
	CloseCheckingDirectoryProvider directoryProvider;
	try {
		SearchIntegrator integrator = builder.getSearchFactory().unwrap( SearchIntegrator.class );
		EntityIndexBinding binding = integrator.getIndexBindings().get( SnowStorm.class );
		Set<IndexManager> managers = binding.getIndexManagerSelector().all();
		assertThat( managers.size() ).isEqualTo( 1 );
		IndexManager soleManager = managers.iterator().next();
		assertThat( soleManager ).isInstanceOf( DirectoryBasedIndexManager.class );
		DirectoryBasedIndexManager directoryBasedManager = (DirectoryBasedIndexManager) soleManager;
		assertThat( directoryBasedManager.getDirectoryProvider() ).isInstanceOf( CloseCheckingDirectoryProvider.class );
		directoryProvider = (CloseCheckingDirectoryProvider) directoryBasedManager.getDirectoryProvider();
		// While the factory is alive the provider must be initialized and started, not stopped.
		assertThat( directoryProvider.isInitialized() ).isTrue();
		assertThat( directoryProvider.isStarted() ).isTrue();
		assertThat( directoryProvider.isStopped() ).isFalse();
	}
	finally {
		builder.close();
	}
	// Closing the factory must stop the directory provider.
	assertThat( directoryProvider.isStopped() ).isTrue();
}
/**
 * Counts the number of nodes in the cluster as seen from the given node.
 *
 * @param node the FullTextSessionBuilder representing the current node
 * @param entityType NOTE(review): currently unused — the binding lookup is hard-coded
 *        to {@code TOASTER_TYPE}; confirm whether it should resolve {@code entityType} instead
 * @return the number of cluster members visible to the current node
 */
protected int clusterSize(FullTextSessionBuilder node, Class<?> entityType) {
	SearchIntegrator integrator = node.getSearchFactory().unwrap( SearchIntegrator.class );
	EntityIndexBinding indexBinding = integrator.getIndexBinding( TOASTER_TYPE );
	DirectoryBasedIndexManager indexManager =
			(DirectoryBasedIndexManager) indexBinding.getIndexManagerSelector().all().iterator().next();
	InfinispanDirectoryProvider directoryProvider =
			(InfinispanDirectoryProvider) indexManager.getDirectoryProvider();
	// Cluster membership is read from the Infinispan cache manager backing the index.
	EmbeddedCacheManager cacheManager = directoryProvider.getCacheManager();
	List<Address> members = cacheManager.getMembers();
	return members.size();
}
}
/**
 * Verifies that the configured max queue length (5) was applied to the
 * backend workspace of the Clock entity's single index.
 */
@Test
public void testNothingTest() {
	SearchIntegrator integrator = getSearchFactory().unwrap( SearchIntegrator.class );
	EntityIndexBinding clockBinding = integrator.getIndexBindings().get( Clock.class );
	// Exactly one index manager is expected for Clock.
	Set<IndexManager> managers = clockBinding.getIndexManagerSelector().all();
	assertEquals( 1, managers.size() );
	DirectoryBasedIndexManager manager = (DirectoryBasedIndexManager) managers.iterator().next();
	WorkspaceHolder workspaceHolder = (WorkspaceHolder) manager.getWorkspaceHolder();
	assertEquals( 5, workspaceHolder.getIndexResources().getMaxQueueLength() );
}
private void verifyIndexIsLocked(boolean isLocked, Class type) throws IOException { SearchIntegrator searchIntegrator = builder.getSearchFactory().unwrap( SearchIntegrator.class ); IndexManager indexManager = searchIntegrator.getIndexBindings().get( type ) .getIndexManagerSelector().all().iterator().next(); // No need to check for alternative implementations such as ES if ( indexManager instanceof DirectoryBasedIndexManager ) { Directory directory = ( (DirectoryBasedIndexManager) indexManager ).getDirectoryProvider().getDirectory(); Assert.assertEquals( isLocked, LuceneBackendTestHelpers.isLocked( directory ) ); } }
private Set<IndexManager> getIndexManagersAfterReopening() { // build a new independent SessionFactory to verify that the shards are available at restart Configuration config = new Configuration(); config.setProperty( "hibernate.search.Animal.sharding_strategy", DynamicShardingTest.AnimalShardIdentifierProvider.class.getName() ); // use filesystem based directory provider to be able to assert against index config.setProperty( "hibernate.search.default.directory_provider", "filesystem" ); Path sub = getBaseIndexDir(); config.setProperty( "hibernate.search.default.indexBase", sub.toAbsolutePath().toString() ); config.addAnnotatedClass( Animal.class ); try ( SessionFactory newSessionFactory = config.buildSessionFactory() ) { try ( FullTextSession fullTextSession = Search.getFullTextSession( newSessionFactory.openSession() ) ) { ExtendedSearchIntegrator integrator = fullTextSession.getSearchFactory().unwrap( ExtendedSearchIntegrator.class ); return integrator.getIndexBindings().get( Animal.class ).getIndexManagerSelector().all(); } } }
/**
 * Verifies that the message sender service was injected and is bound to the
 * master channel's address.
 *
 * @throws Exception in case the test fails.
 */
@Test
public void testInjectionHappened() throws Exception {
	SearchIntegrator searchFactory = getSearchFactory().unwrap( SearchIntegrator.class );
	MessageSenderService sender = searchFactory.getServiceManager().requestService( MessageSenderService.class );
	try {
		// assertEquals reports both values on failure, unlike assertTrue(a.equals(b)).
		Assert.assertEquals( masterChannel.getAddress(), sender.getAddress() );
	}
	finally {
		// Release the requested service even if the assertion fails.
		searchFactory.getServiceManager().releaseService( MessageSenderService.class );
	}
}
@Test
public void canDeleteByQuery() throws Exception {
	// Open a full-text session on top of a plain session.
	Session s = openSession();
	FullTextSession session = Search.getFullTextSession( s );
	ExtendedSearchIntegrator integrator = session.getSearchFactory()
			.unwrap( ExtendedSearchIntegrator.class );
	// Delete all HockeyPlayer documents whose "active" term is "false",
	// going through the low-level worker API rather than the session.
	DeleteByQueryWork queryWork = new DeleteByQueryWork(
			new PojoIndexedTypeIdentifier( HockeyPlayer.class ),
			new SingularTermDeletionQuery( "active", "false" ) );
	TransactionContext tc = new TransactionContextForTest();
	integrator.getWorker().performWork( queryWork, tc );
	// Flush so the deletion is applied to the index before querying.
	integrator.getWorker().flushWorks( tc );
	// Match everything that remains in the index.
	QueryDescriptor query = ElasticsearchQueries.fromJson( "{ 'query': { 'match_all' : {} } }" );
	Transaction tx = s.beginTransaction();
	@SuppressWarnings("unchecked")
	List<HockeyPlayer> result = session.createFullTextQuery( query, HockeyPlayer.class ).list();
	// Only the players not matched by the deletion query should remain.
	assertThat( result ).extracting( "name" ).containsExactlyInAnyOrder( "Hergesheimer", "Brand" );
	tx.commit();
	s.close();
}
/**
 * Verifies that the selected locking strategy produces a lock of the expected type.
 *
 * @param optionName value for "hibernate.search.default.locking_strategy", or null for the default
 * @param expectedLockTypeName fully qualified class name of the expected Lock implementation
 * @param useRamDirectory when false, a filesystem-based directory provider is used
 * @throws IOException if obtaining the lock fails
 */
private void testUseOfSelectedLockingFactory(String optionName, String expectedLockTypeName, boolean useRamDirectory) throws IOException {
	FullTextSessionBuilder builder = new FullTextSessionBuilder();
	builder.addAnnotatedClass( SnowStorm.class );
	if ( optionName != null ) {
		builder.setProperty( "hibernate.search.default.locking_strategy", optionName );
	}
	if ( ! useRamDirectory ) {
		builder.useFileSystemDirectoryProvider( CustomLockProviderTest.class );
	}
	FullTextSessionBuilder ftsb = builder.build();
	try {
		SearchIntegrator integrator = ftsb.getSearchFactory().unwrap( SearchIntegrator.class );
		EntityIndexBinding binding = integrator.getIndexBindings().get( SnowStorm.class );
		DirectoryBasedIndexManager indexManager =
				(DirectoryBasedIndexManager) binding.getIndexManagerSelector().all().iterator().next();
		DirectoryProvider<?> directoryProvider = indexManager.getDirectoryProvider();
		Directory directory = directoryProvider.getDirectory();
		// Obtain (and auto-release) a lock to observe the concrete Lock implementation.
		try ( Lock lock = directory.obtainLock( "my-lock" ) ) {
			assertEquals( expectedLockTypeName, lock.getClass().getName() );
		}
	}
	finally {
		builder.close();
	}
	assertEquals( null, CustomLockFactoryProvider.optionValue );
}
/**
 * Verifies that configuring the given worker backend name results in the
 * expected BackendQueueProcessor implementation for the BlogEntry index.
 *
 * @param name value for "hibernate.search.default.worker.backend"
 * @param backendType expected backend queue processor class
 */
private void verifyBackendUsage(String name, Class<? extends BackendQueueProcessor> backendType) {
	FullTextSessionBuilder builder = new FullTextSessionBuilder();
	FullTextSession session = builder
			.setProperty( "hibernate.search.default.worker.backend", name )
			.addAnnotatedClass( BlogEntry.class )
			.openFullTextSession();
	ExtendedSearchIntegrator integrator = session.getSearchFactory().unwrap( ExtendedSearchIntegrator.class );
	// The integrator outlives the session; the session itself is no longer needed.
	session.close();
	IndexManagerHolder indexManagerHolder = integrator.getIndexManagerHolder();
	DirectoryBasedIndexManager indexManager =
			(DirectoryBasedIndexManager) indexManagerHolder.getIndexManager( "org.hibernate.search.test.configuration.BlogEntry" );
	BackendQueueProcessor processor = indexManagerHolder.getBackendQueueProcessor( indexManager.getIndexName() );
	assertEquals( backendType, processor.getClass() );
	builder.close();
}
/**
 * Stores entities with composite ids, runs the mass indexer, and asserts
 * that no backend errors were recorded during reindexing.
 */
@Test
public void testReindexingWithCompositeIds() throws InterruptedException {
	// Store the test data in one session...
	try ( FullTextSession session = ftsBuilder.openFullTextSession() ) {
		storeTestData( session );
	}
	// ...then mass-index in a fresh one.
	try ( FullTextSession session = ftsBuilder.openFullTextSession() ) {
		session.createIndexer().startAndWait();
	}
	SearchFactory searchFactory = ftsBuilder.getSearchFactory();
	SearchIntegrator integrator = searchFactory.unwrap( SearchIntegrator.class );
	// The counting error handler must not have seen any error.
	CountingErrorHandler errorHandler = (CountingErrorHandler) integrator.getErrorHandler();
	assertEquals( 0, errorHandler.getTotalCount() );
}
@Test public void verifyIndexExclusivity() { FullTextSessionBuilder builder = new FullTextSessionBuilder(); FullTextSession ftSession = builder .setProperty( "hibernate.search.Book.indexmanager", "near-real-time" ) .setProperty( "hibernate.search." + Foo.class.getName() + ".indexmanager", FooIndexManager.class.getName() ) .addAnnotatedClass( BlogEntry.class ) .addAnnotatedClass( Foo.class ) .addAnnotatedClass( org.hibernate.search.test.query.Book.class ) .addAnnotatedClass( org.hibernate.search.test.query.Author.class ) .openFullTextSession(); ExtendedSearchIntegrator integrator = ftSession.getSearchFactory().unwrap( ExtendedSearchIntegrator.class ); ftSession.close(); IndexManagerHolder allIndexesManager = integrator.getIndexManagerHolder(); //checks for the default implementation checkIndexManagerType( allIndexesManager, "org.hibernate.search.test.configuration.BlogEntry", org.hibernate.search.indexes.spi.DirectoryBasedIndexManager.class ); //Uses "NRT" taken from shortcut names checkIndexManagerType( allIndexesManager, "Book", org.hibernate.search.indexes.impl.NRTIndexManager.class ); //Uses a fully qualified name to load an implementation checkIndexManagerType( allIndexesManager, Foo.class.getName(), FooIndexManager.class ); builder.close(); }
@Test
public void testMasterDelayedInitialization() {
	// Start only the slave; the master node is deliberately not running yet.
	slave = createSlaveNode( true );
	assertNotNull( FSSlaveDirectoryProviderTestingExtension.taskScheduled );
	Long scheduledPeriod = FSSlaveDirectoryProviderTestingExtension.taskScheduledPeriod;
	assertNotNull( scheduledPeriod );
	// The refresh task should have been scheduled with the configured 12s period.
	assertEquals( Long.valueOf( 12000L ), scheduledPeriod );
	// Dig out the slave's directory provider through its single index manager.
	SearchIntegrator integrator = slave.getSearchFactory().unwrap( SearchIntegrator.class );
	EntityIndexBinding snowIndexBinder = integrator.getIndexBindings().get( SnowStorm.class );
	Set<IndexManager> indexManagers = snowIndexBinder.getIndexManagerSelector().all();
	assertEquals( 1, indexManagers.size() );
	DirectoryBasedIndexManager indexManager = (DirectoryBasedIndexManager) indexManagers.iterator().next();
	FSSlaveDirectoryProviderTestingExtension dp = (FSSlaveDirectoryProviderTestingExtension) indexManager.getDirectoryProvider();
	// now as master wasn't started yet, it should return a "dummy" index a RAMDirectory
	Directory directory = dp.getDirectory();
	assertTrue( directory instanceof RAMDirectory );
	dp.triggerTimerAction();
	// still didn't start it..
	directory = dp.getDirectory();
	assertTrue( directory instanceof RAMDirectory );
	// now the master goes online, at first timer tick we'll switch to the real index
	master = createMasterNode();
	dp.triggerTimerAction();
	directory = dp.getDirectory();
	assertTrue( directory instanceof FSDirectory );
}
/** * Tests that adding and deleting the same entity only results into a single delete in the work queue. * See HSEARCH-293. * * @throws Exception in case the test fails. */ @Test public void testAddWorkGetReplacedByDeleteWork() throws Exception { FullTextSession fullTextSession = org.hibernate.search.Search.getFullTextSession( openSession() ); ExtendedSearchIntegrator integrator = fullTextSession.getSearchFactory().unwrap( ExtendedSearchIntegrator.class ); // create test entity SpecialPerson person = new SpecialPerson(); person.setName( "Joe Smith" ); EmailAddress emailAddress = new EmailAddress(); emailAddress.setAddress( "foo@foobar.com" ); emailAddress.setDefaultAddress( true ); person.addEmailAddress( emailAddress ); WorkQueue plannerEngine = new WorkQueue( integrator ); plannerEngine.add( new Work( person, 1, WorkType.ADD ) ); plannerEngine.prepareWorkPlan(); List<LuceneWork> sealedQueue = plannerEngine.getSealedQueue(); assertEquals( "There should only be one job in the queue", 1, sealedQueue.size() ); assertTrue( "Wrong job type", sealedQueue.get( 0 ) instanceof AddLuceneWork ); plannerEngine.add( new Work( person, 1, WorkType.DELETE ) ); plannerEngine.prepareWorkPlan(); sealedQueue = plannerEngine.getSealedQueue(); assertEquals( "Jobs should have countered each other", 0, sealedQueue.size() ); fullTextSession.close(); }
@Test public void verifyIndexExclusivity() { FullTextSessionBuilder builder = new FullTextSessionBuilder(); FullTextSession ftSession = builder .setProperty( "hibernate.search.org.hibernate.search.test.configuration.BlogEntry.exclusive_index_use", "true" ) .setProperty( "hibernate.search.Book.exclusive_index_use", "false" ) .addAnnotatedClass( BlogEntry.class ) .addAnnotatedClass( Foo.class ) .addAnnotatedClass( org.hibernate.search.test.query.Book.class ) .addAnnotatedClass( org.hibernate.search.test.query.Author.class ) .openFullTextSession(); ExtendedSearchIntegrator integrator = ftSession.getSearchFactory().unwrap( ExtendedSearchIntegrator.class ); ftSession.close(); IndexManagerHolder allIndexesManager = integrator.getIndexManagerHolder(); //explicitly enabled: assertExclusiveIsEnabled( allIndexesManager, "org.hibernate.search.test.configuration.BlogEntry", true ); //explicitly disabled (this entity defined a short index name): assertExclusiveIsEnabled( allIndexesManager, "Book", false ); //using default: assertExclusiveIsEnabled( allIndexesManager, Foo.class.getName(), true ); builder.close(); }
@Test public void testProvidedIdMapping() throws Exception { FullTextSession fullTextSession = Search.getFullTextSession( openSession() ); SearchIntegrator sf = fullTextSession.getSearchFactory().unwrap( SearchIntegrator.class ); ProvidedIdEntry person1 = new ProvidedIdEntry(); person1.setName( "Big Goat" ); person1.setBlurb( "Eats grass" ); ProvidedIdEntry person2 = new ProvidedIdEntry(); person2.setName( "Mini Goat" ); person2.setBlurb( "Eats cheese" ); ProvidedIdEntry person3 = new ProvidedIdEntry(); person3.setName( "Regular goat" ); person3.setBlurb( "Is anorexic" ); SearchITHelper helper = new SearchITHelper( () -> sf ); helper.index() .push( person1, 1 ) .push( person2, 2 ) .push( person3, 3 ) .execute(); Transaction transaction = fullTextSession.beginTransaction(); //we cannot use FTQuery because @ProvidedId does not provide the getter id and Hibernate Hsearch Query extension //needs it. So we use plain HSQuery helper.assertThat( "providedidentry.name", "goat" ) .from( ProvidedIdEntry.class ) .hasResultSize( 3 ); transaction.commit(); getSession().close(); }