@Override public AddLuceneWork cloneOverridingIdString(final AddLuceneWork lw, final KeyTransformationHandler keyTransformationHandler) { final Serializable id = lw.getId(); if (id == null) { //this is serialized work received from a remote node: take the getIdAsString instead final String idInString = lw.getIdInString(); return new AddLuceneWork(idInString, idInString, lw.getEntityType(), lw.getDocument(), lw.getFieldToAnalyzerMap()); } else { return lw; } } }
@Override
public String toString() {
	// Render the tenant marker as " [tenant] " only when a tenant id is present.
	final String tenantId = getTenantId();
	final String tenant;
	if ( tenantId == null ) {
		tenant = "";
	}
	else {
		tenant = " [" + tenantId + "] ";
	}
	return "AddLuceneWork" + tenant + ": " + this.getEntityType().getName() + "#" + this.getIdInString();
}
@Override
public BackendRequest<?> visitAddWork(AddLuceneWork work, Boolean refresh) {
	// Translate the Lucene add into a backend index action; keep the original
	// work attached so failures can be reported against it.
	final String documentId = DocumentIdHelper.getDocumentId( work );
	final Action<?> indexAction = indexDocument( documentId, work.getDocument(), work.getEntityClass(), refresh );
	return new BackendRequest<>( indexAction, work );
}
public AddLuceneWork createAddWork(String tenantId, IndexedTypeIdentifier entityType, Object entity, Serializable id, String idInString, InstanceInitializer sessionInitializer, ConversionContext conversionContext) {
	// Per-field analyzer overrides are collected as a side effect of building the document.
	final Map<String, String> fieldToAnalyzerMap = new HashMap<String, String>();
	final Document document = getDocument( tenantId, entity, id, fieldToAnalyzerMap, sessionInitializer, conversionContext, null );
	// Only attach the analyzer map when at least one field overrides the default analyzer.
	return fieldToAnalyzerMap.isEmpty()
			? new AddLuceneWork( tenantId, id, idInString, entityType, document )
			: new AddLuceneWork( tenantId, id, idInString, entityType, document, fieldToAnalyzerMap );
}
@Override
public ElasticsearchWork<?> visitAddWork(AddLuceneWork work, IndexingMonitor monitor) {
	// Build an index request for the document carried by this add-work.
	final String documentId = getDocumentId( work );
	return indexDocument( documentId, work.getDocument(), work.getEntityType() )
			.monitor( monitor )
			.luceneWork( work )
			.markIndexDirty( refreshAfterWrite )
			.build();
}
@Test
public void testStoreAndOmitNorms() throws Exception {
	// Index one entity and inspect the resulting Lucene document to verify that
	// norms are kept or omitted per field configuration.
	Session session = openSession();
	try {
		FullTextSession fullTextSession = Search.getFullTextSession( session );
		Transaction tx = fullTextSession.beginTransaction();
		NormsTestEntity test = new NormsTestEntity();
		test.setWithNormsImplicit( "hello" );
		test.setWithNormsExplicit( "world" );
		test.setWithoutNorms( "how are you?" );
		fullTextSession.save( test );
		tx.commit();

		List<LuceneWork> processedQueue = LeakingLocalBackend.getLastProcessedQueue();
		// Message added so a failure reports what went wrong instead of a bare AssertionError.
		assertTrue( "expected exactly one work in the processed queue but got " + processedQueue.size(),
				processedQueue.size() == 1 );
		AddLuceneWork addLuceneWork = (AddLuceneWork) processedQueue.get( 0 );
		Document doc = addLuceneWork.getDocument();

		IndexableField implicitNormField = doc.getField( "withNormsImplicit" );
		assertFalse( "norms should be stored for this field", implicitNormField.fieldType().omitNorms() );
		IndexableField explicitNormField = doc.getField( "withNormsExplicit" );
		assertFalse( "norms should be stored for this field", explicitNormField.fieldType().omitNorms() );
		IndexableField withoutNormField = doc.getField( "withoutNorms" );
		assertTrue( "norms should not be stored for this field", withoutNormField.fieldType().omitNorms() );
	}
	finally {
		// The original test leaked the session; always release it, even on assertion failure.
		session.close();
	}
}
public AddLuceneWork createAddWork(String tenantId, IndexedTypeIdentifier entityType, Object entity, Serializable id, String idInString, InstanceInitializer sessionInitializer, ConversionContext conversionContext) {
	// Field-to-analyzer overrides are filled in as a side effect of document construction.
	final Map<String, String> analyzerOverrides = new HashMap<String, String>();
	final Document document = getDocument( tenantId, entity, id, analyzerOverrides, sessionInitializer, conversionContext, null );
	if ( analyzerOverrides.isEmpty() ) {
		// No per-field analyzer override: use the shorter constructor.
		return new AddLuceneWork( tenantId, id, idInString, entityType, document );
	}
	return new AddLuceneWork( tenantId, id, idInString, entityType, document, analyzerOverrides );
}
@Override
public ElasticsearchWork<?> visitAddWork(AddLuceneWork work, IndexingMonitor monitor) {
	// Turn the add-work into an Elasticsearch index request; the monitor and the
	// original work travel with the request for progress reporting and error context.
	return indexDocument( getDocumentId( work ), work.getDocument(), work.getEntityType() )
			.luceneWork( work )
			.monitor( monitor )
			.markIndexDirty( refreshAfterWrite )
			.build();
}
/**
 * Returns a copy of the given add-work whose id is its string representation,
 * or the work itself when a typed id is already present.
 */
@Override
public AddLuceneWork cloneOverridingIdString(final AddLuceneWork lw, final KeyTransformationHandler keyTransformationHandler) {
	final Serializable id = lw.getId();
	if (id == null) {
		//this is serialized work received from a remote node: take the getIdAsString instead
		final String idInString = lw.getIdInString();
		// The string id is used both as the id and as its string form.
		return new AddLuceneWork(idInString, idInString, lw.getEntityType(), lw.getDocument(), lw.getFieldToAnalyzerMap());
	}
	else {
		return lw;
	}
}
}
/** Wraps the given document in a single add-work targeting the remote type. */
private List<LuceneWork> buildLuceneWorks(Document document) {
	final List<LuceneWork> works = new ArrayList<>();
	final LuceneWork addWork = new AddLuceneWork( 123, "123", remoteTypeId, document );
	works.add( addWork );
	return works;
}
}
/**
 * Human-readable form: {@code AddLuceneWork [tenant] : EntityName#id};
 * the tenant marker is omitted when no tenant id is set.
 */
@Override
public String toString() {
	String tenant = getTenantId() == null ? "" : " [" + getTenantId() + "] ";
	return "AddLuceneWork" + tenant + ": " + this.getEntityType().getName() + "#" + this.getIdInString();
}
/**
 * Converts the queued item into the work to execute: the add itself in ADD mode,
 * or an equivalent update-work in UPDATE mode.
 */
private LuceneWork extractWork(Object item) {
	final AddLuceneWork addWork = (AddLuceneWork) item;
	switch ( writeMode ) {
		case ADD:
			return addWork;
		case UPDATE:
			// Re-package the add as an update carrying the same document and metadata.
			return new UpdateLuceneWork( addWork.getId(), addWork.getIdInString(), addWork.getEntityType(),
					addWork.getDocument(), addWork.getFieldToAnalyzerMap() );
		default:
			throw new AssertionFailure( "Invalid WriteMode: " + writeMode );
	}
}
private List<LuceneWork> makeSomeWork() { List<LuceneWork> list = new LinkedList<>(); //just some random data: list.add( new AddLuceneWork( Integer.valueOf( 5 ), "id:5", testTypeId, new Document() ) ); list.add( new AddLuceneWork( Integer.valueOf( 6 ), "id:6", testTypeId, new Document() ) ); return list; }
/**
 * Asserts that {@code copy} is a faithful copy of {@code work}: entity type, id,
 * id string, analyzer map and document content must all match.
 */
private static void assertAdd(AddLuceneWork work, AddLuceneWork copy) {
	assertThat( copy.getEntityType() ).as( "Add.getEntityClass is not copied" ).isEqualTo( work.getEntityType() );
	assertThat( copy.getId() ).as( "Add.getId is not copied" ).isEqualTo( work.getId() );
	assertThat( copy.getIdInString() ).as( "Add.getIdInString is not the same" ).isEqualTo( work.getIdInString() );
	assertThat( copy.getFieldToAnalyzerMap() ).as( "Add.getFieldToAnalyzerMap is not the same" )
			.isEqualTo( work.getFieldToAnalyzerMap() );
	// Document equality is field-by-field; delegated to a dedicated helper.
	assertDocument( work.getDocument(), copy.getDocument() );
}
/**
 * Materializes the currently accumulated document as an {@link AddLuceneWork}
 * and appends it to {@code results}.
 * NOTE(review): stateful — consumes the builder's pending {@code id} and document;
 * both are reset afterwards so the next work starts from a clean slate.
 */
@Override
public void addAddLuceneWork(String entityClassName, Map<String, String> fieldToAnalyzerMap, ConversionContext conversionContext) {
	// Resolve the entity type from its serialized class name.
	final IndexedTypeIdentifier fromName = typesRegistry.keyFromName( entityClassName );
	LuceneWork result = new AddLuceneWork( id, objectIdInString( fromName, id, conversionContext ), fromName, getLuceneDocument(), fieldToAnalyzerMap );
	results.add( result );
	// Reset accumulated state for the next deserialized work.
	clearDocument();
	id = null;
}
/**
 * Builds an {@link AddLuceneWork} from the pending id and document and adds it
 * to the result list, then clears the pending state ({@code id}, document).
 */
@Override
public void addAddLuceneWork(String entityClassName, Map<String, String> fieldToAnalyzerMap, ConversionContext conversionContext) {
	final IndexedTypeIdentifier fromName = typesRegistry.keyFromName( entityClassName );
	// objectIdInString recomputes the string form of the id for this entity type.
	LuceneWork result = new AddLuceneWork( id, objectIdInString( fromName, id, conversionContext ), fromName, getLuceneDocument(), fieldToAnalyzerMap );
	results.add( result );
	clearDocument();
	id = null;
}
private List<LuceneWork> buildLuceneWorkList() throws Exception { List<LuceneWork> works = new ArrayList<>(); works.add( OptimizeLuceneWork.INSTANCE ); works.add( OptimizeLuceneWork.INSTANCE ); IndexedTypeIdentifier type = new PojoIndexedTypeIdentifier( RemoteEntity.class ); works.add( new OptimizeLuceneWork( type ) ); //won't be send over works.add( new PurgeAllLuceneWork( type ) ); works.add( new PurgeAllLuceneWork( type ) ); works.add( new DeleteLuceneWork( 123L, "123", type ) ); works.add( new DeleteLuceneWork( "Foo", "Bar", type ) ); works.add( new AddLuceneWork( 125, "125", type, new Document() ) ); return works; }
/**
 * Manually create the work queue. This lists gets send by the Slaves to the Master for indexing.
 *
 * @param shirt The shirt to index
 *
 * @return A manually create <code>LuceneWork</code> list.
 */
private List<LuceneWork> createDocumentAndWorkQueue(TShirt shirt) {
	// NOTE(review): uses the legacy Lucene Field constructor API (Store/Index enums);
	// kept as-is because the fixture must match what the master expects to deserialize.
	Document doc = new Document();
	// Class name projection field, stored but not analyzed.
	Field field = new Field( ProjectionConstants.OBJECT_CLASS, shirt.getClass().getName(), Field.Store.YES, Field.Index.NOT_ANALYZED );
	doc.add( field );
	// The id is stored verbatim (not analyzed) so it can be matched exactly.
	field = new Field( "id", String.valueOf( shirt.getId() ), Field.Store.YES, Field.Index.NOT_ANALYZED );
	doc.add( field );
	field = new Field( "logo", shirt.getLogo(), Field.Store.NO, Field.Index.ANALYZED );
	doc.add( field );
	DoubleField numField = new DoubleField( "length", shirt.getLength(), Field.Store.NO );
	doc.add( numField );
	LuceneWork luceneWork = new AddLuceneWork( shirt.getId(), String.valueOf( shirt.getId() ), tshirtType, doc );
	List<LuceneWork> queue = new ArrayList<LuceneWork>();
	queue.add( luceneWork );
	return queue;
}
/**
 * Manually create the work queue. This lists gets send by the Slaves to the Master for indexing.
 *
 * @param shirt The shirt to index
 *
 * @return A manually create <code>LuceneWork</code> list.
 */
private List<LuceneWork> createDocumentAndWorkQueue(TShirt shirt) {
	Document doc = new Document();
	Field field = new Field( ProjectionConstants.OBJECT_CLASS, shirt.getClass().getName(), Field.Store.YES, Field.Index.NOT_ANALYZED );
	doc.add( field );
	// NOTE(review): the "id" field is indexed ANALYZED here, while a sibling fixture
	// uses NOT_ANALYZED for the same field — confirm whether tokenizing the id is intentional.
	field = new Field( "id", String.valueOf( shirt.getId() ), Field.Store.YES, Field.Index.ANALYZED );
	doc.add( field );
	field = new Field( "logo", shirt.getLogo(), Field.Store.NO, Field.Index.ANALYZED );
	doc.add( field );
	DoubleField numField = new DoubleField( "length", shirt.getLength(), Field.Store.NO );
	doc.add( numField );
	LuceneWork luceneWork = new AddLuceneWork( shirt.getId(), String.valueOf( shirt.getId() ), tshirtType, doc );
	List<LuceneWork> queue = new ArrayList<LuceneWork>();
	queue.add( luceneWork );
	return queue;
}
private List<LuceneWork> buildLuceneWorkList() throws Exception { List<LuceneWork> works = new ArrayList<LuceneWork>(); works.add( OptimizeLuceneWork.INSTANCE ); works.add( OptimizeLuceneWork.INSTANCE ); works.add( new OptimizeLuceneWork( remoteTypeId ) ); //won't be send over works.add( new PurgeAllLuceneWork( remoteTypeId ) ); works.add( new PurgeAllLuceneWork( remoteTypeId ) ); works.add( new DeleteByQueryLuceneWork( remoteTypeId, new SingularTermDeletionQuery( "key", "value" ) ) ); works.add( new DeleteLuceneWork( 123L, "123", remoteTypeId ) ); works.add( new DeleteLuceneWork( "Sissi", "Sissi", remoteTypeId ) ); works.add( new DeleteLuceneWork( new URL( "http://emmanuelbernard.com" ), "http://emmanuelbernard.com", remoteTypeId ) ); Document doc = buildDocumentWithNumericFields(); Map<String, String> analyzers = new HashMap<String, String>(); analyzers.put( "godo", "ngram" ); works.add( new AddLuceneWork( 123, "123", remoteTypeId, doc, analyzers ) ); doc = buildDocumentWithMultipleMixedTypeFields(); works.add( new UpdateLuceneWork( 1234, "1234", remoteTypeId, doc ) ); works.add( new AddLuceneWork( 125, "125", remoteTypeId, new Document() ) ); return works; }