/**
 * Builds the Lucene document for the given entity and wraps it in an {@link AddLuceneWork}.
 * When at least one field declares a field-level analyzer override, the override map is
 * attached to the work so the backend can apply the correct analyzers.
 */
public AddLuceneWork createAddWork(String tenantId, IndexedTypeIdentifier entityType, Object entity, Serializable id, String idInString, InstanceInitializer sessionInitializer, ConversionContext conversionContext) {
	// getDocument(...) populates the analyzer-override map as a side effect while indexing fields.
	Map<String, String> analyzerOverrides = new HashMap<String, String>();
	Document document = getDocument( tenantId, entity, id, analyzerOverrides, sessionInitializer, conversionContext, null );
	return analyzerOverrides.isEmpty()
			? new AddLuceneWork( tenantId, id, idInString, entityType, document )
			: new AddLuceneWork( tenantId, id, idInString, entityType, document, analyzerOverrides );
}
/**
 * Creates an {@link AddLuceneWork} for the entity: first renders the Lucene document,
 * collecting any per-field analyzer overrides, then picks the constructor matching
 * whether overrides were collected.
 */
public AddLuceneWork createAddWork(String tenantId, IndexedTypeIdentifier entityType, Object entity, Serializable id, String idInString, InstanceInitializer sessionInitializer, ConversionContext conversionContext) {
	Map<String, String> fieldAnalyzers = new HashMap<String, String>();
	// Side effect: getDocument fills fieldAnalyzers while building the document.
	Document builtDocument = getDocument( tenantId, entity, id, fieldAnalyzers, sessionInitializer, conversionContext, null );
	if ( fieldAnalyzers.isEmpty() ) {
		return new AddLuceneWork( tenantId, id, idInString, entityType, builtDocument );
	}
	return new AddLuceneWork( tenantId, id, idInString, entityType, builtDocument, fieldAnalyzers );
}
/**
 * Wraps the given document in a single {@link AddLuceneWork} and returns it as a one-element list.
 */
private List<LuceneWork> buildLuceneWorks(Document document) {
	LuceneWork addWork = new AddLuceneWork( 123, "123", remoteTypeId, document );
	List<LuceneWork> works = new ArrayList<>();
	works.add( addWork );
	return works;
}
}
private List<LuceneWork> makeSomeWork() { List<LuceneWork> list = new LinkedList<>(); //just some random data: list.add( new AddLuceneWork( Integer.valueOf( 5 ), "id:5", testTypeId, new Document() ) ); list.add( new AddLuceneWork( Integer.valueOf( 6 ), "id:6", testTypeId, new Document() ) ); return list; }
/**
 * Assembles an {@link AddLuceneWork} from the pending Lucene document and the given
 * per-field analyzer map, appends it to the accumulated results, then resets the
 * per-work state (document and id) for the next entry.
 */
@Override
public void addAddLuceneWork(String entityClassName, Map<String, String> fieldToAnalyzerMap, ConversionContext conversionContext) {
	final IndexedTypeIdentifier typeId = typesRegistry.keyFromName( entityClassName );
	final String idAsString = objectIdInString( typeId, id, conversionContext );
	results.add( new AddLuceneWork( id, idAsString, typeId, getLuceneDocument(), fieldToAnalyzerMap ) );
	// Reset mutable state so the next deserialized work starts from a clean slate.
	clearDocument();
	id = null;
}
// Builds an AddLuceneWork from the pending Lucene document (getLuceneDocument()) and the
// supplied per-field analyzer map, resolving the indexed type from its class name, and
// appends it to `results`. Afterwards clears the per-work state (document and id) so the
// next deserialized entry starts fresh.
@Override public void addAddLuceneWork(String entityClassName, Map<String, String> fieldToAnalyzerMap, ConversionContext conversionContext) { final IndexedTypeIdentifier fromName = typesRegistry.keyFromName( entityClassName ); LuceneWork result = new AddLuceneWork( id, objectIdInString( fromName, id, conversionContext ), fromName, getLuceneDocument(), fieldToAnalyzerMap ); results.add( result ); clearDocument(); id = null; }
private List<LuceneWork> buildLuceneWorkList() throws Exception { List<LuceneWork> works = new ArrayList<>(); works.add( OptimizeLuceneWork.INSTANCE ); works.add( OptimizeLuceneWork.INSTANCE ); IndexedTypeIdentifier type = new PojoIndexedTypeIdentifier( RemoteEntity.class ); works.add( new OptimizeLuceneWork( type ) ); //won't be send over works.add( new PurgeAllLuceneWork( type ) ); works.add( new PurgeAllLuceneWork( type ) ); works.add( new DeleteLuceneWork( 123L, "123", type ) ); works.add( new DeleteLuceneWork( "Foo", "Bar", type ) ); works.add( new AddLuceneWork( 125, "125", type, new Document() ) ); return works; }
@Override public AddLuceneWork cloneOverridingIdString(final AddLuceneWork lw, final KeyTransformationHandler keyTransformationHandler) { final Serializable id = lw.getId(); if (id == null) { //this is serialized work received from a remote node: take the getIdAsString instead final String idInString = lw.getIdInString(); return new AddLuceneWork(idInString, idInString, lw.getEntityType(), lw.getDocument(), lw.getFieldToAnalyzerMap()); } else { return lw; } } }
// Returns the work as-is when it has an id object. When the id is null — the case for work
// deserialized from a remote node, where only getIdInString() survives — returns a copy of
// the work using the string id for both the id and idInString slots.
// NOTE: keyTransformationHandler is accepted but unused in this implementation.
@Override public AddLuceneWork cloneOverridingIdString(final AddLuceneWork lw, final KeyTransformationHandler keyTransformationHandler) { final Serializable id = lw.getId(); if (id == null) { //this is serialized work received from a remote node: take the getIdAsString instead final String idInString = lw.getIdInString(); return new AddLuceneWork(idInString, idInString, lw.getEntityType(), lw.getDocument(), lw.getFieldToAnalyzerMap()); } else { return lw; } } }
/**
 * Manually creates the work queue. This list gets sent by the slaves to the master for indexing.
 *
 * @param shirt the shirt to index
 * @return a manually created <code>LuceneWork</code> list
 */
private List<LuceneWork> createDocumentAndWorkQueue(TShirt shirt) {
	String idAsString = String.valueOf( shirt.getId() );
	Document doc = new Document();
	// Stored, untokenized metadata fields; only "logo" is analyzed free text.
	doc.add( new Field( ProjectionConstants.OBJECT_CLASS, shirt.getClass().getName(), Field.Store.YES, Field.Index.NOT_ANALYZED ) );
	doc.add( new Field( "id", idAsString, Field.Store.YES, Field.Index.NOT_ANALYZED ) );
	doc.add( new Field( "logo", shirt.getLogo(), Field.Store.NO, Field.Index.ANALYZED ) );
	doc.add( new DoubleField( "length", shirt.getLength(), Field.Store.NO ) );
	List<LuceneWork> queue = new ArrayList<LuceneWork>();
	queue.add( new AddLuceneWork( shirt.getId(), idAsString, tshirtType, doc ) );
	return queue;
}
/**
 * Manually creates the work queue. This list gets sent by the slaves to the master for indexing.
 *
 * @param shirt the shirt to index
 * @return a manually created <code>LuceneWork</code> list
 */
private List<LuceneWork> createDocumentAndWorkQueue(TShirt shirt) {
	Document doc = new Document();
	Field field = new Field( ProjectionConstants.OBJECT_CLASS, shirt.getClass().getName(), Field.Store.YES, Field.Index.NOT_ANALYZED );
	doc.add( field );
	// Fixed: index the id untokenized (NOT_ANALYZED) so exact-match id lookups work,
	// matching the otherwise-identical sibling method which already uses NOT_ANALYZED.
	field = new Field( "id", String.valueOf( shirt.getId() ), Field.Store.YES, Field.Index.NOT_ANALYZED );
	doc.add( field );
	field = new Field( "logo", shirt.getLogo(), Field.Store.NO, Field.Index.ANALYZED );
	doc.add( field );
	DoubleField numField = new DoubleField( "length", shirt.getLength(), Field.Store.NO );
	doc.add( numField );
	LuceneWork luceneWork = new AddLuceneWork( shirt.getId(), String.valueOf( shirt.getId() ), tshirtType, doc );
	List<LuceneWork> queue = new ArrayList<LuceneWork>();
	queue.add( luceneWork );
	return queue;
}
/**
 * Builds a fixture list containing one {@link AddLuceneWork} whose document carries
 * one doc-values field of each flavor (numeric, binary, sorted-set, sorted), all named "foo".
 */
private List<LuceneWork> buildV12Works() throws Exception {
	Document docValuesDocument = new Document();
	docValuesDocument.add( new NumericDocValuesField( "foo", 22L ) );
	docValuesDocument.add( new BinaryDocValuesField( "foo", new BytesRef( "world" ) ) );
	docValuesDocument.add( new SortedSetDocValuesField( "foo", new BytesRef( "hello" ) ) );
	docValuesDocument.add( new SortedDocValuesField( "foo", new BytesRef( "world" ) ) );
	List<LuceneWork> works = new ArrayList<>();
	works.add( new AddLuceneWork( 123, "123", remoteTypeId, docValuesDocument ) );
	return works;
}
private List<LuceneWork> buildLuceneWorkList() throws Exception { List<LuceneWork> works = new ArrayList<LuceneWork>(); works.add( OptimizeLuceneWork.INSTANCE ); works.add( OptimizeLuceneWork.INSTANCE ); works.add( new OptimizeLuceneWork( remoteTypeId ) ); //won't be send over works.add( new PurgeAllLuceneWork( remoteTypeId ) ); works.add( new PurgeAllLuceneWork( remoteTypeId ) ); works.add( new DeleteByQueryLuceneWork( remoteTypeId, new SingularTermDeletionQuery( "key", "value" ) ) ); works.add( new DeleteLuceneWork( 123L, "123", remoteTypeId ) ); works.add( new DeleteLuceneWork( "Sissi", "Sissi", remoteTypeId ) ); works.add( new DeleteLuceneWork( new URL( "http://emmanuelbernard.com" ), "http://emmanuelbernard.com", remoteTypeId ) ); Document doc = buildDocumentWithNumericFields(); Map<String, String> analyzers = new HashMap<String, String>(); analyzers.put( "godo", "ngram" ); works.add( new AddLuceneWork( 123, "123", remoteTypeId, doc, analyzers ) ); doc = buildDocumentWithMultipleMixedTypeFields(); works.add( new UpdateLuceneWork( 1234, "1234", remoteTypeId, doc ) ); works.add( new AddLuceneWork( 125, "125", remoteTypeId, new Document() ) ); return works; }
analyzers.put( "godo", "ngram" ); works.add( new AddLuceneWork( 123, "123", remoteTypeId, doc, analyzers ) ); works.add( new AddLuceneWork( 125, "125", remoteTypeId, new Document() ) ); return works;