/**
 * Feeds {@code update} to {@code updater} only when the updated entity has already been
 * passed by the population scan (its id is at or below the node currently being indexed).
 */
@Override
public void acceptUpdate( MultipleIndexPopulator.MultipleIndexUpdater updater, IndexEntryUpdate<?> update,
        long currentlyIndexedNodeId )
{
    // Entities ahead of the scan are skipped here; the scan itself will index their current state.
    boolean alreadyScanned = update.getEntityId() <= currentlyIndexedNodeId;
    if ( alreadyScanned )
    {
        updater.process( update );
    }
}
/**
 * Extracts the id of the entity (node) that the given update concerns.
 */
private long entityIdOf( IndexEntryUpdate<IndexDescriptor> indexUpdate )
{
    return indexUpdate.getEntityId();
}
/**
 * Converts the given update into the Lucene {@link Document} representing its property values.
 */
private static Document updateAsDocument( IndexEntryUpdate<?> update )
{
    long entityId = update.getEntityId();
    return LuceneDocumentStructure.documentRepresentingProperties( entityId, update.values() );
}
}
/**
 * Groups the given updates by the entity (node) id each one targets.
 *
 * @param updates updates possibly touching several different nodes.
 * @return map from node id to all updates for that node, in encounter order.
 */
private static Map<Long,Collection<IndexEntryUpdate<?>>> splitPerNode( Collection<IndexEntryUpdate<?>> updates )
{
    Map<Long,Collection<IndexEntryUpdate<?>>> perNode = new HashMap<>();
    for ( IndexEntryUpdate<?> update : updates )
    {
        perNode.computeIfAbsent( update.getEntityId(), id -> new ArrayList<>() ).add( update );
    }
    return perNode;
}
/**
 * Writes a document for every given update. Each write goes through
 * {@code updateDocument} keyed on the entity id, so re-adding an entity
 * replaces any document previously written for it.
 *
 * @throws UncheckedIOException if the underlying Lucene writer fails.
 */
@Override
public void add( Collection<? extends IndexEntryUpdate<?>> updates )
{
    try
    {
        for ( IndexEntryUpdate<?> update : updates )
        {
            long entityId = update.getEntityId();
            writer.updateDocument( LuceneFulltextDocumentStructure.newTermForChangeOrRemove( entityId ),
                    updateAsDocument( update ) );
        }
    }
    catch ( IOException e )
    {
        throw new UncheckedIOException( e );
    }
}
private static <KEY extends NativeIndexKey<KEY>, VALUE extends NativeIndexValue> void processRemove( KEY treeKey, IndexEntryUpdate<?> update, Writer<KEY,VALUE> writer ) { // todo Do we need to verify that we actually removed something at all? // todo Difference between online and recovery? initializeKeyFromUpdate( treeKey, update.getEntityId(), update.values() ); writer.remove( treeKey ); }
/**
 * Converts the given update into a fulltext Lucene document covering this
 * descriptor's indexed property names.
 */
private Document updateAsDocument( IndexEntryUpdate<?> update )
{
    return LuceneFulltextDocumentStructure.documentRepresentingProperties(
            update.getEntityId(), descriptor.propertyNames(), update.values() );
}
/**
 * Collects the entity ids of all updates whose first value passes the given filter.
 *
 * @param updates updates to inspect; only the first value of each update is tested.
 * @param valueFilter predicate deciding which updates contribute their entity id.
 * @return a right-sized array of matching entity ids, in encounter order.
 */
private long[] extractEntityIds( IndexEntryUpdate<?>[] updates, Predicate<Value> valueFilter )
{
    long[] matchingIds = new long[updates.length];
    int count = 0;
    for ( IndexEntryUpdate<?> update : updates )
    {
        if ( valueFilter.test( update.values()[0] ) )
        {
            matchingIds[count] = update.getEntityId();
            count++;
        }
    }
    return Arrays.copyOf( matchingIds, count );
}
/**
 * Records the added entry and, when the entity with id 2 is seen, sneaks a concurrent
 * remove through the population job to provoke the scenario under test.
 */
void add( IndexEntryUpdate<?> update )
{
    long entityId = update.getEntityId();
    if ( entityId == 2 )
    {
        // Interleave an external removal while population is still scanning.
        job.update( IndexEntryUpdate.remove( nodeToDelete, index, valueToDelete ) );
    }
    added.put( entityId, update.values()[0].asObjectCopy() );
}
/**
 * Feeds this update's entity id and (single) value into the index sample being built.
 */
@Override
public void includeSample( IndexEntryUpdate<?> update )
{
    addValueToSample( update.getEntityId(), update.values()[0] );
}
/**
 * Records the added entry and, when the entity with id 2 is seen, sneaks a concurrent
 * value change through the population job to provoke the scenario under test.
 */
void add( IndexEntryUpdate<?> update )
{
    long entityId = update.getEntityId();
    if ( entityId == 2 )
    {
        // Interleave an external value change while population is still scanning.
        job.update( IndexEntryUpdate.change( nodeToChange, index, previousValue, newValue ) );
    }
    added.add( Pair.of( entityId, update.values()[0].asObjectCopy() ) );
}
/**
 * Applies a CHANGED update to the tree: removes the entry for the before-values and
 * merges in an entry for the new values, letting the merger detect uniqueness conflicts.
 * <p>
 * NOTE(review): the remove and the insert are intentionally ordered — the old entry is
 * removed before the new one is merged, and {@code treeKey} is reused (re-initialized)
 * between the two steps. Do not reorder.
 *
 * @throws IndexEntryConflictException if the new value collides with an existing entry,
 *         as reported by {@code conflictDetectingValueMerger.checkConflict}.
 */
private static <KEY extends NativeIndexKey<KEY>, VALUE extends NativeIndexValue> void processChange( KEY treeKey,
        VALUE treeValue, IndexEntryUpdate<?> update, Writer<KEY,VALUE> writer,
        ConflictDetectingValueMerger<KEY,VALUE> conflictDetectingValueMerger ) throws IndexEntryConflictException
{
    // Remove old entry
    initializeKeyFromUpdate( treeKey, update.getEntityId(), update.beforeValues() );
    writer.remove( treeKey );
    // Insert new entry
    initializeKeyFromUpdate( treeKey, update.getEntityId(), update.values() );
    treeValue.from( update.values() );
    conflictDetectingValueMerger.controlConflictDetection( treeKey );
    writer.merge( treeKey, treeValue, conflictDetectingValueMerger );
    conflictDetectingValueMerger.checkConflict( update.values() );
}
@Override public void process( IndexEntryUpdate<?> update ) { switch ( update.updateMode() ) { case ADDED: case CHANGED: added.put( update.getEntityId(), update.values()[0].asObjectCopy() ); break; case REMOVED: removed.put( update.getEntityId(), update.values()[0].asObjectCopy() ); // on remove, value is the before value break; default: throw new IllegalArgumentException( update.updateMode().name() ); } }
/**
 * Applies an ADDED update: initializes the key from the update's entity id and values,
 * fills the value, and merges the pair into the tree with the merger watching for
 * uniqueness conflicts.
 * <p>
 * NOTE(review): {@code controlConflictDetection} must run before {@code merge}, and
 * {@code checkConflict} after it — the merger accumulates conflict state during the
 * merge. Do not reorder.
 *
 * @throws IndexEntryConflictException if the added value collides with an existing entry,
 *         as reported by {@code conflictDetectingValueMerger.checkConflict}.
 */
private static <KEY extends NativeIndexKey<KEY>, VALUE extends NativeIndexValue> void processAdd( KEY treeKey,
        VALUE treeValue, IndexEntryUpdate<?> update, Writer<KEY,VALUE> writer,
        ConflictDetectingValueMerger<KEY,VALUE> conflictDetectingValueMerger ) throws IndexEntryConflictException
{
    initializeKeyFromUpdate( treeKey, update.getEntityId(), update.values() );
    treeValue.from( update.values() );
    conflictDetectingValueMerger.controlConflictDetection( treeKey );
    writer.merge( treeKey, treeValue, conflictDetectingValueMerger );
    conflictDetectingValueMerger.checkConflict( update.values() );
}
/**
 * Records ADDED and CHANGED updates as (entityId, value) pairs in {@code added};
 * any other update mode is rejected.
 */
@Override
public void process( IndexEntryUpdate<?> update )
{
    switch ( update.updateMode() )
    {
    case ADDED:
    case CHANGED:
        long entityId = update.getEntityId();
        added.add( Pair.of( entityId, update.values()[0].asObjectCopy() ) );
        break;
    default:
        throw new IllegalArgumentException( update.updateMode().name() );
    }
}
/**
 * Turns the given updates into expected {@link Hit}s (key/value pairs) using the layout,
 * suitable for verification against tree contents.
 */
private Hit<KEY,VALUE>[] convertToHits( IndexEntryUpdate<IndexDescriptor>[] updates, Layout<KEY,VALUE> layout )
{
    List<Hit<KEY,VALUE>> hits = new ArrayList<>( updates.length );
    for ( IndexEntryUpdate<IndexDescriptor> update : updates )
    {
        KEY key = layout.newKey();
        key.initialize( update.getEntityId() );
        int slots = update.values().length;
        for ( int slot = 0; slot < slots; slot++ )
        {
            key.initFromValue( slot, update.values()[slot], NEUTRAL );
        }
        VALUE value = layout.newValue();
        value.from( update.values() );
        hits.add( hit( key, value ) );
    }
    return hits.toArray( new Hit[0] );
}
@Test public void testExactMatchOnRandomValues() throws Exception { // given ValueType[] types = randomSetOfSupportedTypes(); List<Value> values = generateValuesFromType( types ); List<IndexEntryUpdate<?>> updates = generateUpdatesFromValues( values ); updateAndCommit( updates ); // when for ( IndexEntryUpdate<?> update : updates ) { // then List<Long> hits = query( IndexQuery.exact( 0, update.values()[0] ) ); assertEquals( hits.toString(), 1, hits.size() ); assertThat( single( hits ), equalTo( update.getEntityId() ) ); } }
@Test public void shouldIndexRemove() throws Exception { // given IndexEntryUpdate<IndexDescriptor>[] updates = someUpdatesSingleType(); processAll( updates ); for ( int i = 0; i < updates.length; i++ ) { // when IndexEntryUpdate<IndexDescriptor> update = updates[i]; IndexEntryUpdate<IndexDescriptor> remove = remove( update.getEntityId(), indexDescriptor, update.values() ); processAll( remove ); forceAndCloseAccessor(); // then verifyUpdates( Arrays.copyOfRange( updates, i + 1, updates.length ) ); setupAccessor(); } }
@Test public void shouldReturnCountZeroForMismatchingData() throws Exception { // given IndexEntryUpdate<IndexDescriptor>[] updates = someUpdatesSingleTypeNoDuplicates(); processAll( updates ); // when IndexReader reader = accessor.newReader(); for ( IndexEntryUpdate<IndexDescriptor> update : updates ) { int[] propKeys = valueCreatorUtil.indexDescriptor.properties(); long countWithMismatchingData = reader.countIndexedNodes( update.getEntityId() + 1, propKeys, update.values() ); long countWithNonExistentEntityId = reader.countIndexedNodes( NON_EXISTENT_ENTITY_ID, propKeys, update.values() ); long countWithNonExistentValue = reader.countIndexedNodes( update.getEntityId(), propKeys, generateUniqueValue( updates ) ); // then assertEquals( 0, countWithMismatchingData ); assertEquals( 0, countWithNonExistentEntityId ); assertEquals( 0, countWithNonExistentValue ); } }
@Test public void shouldIndexChange() throws Exception { // given IndexEntryUpdate<IndexDescriptor>[] updates = someUpdatesSingleType(); processAll( updates ); Iterator<IndexEntryUpdate<IndexDescriptor>> generator = filter( skipExisting( updates ), valueCreatorUtil.randomUpdateGenerator( random ) ); for ( int i = 0; i < updates.length; i++ ) { IndexEntryUpdate<IndexDescriptor> update = updates[i]; Value newValue = generator.next().values()[0]; updates[i] = change( update.getEntityId(), indexDescriptor, update.values()[0], newValue ); } // when processAll( updates ); // then forceAndCloseAccessor(); verifyUpdates( updates ); }