@Override
long recordId()
{
    // Delegate to the wrapped record's identity.
    return record.getId();
}
void add( R record )
{
    // Index the record by its id so it can be looked up later.
    long id = record.getId();
    map.put( id, record );
}
public BaseCommand( RECORD before, RECORD after )
{
    // Key this command by the id of the resulting record; the mode
    // (e.g. create/update/delete) is derived from the after-state.
    setup( after.getId(), Mode.fromRecordState( after ) );
    this.before = before;
    this.after = after;
}
private void track( RecordStore<?> store, AbstractBaseRecord record )
{
    // Use the highest unit this record occupies; records without a secondary
    // unit contribute -1, which is neutral under max.
    long secondary = record.requiresSecondaryUnit() ? record.getSecondaryUnitId() : -1;
    long candidate = max( record.getId(), secondary );
    HighId existing = highIds.get( store );
    if ( existing != null )
    {
        existing.track( candidate );
    }
    else
    {
        highIds.put( store, new HighId( candidate ) );
    }
}
@Override
public T load( long key, E additionalData )
{
    // Linear scan over the backing records for a matching id.
    // FIX: previously used Optional.get() unchecked, which throws a bare
    // NoSuchElementException with no context when the id is absent. Keep the
    // same exception type but include the missing key for diagnosability.
    return records.stream()
            .filter( r -> r.getId() == key )
            .findFirst()
            .orElseThrow( () -> new java.util.NoSuchElementException( "No record with id " + key ) );
}
private static <R extends AbstractBaseRecord> R add( Map<Long, Delta<R>> records, R record )
{
    // Register the record as a fresh delta (no prior state) and return it
    // so callers can chain on the same instance.
    Delta<R> delta = new Delta<>( record );
    records.put( record.getId(), delta );
    return record;
}
String diff( Iterable<? extends AbstractBaseRecord> actual )
{
    // Ids we expect but have not yet encountered among the actual records.
    Set<Long> unseen = new HashSet<>( expectedById.keySet() );
    for ( AbstractBaseRecord record : actual )
    {
        unseen.remove( record.getId() );
        if ( !expectedById.containsKey( record.getId() ) )
        {
            return String.format( "This record was not expected: %s", record );
        }
        // Compare the actual record against its expected counterpart.
        String diff = diff( expectedById.get( record.getId() ), record );
        if ( diff != null )
        {
            return diff;
        }
    }
    // FIX: the 'seen' set was previously populated but never checked, so
    // expected records entirely absent from 'actual' went unreported.
    if ( !unseen.isEmpty() )
    {
        return String.format( "These expected records were not seen: %s", unseen );
    }
    return null;
}
private static <R extends AbstractBaseRecord> void add( Map<Long, Delta<R>> records, R oldRecord, R newRecord )
{
    // Record a before/after pair, keyed by the id of the new state.
    long id = newRecord.getId();
    records.put( id, new Delta<>( oldRecord, newRecord ) );
}
@Override
protected void process( RECORD[] batch, BatchSender sender )
{
    // Resolve the id sequence once per batch, then commit every live,
    // non-reserved record in order.
    LongFunction<IdSequence> idSequenceLookup = prepareIdSequence.apply( store );
    int updated = 0;
    for ( RECORD record : batch )
    {
        boolean updatable = record != null && record.inUse()
                && !IdValidator.isReservedId( record.getId() );
        if ( updatable )
        {
            store.prepareForCommit( record, idSequenceLookup.apply( record.getId() ) );
            store.updateRecord( record );
            updated++;
        }
    }
    recordsUpdated.add( updated );
}
// NOTE(review): continuation of a StringBuilder/Appendable chain whose start is
// outside this view — appends the record id wrapped in brackets, e.g. "[42]".
.append( '[' ).append( record.getId() ).append( ']' );
// Scans the whole store (Scanner.scan with the given filters) and feeds every
// record to this visitor via store.accept(this, record). Progress is reported
// per record id, and a cooperative shouldStop flag allows early abort between
// records; the scan iterator is closed by try-with-resources either way.
// NOTE(review): the trailing extra '}' closes the enclosing class, whose header
// is outside this view.
private <R extends AbstractBaseRecord> void apply( RecordStore<R> store, ProgressListener progressListener, Predicate<? super R>... filters ) throws FAILURE { ResourceIterable<R> iterable = Scanner.scan( store, true, filters ); try ( ResourceIterator<R> scan = iterable.iterator() ) { while ( scan.hasNext() ) { R record = scan.next(); if ( shouldStop ) { break; } store.accept( this, record ); progressListener.set( record.getId() ); } progressListener.done(); } } }
// Reads the record following the one currently held in 'record', reusing an
// already-positioned page cursor: the next id is record.getId() + 1, and the
// cursor only advances to the next page when the offset has run past the page
// size or no page is pinned yet (pageId < 0). Rejects cursors whose page id is
// below -1 (-1 is the "not initialized" sentinel). IOExceptions are wrapped in
// UnderlyingStorageException. NOTE(review): verifyAfterNotRead/readRecordFromPage
// semantics are defined elsewhere in this class — assumed to handle the
// RecordLoad mode; confirm against the full source.
@Override public void nextRecordByCursor( RECORD record, RecordLoad mode, PageCursor cursor ) throws UnderlyingStorageException { if ( cursor.getCurrentPageId() < -1 ) { throw new IllegalArgumentException( "Pages are assumed to be positive or -1 if not initialized" ); } try { int offset = cursor.getOffset(); long id = record.getId() + 1; record.setId( id ); long pageId = cursor.getCurrentPageId(); if ( offset >= pagedFile.pageSize() || pageId < 0 ) { if ( !cursor.next() ) { verifyAfterNotRead( record, mode ); return; } cursor.setOffset( 0 ); } readRecordFromPage( id, record, mode, cursor ); } catch ( IOException e ) { throw new UnderlyingStorageException( e ); } }
// Writes a single record at its id-derived byte offset (id * recordSize) in the
// paged store file under a shared write lock. In-use records are run through
// format.prepare(...) first; assertedNext/assertWithinBounds guard cursor
// positioning and record-size overruns. NOTE(review): offset computation assumes
// the record fits on page 0's addressing via Math.toIntExact — an id large
// enough to overflow int would throw ArithmeticException; confirm callers
// restrict ids accordingly.
private <R extends AbstractBaseRecord> void writeRecord( R record, RecordFormat<R> format, PagedFile storeFile, int recordSize, BatchingIdSequence idSequence ) throws IOException { try ( PageCursor cursor = storeFile.io( 0, PagedFile.PF_SHARED_WRITE_LOCK ) ) { assertedNext( cursor ); if ( record.inUse() ) { format.prepare( record, recordSize, idSequence ); } int offset = Math.toIntExact( record.getId() * recordSize ); cursor.setOffset( offset ); format.write( record, cursor, recordSize ); assertWithinBounds( record, cursor, "writing" ); } }
// NOTE(review): this line appears truncated by extraction — the method's opening
// '{' and the remainder of its body are missing. What is visible: the record's
// id is validated against the format's max id before any update is applied.
@Override public void updateRecord( RECORD record ) long id = record.getId(); IdValidator.assertValidId( getIdType(), id, recordFormat.getMaxId() );
public BaseCommand( RECORD before, RECORD after ) { setup( after.getId(), Mode.fromRecordState( after ) ); this.before = before; this.after = after; }
private void track( RecordStore<?> store, AbstractBaseRecord record )
{
    // Track the highest unit the record occupies; a record without a
    // secondary unit contributes -1 for that component, leaving max() to
    // pick the primary id.
    long candidate = max( record.getId(),
            record.requiresSecondaryUnit() ? record.getSecondaryUnitId() : -1 );
    HighId tracker = highIds.get( store );
    if ( tracker == null )
    {
        highIds.put( store, new HighId( candidate ) );
    }
    else
    {
        tracker.track( candidate );
    }
}
// Commits a batch of records to the store: the id sequence is resolved once
// per batch, then every record that is non-null, in use, and not a reserved id
// is prepared and written. The per-batch count is accumulated into
// recordsUpdated. NOTE(review): 'sender' is unused here — presumably required
// by the step interface; confirm against the superclass contract.
@Override protected void process( RECORD[] batch, BatchSender sender ) { LongFunction<IdSequence> idSequence = prepareIdSequence.apply( store ); int recordsUpdatedInThisBatch = 0; for ( RECORD record : batch ) { if ( record != null && record.inUse() && !IdValidator.isReservedId( record.getId() ) ) { store.prepareForCommit( record, idSequence.apply( record.getId() ) ); store.updateRecord( record ); recordsUpdatedInThisBatch++; } } recordsUpdated.add( recordsUpdatedInThisBatch ); }
// Full-store scan: every record produced by Scanner.scan (honoring the given
// filters) is handed to this visitor through store.accept(this, record), with
// per-id progress reporting and a shouldStop flag checked between records for
// cooperative cancellation. The scan iterator is closed via try-with-resources.
// NOTE(review): the trailing extra '}' closes the enclosing class, whose header
// is outside this view.
private <R extends AbstractBaseRecord> void apply( RecordStore<R> store, ProgressListener progressListener, Predicate<? super R>... filters ) throws FAILURE { ResourceIterable<R> iterable = Scanner.scan( store, true, filters ); try ( ResourceIterator<R> scan = iterable.iterator() ) { while ( scan.hasNext() ) { R record = scan.next(); if ( shouldStop ) { break; } store.accept( this, record ); progressListener.set( record.getId() ); } progressListener.done(); } } }
// NOTE(review): truncated by extraction — the opening '{' and the rest of the
// method body are missing from this view. Visible behavior: the record id is
// validated against the format's maximum before the update proceeds.
@Override public void updateRecord( RECORD record ) long id = record.getId(); IdValidator.assertValidId( getIdType(), id, recordFormat.getMaxId() );