/**
 * Generates a set of {@link AttributeUpdate}s that set the initial Attributes on a newly create Table Segment.
 * TODO cleanup as described in https://github.com/pravega/pravega/issues/3273.
 *
 * Attributes:
 * * {@link Attributes#TABLE_INDEX_OFFSET} is initialized to 0.
 * * {@link Attributes#TABLE_ENTRY_COUNT} is initialized to 0.
 * * {@link Attributes#TABLE_BUCKET_COUNT} is initialized to 0.
 *
 * @return A Collection of {@link AttributeUpdate}s.
 */
static Collection<AttributeUpdate> getInitialTableAttributes() {
    // All table-related core attributes begin at zero on a fresh segment.
    return Stream.of(Attributes.TABLE_INDEX_OFFSET, Attributes.TABLE_ENTRY_COUNT, Attributes.TABLE_BUCKET_COUNT)
            .map(attributeId -> new AttributeUpdate(attributeId, AttributeUpdateType.None, 0L))
            .collect(Collectors.toList());
}
/**
 * Generates an AttributeUpdate that removes a Backpointer, whether it exists or not.
 * Removal is expressed by replacing the attribute's value with {@link Attributes#NULL_ATTRIBUTE_VALUE}.
 *
 * @param fromOffset The offset at which the Backpointer originates.
 */
private AttributeUpdate generateBackpointerRemoval(long fromOffset) {
    UUID backpointerKey = getBackpointerAttributeKey(fromOffset);
    return new AttributeUpdate(backpointerKey, AttributeUpdateType.Replace, Attributes.NULL_ATTRIBUTE_VALUE);
}
/**
 * Generates an AttributeUpdate that creates a new or updates an existing Backpointer.
 *
 * @param fromOffset The offset at which the Backpointer originates.
 * @param toOffset   The offset at which the Backpointer ends.
 */
private AttributeUpdate generateBackpointerUpdate(long fromOffset, long toOffset) {
    UUID backpointerKey = getBackpointerAttributeKey(fromOffset);
    return new AttributeUpdate(backpointerKey, AttributeUpdateType.Replace, toOffset);
}
/**
 * Builds the attribute updates for recording a writer's latest event number.
 *
 * @param clientId      The writer whose event number attribute is updated.
 * @param eventNum      The new event number to set.
 * @param previousValue The expected current value (conditional update via ReplaceIfEquals).
 * @param eventCount    The number of events to add to the segment's EVENT_COUNT.
 */
private Collection<AttributeUpdate> updateEventNumber(UUID clientId, long eventNum, long previousValue, long eventCount) {
    // Conditional: only succeeds if the writer's attribute still holds previousValue.
    AttributeUpdate eventNumberUpdate = new AttributeUpdate(clientId, AttributeUpdateType.ReplaceIfEquals, eventNum, previousValue);
    AttributeUpdate eventCountUpdate = new AttributeUpdate(EVENT_COUNT, AttributeUpdateType.Accumulate, eventCount);
    return Arrays.asList(eventNumberUpdate, eventCountUpdate);
}
/**
 * Creates one Accumulate(1) update for every attribute id in ATTRIBUTES.
 */
private Collection<AttributeUpdate> createAttributeUpdates() {
    Collection<AttributeUpdate> updates = new ArrayList<>();
    ATTRIBUTES.forEach(attributeId -> updates.add(new AttributeUpdate(attributeId, AttributeUpdateType.Accumulate, 1)));
    return updates;
}
/**
 * Creates a Replace update for every given attribute id, using the current nano-time as the value.
 *
 * @param attributes The attribute ids to update.
 */
private Collection<AttributeUpdate> createAttributeUpdates(UUID[] attributes) {
    Collection<AttributeUpdate> updates = new ArrayList<>(attributes.length);
    for (UUID attributeId : attributes) {
        updates.add(new AttributeUpdate(attributeId, AttributeUpdateType.Replace, System.nanoTime()));
    }
    return updates;
}
/**
 * Creates one update per entry in ATTRIBUTE_UPDATE_TYPES, each with a random attribute id
 * and the next generated attribute value.
 */
private Collection<AttributeUpdate> createAttributeUpdates() {
    Collection<AttributeUpdate> updates = new ArrayList<>(ATTRIBUTE_UPDATE_TYPES.length);
    for (AttributeUpdateType updateType : ATTRIBUTE_UPDATE_TYPES) {
        updates.add(new AttributeUpdate(UUID.randomUUID(), updateType, NEXT_ATTRIBUTE_VALUE.get()));
    }
    return updates;
}
/** * Generates conditional {@link AttributeUpdate}s that update the values for Core Attributes representing the indexing * state of the Table Segment. * * @param currentOffset The offset from which this indexing batch began. This will be checked against {@link Attributes#TABLE_INDEX_OFFSET}. * @param newOffset The new offset to set for {@link Attributes#TABLE_INDEX_OFFSET}. * @param update A {@link UpdateInstructions} object to collect updates into. */ private void generateTableAttributeUpdates(long currentOffset, long newOffset, UpdateInstructions update) { // Add an Update for the TABLE_INDEX_OFFSET to indicate we have indexed everything up to this offset. Preconditions.checkArgument(currentOffset <= newOffset, "newOffset must be larger than existingOffset"); update.withAttribute(new AttributeUpdate(Attributes.TABLE_INDEX_OFFSET, AttributeUpdateType.ReplaceIfEquals, newOffset, currentOffset)); // Update Bucket and Entry counts. if (update.getEntryCountDelta() != 0) { update.withAttribute(new AttributeUpdate(Attributes.TABLE_ENTRY_COUNT, AttributeUpdateType.Accumulate, update.getEntryCountDelta())); } if (update.getBucketCountDelta() != 0) { update.withAttribute(new AttributeUpdate(Attributes.TABLE_BUCKET_COUNT, AttributeUpdateType.Accumulate, update.getBucketCountDelta())); } }
/**
 * Creates one attribute update per {@link AttributeUpdateType}, each with a random id and a
 * monotonically increasing value (also used as the comparison value).
 */
static Collection<AttributeUpdate> createAttributes() {
    ArrayList<AttributeUpdate> updates = new ArrayList<>();
    long nextValue = 0;
    for (AttributeUpdateType updateType : AttributeUpdateType.values()) {
        nextValue++;
        // Value and comparison value are intentionally identical.
        updates.add(new AttributeUpdate(UUID.randomUUID(), updateType, nextValue, nextValue));
    }
    return updates;
} }
/**
 * Generates attribute updates for the core and extended test attributes (core +1, extended +13),
 * applies them directly to the given metadata, and returns them.
 *
 * @param segmentMetadata The segment metadata to read current values from and apply the updates to.
 */
private Collection<AttributeUpdate> generateAttributeUpdates(UpdateableSegmentMetadata segmentMetadata) {
    long nextCoreValue = segmentMetadata.getAttributes().getOrDefault(CORE_ATTRIBUTE_ID, 0L) + 1;
    long nextExtendedValue = segmentMetadata.getAttributes().getOrDefault(EXTENDED_ATTRIBUTE_ID, 0L) + 13;

    List<AttributeUpdate> attributeUpdates = new ArrayList<>();
    attributeUpdates.add(new AttributeUpdate(CORE_ATTRIBUTE_ID, AttributeUpdateType.Accumulate, nextCoreValue));
    attributeUpdates.add(new AttributeUpdate(EXTENDED_ATTRIBUTE_ID, AttributeUpdateType.Replace, nextExtendedValue));

    // Mirror the updates into the metadata so it reflects the post-update state.
    HashMap<UUID, Long> attributeValues = new HashMap<>();
    attributeUpdates.forEach(au -> attributeValues.put(au.getAttributeId(), au.getValue()));
    segmentMetadata.updateAttributes(attributeValues);
    return attributeUpdates;
}
/**
 * Creates the given number of attribute updates, alternating between core-style ids
 * (even indices: fixed MSB of Long.MIN_VALUE) and random extended ids (odd indices),
 * cycling through all {@link AttributeUpdateType}s.
 *
 * @param count The number of updates to create.
 */
private Collection<AttributeUpdate> createAttributeUpdates(int count) {
    Collection<AttributeUpdate> result = new ArrayList<>(count);
    // Hoisted out of the loop: AttributeUpdateType.values() allocates a fresh array on every call,
    // and the original invoked it twice per iteration.
    AttributeUpdateType[] updateTypes = AttributeUpdateType.values();
    for (int i = 0; i < count; i++) {
        boolean isCore = i % 2 == 0;
        UUID id = isCore ? new UUID(Long.MIN_VALUE, i) : UUID.randomUUID();
        AttributeUpdateType ut = updateTypes[i % updateTypes.length];
        result.add(new AttributeUpdate(id, ut, i, i));
    }
    return result;
}
/**
 * Generates one or more {@link AttributeUpdate}s that will delete a {@link TableBucket}.
 * No-op if the bucket does not exist.
 *
 * @param bucket The {@link TableBucket} to delete.
 * @param update A {@link UpdateInstructions} object to collect updates into.
 */
private void generateBucketDelete(TableBucket bucket, UpdateInstructions update) {
    if (!bucket.exists()) {
        // Nothing to delete.
        return;
    }
    // Deletion is expressed by replacing the bucket's attribute with the NULL sentinel value.
    update.withAttribute(new AttributeUpdate(bucket.getHash(), AttributeUpdateType.Replace, Attributes.NULL_ATTRIBUTE_VALUE));
    update.bucketRemoved();
}
/**
 * Persists the given Append into the segment store, attaching the writer's event-number
 * attribute update (conditional on the last recorded event number) and the event count.
 *
 * @param append The append to store.
 * @return A CompletableFuture that completes when the append has been stored.
 */
private CompletableFuture<Void> storeAppend(Append append) {
    long lastEventNumber;
    synchronized (lock) {
        // NOTE(review): unboxing here NPEs if no entry exists for this (segment, writer) pair;
        // presumably an entry is always registered before the first append — confirm against callers.
        lastEventNumber = latestEventNumbers.get(Pair.of(append.getSegment(), append.getWriterId()));
    }
    List<AttributeUpdate> attributeUpdates = Arrays.asList(
            new AttributeUpdate(append.getWriterId(), AttributeUpdateType.ReplaceIfEquals, append.getEventNumber(), lastEventNumber),
            new AttributeUpdate(EVENT_COUNT, AttributeUpdateType.Accumulate, append.getEventCount()));

    // Copy the payload out of the (read-only view of the) buffer.
    ByteBuf dataBuffer = append.getData().asReadOnly();
    byte[] payload = new byte[dataBuffer.readableBytes()];
    dataBuffer.readBytes(payload);

    // Conditional appends carry an expected segment length; unconditional ones do not.
    return append.isConditional()
            ? store.append(append.getSegment(), append.getExpectedLength(), payload, attributeUpdates, TIMEOUT)
            : store.append(append.getSegment(), payload, attributeUpdates, TIMEOUT);
}
/**
 * Generates one or more {@link AttributeUpdate}s that will create or update the necessary Table Buckets entries
 * in the Segment's Extended Attributes.
 *
 * @param bucket       The Bucket to create or update.
 * @param bucketOffset The Bucket's new offset (must be non-negative).
 * @param update       A {@link UpdateInstructions} object to collect updates into.
 */
private void generateBucketUpdate(TableBucket bucket, long bucketOffset, UpdateInstructions update) {
    assert bucketOffset >= 0;
    // Capture existence before recording the update, then count newly created buckets.
    boolean isNewBucket = !bucket.exists();
    update.withAttribute(new AttributeUpdate(bucket.getHash(), AttributeUpdateType.Replace, bucketOffset));
    if (isNewBucket) {
        update.bucketAdded();
    }
}
/**
 * Deserializes a single AttributeUpdate (revision 00) from the given input.
 * Reads must occur in serialization order: attribute id, update type, value, comparison value.
 *
 * @param source The input to read from.
 * @throws IOException If the input cannot be read.
 */
private AttributeUpdate readAttributeUpdate00(RevisionDataInput source) throws IOException {
    UUID attributeId = source.readUUID();
    AttributeUpdateType updateType = AttributeUpdateType.get(source.readByte());
    long value = source.readLong();
    long comparisonValue = source.readLong();
    return new AttributeUpdate(attributeId, updateType, value, comparisonValue);
} }
private void initializeSegment() { // Populate table-related attributes. this.segmentMock.updateAttributes(TableStore.getInitialTableAttributes(), TIMEOUT).join(); // Pre-populate the TABLE_INDEX_OFFSET. this.segmentMock.updateAttributes( Collections.singleton(new AttributeUpdate(Attributes.TABLE_INDEX_OFFSET, AttributeUpdateType.Replace, INITIAL_LAST_INDEXED_OFFSET)), TIMEOUT).join(); this.segmentMock.append(new byte[(int) INITIAL_LAST_INDEXED_OFFSET], null, TIMEOUT).join(); }
/**
 * Deserializes a single AttributeUpdate (revision 00) from the given input.
 * The four reads must stay in serialization order: id, type byte, value, comparison value.
 *
 * @param source The input to read from.
 * @throws IOException If the input cannot be read.
 */
private AttributeUpdate readAttributeUpdate00(RevisionDataInput source) throws IOException {
    UUID id = source.readUUID();
    AttributeUpdateType type = AttributeUpdateType.get(source.readByte());
    long newValue = source.readLong();
    long expectedValue = source.readLong();
    return new AttributeUpdate(id, type, newValue, expectedValue);
} }
/**
 * Builds an UpdateAttributesOperation for the given segment (core attribute +1, extended +13),
 * applies the same values directly to the segment's metadata, and assigns the operation the
 * next sequence number from the container metadata.
 *
 * @param segmentId The id of the segment to generate the operation for.
 * @param context   The test context providing container metadata.
 */
private UpdateAttributesOperation generateUpdateAttributesAndUpdateMetadata(long segmentId, TestContext context) {
    UpdateableSegmentMetadata segmentMetadata = context.containerMetadata.getStreamSegmentMetadata(segmentId);
    long nextCoreValue = segmentMetadata.getAttributes().getOrDefault(CORE_ATTRIBUTE_ID, 0L) + 1;
    long nextExtendedValue = segmentMetadata.getAttributes().getOrDefault(EXTENDED_ATTRIBUTE_ID, 0L) + 13;

    List<AttributeUpdate> attributeUpdates = new ArrayList<>();
    attributeUpdates.add(new AttributeUpdate(CORE_ATTRIBUTE_ID, AttributeUpdateType.Accumulate, nextCoreValue));
    attributeUpdates.add(new AttributeUpdate(EXTENDED_ATTRIBUTE_ID, AttributeUpdateType.Replace, nextExtendedValue));

    // Keep the metadata in sync with the operation we are about to return.
    HashMap<UUID, Long> attributeValues = new HashMap<>();
    attributeUpdates.forEach(au -> attributeValues.put(au.getAttributeId(), au.getValue()));
    segmentMetadata.updateAttributes(attributeValues);

    UpdateAttributesOperation op = new UpdateAttributesOperation(segmentId, attributeUpdates);
    op.setSequenceNumber(context.containerMetadata.nextOperationSequenceNumber());
    return op;
}
/**
 * Creates and processes an append of the given length (with CREATION_TIME and EVENT_COUNT
 * attribute updates) through the updater, then mirrors the resulting length and attribute
 * changes into the reference metadata, if one was supplied.
 *
 * @param segmentId         The segment to append to.
 * @param length            The number of (zero) bytes to append.
 * @param updater           The metadata updater to process the operation with.
 * @param referenceMetadata Optional reference metadata to keep in sync; may be null.
 */
private void recordAppend(long segmentId, int length, OperationMetadataUpdater updater, UpdateableContainerMetadata referenceMetadata) throws Exception {
    byte[] data = new byte[length];
    val attributeUpdates = Arrays.asList(
            new AttributeUpdate(Attributes.CREATION_TIME, AttributeUpdateType.Replace, NEXT_ATTRIBUTE_VALUE.get()),
            new AttributeUpdate(Attributes.EVENT_COUNT, AttributeUpdateType.Accumulate, NEXT_ATTRIBUTE_VALUE.get()));
    val op = new StreamSegmentAppendOperation(segmentId, data, attributeUpdates);
    process(op, updater);

    if (referenceMetadata == null) {
        return;
    }

    // Reflect the append's effects (length + attributes) in the reference metadata.
    val rsm = referenceMetadata.getStreamSegmentMetadata(segmentId);
    rsm.setLength(rsm.getLength() + length);
    val attributes = new HashMap<UUID, Long>();
    for (AttributeUpdate au : op.getAttributeUpdates()) {
        attributes.put(au.getAttributeId(), au.getValue());
    }
    rsm.updateAttributes(attributes);
}
/**
 * Handles an UpdateSegmentPolicy request: verifies the delegation token, then replaces the
 * segment's SCALE_POLICY_TYPE and SCALE_POLICY_RATE attributes in the segment store.
 * On success, acknowledges with SegmentPolicyUpdated and notifies the stats recorder (if any);
 * on failure, delegates to handleException.
 */
@Override
public void updateSegmentPolicy(UpdateSegmentPolicy updateSegmentPolicy) {
    final String operation = "updateSegmentPolicy";

    // Reject unauthorized requests up front; verifyToken handles the error response itself.
    if (!verifyToken(updateSegmentPolicy.getSegment(), updateSegmentPolicy.getRequestId(), updateSegmentPolicy.getDelegationToken(), operation)) {
        return;
    }

    // Both policy attributes are unconditionally replaced with the requested values.
    Collection<AttributeUpdate> attributes = Arrays.asList(
            new AttributeUpdate(SCALE_POLICY_TYPE, AttributeUpdateType.Replace, (long) updateSegmentPolicy.getScaleType()),
            new AttributeUpdate(SCALE_POLICY_RATE, AttributeUpdateType.Replace, updateSegmentPolicy.getTargetRate()));

    log.info(updateSegmentPolicy.getRequestId(), "Updating segment policy {} ", updateSegmentPolicy);
    segmentStore.updateAttributes(updateSegmentPolicy.getSegment(), attributes, TIMEOUT)
            // Acknowledge to the client once the attributes are persisted.
            .thenRun(() -> connection.send(new SegmentPolicyUpdated(updateSegmentPolicy.getRequestId(), updateSegmentPolicy.getSegment())))
            .whenComplete((r, e) -> {
                if (e != null) {
                    handleException(updateSegmentPolicy.getRequestId(), updateSegmentPolicy.getSegment(), operation, e);
                } else {
                    // Stats recording is best-effort and optional (recorder may be absent).
                    if (statsRecorder != null) {
                        statsRecorder.policyUpdate(updateSegmentPolicy.getSegment(),
                                updateSegmentPolicy.getScaleType(), updateSegmentPolicy.getTargetRate());
                    }
                }
            });
}