// Rebuild the per-column-family max sequence id map from the protobuf list.
// TreeMap with BYTES_COMPARATOR so byte[] keys compare by content, not identity
// (a plain HashMap would never find an equal-but-distinct byte[] key).
// NOTE(review): the for-loop's closing brace is outside this chunk.
Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR); for (StoreSequenceId storeSeqId : ids.getStoreSequenceIdList()) { maxSeqIdInStores.put(storeSeqId.getFamilyName().toByteArray(), storeSeqId.getSequenceId());
/**
 * Converts a family-name-to-sequence-id map into a list of protobuf
 * {@code StoreSequenceId} messages, one entry per column family.
 */
private static List<ClusterStatusProtos.StoreSequenceId> toStoreSequenceId(
    Map<byte[], Long> ids) {
  return ids.entrySet().stream().map(entry -> {
    ClusterStatusProtos.StoreSequenceId.Builder builder =
        ClusterStatusProtos.StoreSequenceId.newBuilder();
    builder.setFamilyName(UnsafeByteOperations.unsafeWrap(entry.getKey()));
    builder.setSequenceId(entry.getValue());
    return builder.build();
  }).collect(Collectors.toList());
}
// protoc-generated: do not hand-edit; regenerate from the .proto instead.
// Computes a structural hash over the set fields plus unknownFields, then
// memoizes it. 0 is the "not yet computed" sentinel, so a result that
// happened to equal 0 would simply be recomputed on each call.
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasFamilyName()) { hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER; hash = (53 * hash) + getFamilyName().hashCode(); } if (hasSequenceId()) { hash = (37 * hash) + SEQUENCE_ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong( getSequenceId()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; }
// Before the flush there is no last-flushed id, but the store already has a
// positive sequence id; after the flush the last-flushed id should advance
// past it and match the store's id.
// NOTE(review): 'ids' is not visibly re-fetched after flush() — presumably it
// is refreshed elsewhere or is a live view; otherwise the post-flush
// assertions would read stale data. TODO confirm against the full test body.
assertEquals(HConstants.NO_SEQNUM, ids.getLastFlushedSequenceId()); long storeSequenceId = ids.getStoreSequenceId(0).getSequenceId(); assertTrue(storeSequenceId > 0); testUtil.getAdmin().flush(tableName); assertTrue(ids.getLastFlushedSequenceId() + " > " + storeSequenceId, ids.getLastFlushedSequenceId() > storeSequenceId); assertEquals(ids.getLastFlushedSequenceId(), ids.getStoreSequenceId(0).getSequenceId()); table.close();
/**
 * Converts a protobuf {@code RegionLoad} message into a {@link RegionMetrics} view.
 * <p>
 * Size units (KB vs MB) mirror the protobuf field names ({@code *SizeKB},
 * {@code *SizeMB}); data locality defaults to {@code 0.0f} when absent.
 *
 * @param regionLoadPB the protobuf region load message to convert
 * @return an equivalent {@code RegionMetrics}
 */
public static RegionMetrics toRegionMetrics(ClusterStatusProtos.RegionLoad regionLoadPB) {
  return RegionMetricsBuilder
    .newBuilder(regionLoadPB.getRegionSpecifier().getValue().toByteArray())
    .setBloomFilterSize(new Size(regionLoadPB.getTotalStaticBloomSizeKB(),
      Size.Unit.KILOBYTE))
    .setCompactedCellCount(regionLoadPB.getCurrentCompactedKVs())
    .setCompactingCellCount(regionLoadPB.getTotalCompactingKVs())
    .setCompletedSequenceId(regionLoadPB.getCompleteSequenceId())
    .setDataLocality(regionLoadPB.hasDataLocality() ? regionLoadPB.getDataLocality() : 0.0f)
    .setFilteredReadRequestCount(regionLoadPB.getFilteredReadRequestsCount())
    .setStoreFileUncompressedDataIndexSize(new Size(regionLoadPB.getTotalStaticIndexSizeKB(),
      Size.Unit.KILOBYTE))
    .setLastMajorCompactionTimestamp(regionLoadPB.getLastMajorCompactionTs())
    .setMemStoreSize(new Size(regionLoadPB.getMemStoreSizeMB(), Size.Unit.MEGABYTE))
    .setReadRequestCount(regionLoadPB.getReadRequestsCount())
    .setWriteRequestCount(regionLoadPB.getWriteRequestsCount())
    .setStoreFileIndexSize(new Size(regionLoadPB.getStorefileIndexSizeKB(),
      Size.Unit.KILOBYTE))
    .setStoreFileRootLevelIndexSize(new Size(regionLoadPB.getRootIndexSizeKB(),
      Size.Unit.KILOBYTE))
    .setStoreCount(regionLoadPB.getStores())
    .setStoreFileCount(regionLoadPB.getStorefiles())
    .setStoreFileSize(new Size(regionLoadPB.getStorefileSizeMB(), Size.Unit.MEGABYTE))
    .setStoreSequenceIds(regionLoadPB.getStoreCompleteSequenceIdList().stream()
      .collect(Collectors.toMap(
        (ClusterStatusProtos.StoreSequenceId s) -> s.getFamilyName().toByteArray(),
        ClusterStatusProtos.StoreSequenceId::getSequenceId,
        // Families are unique per region; keep the first value on an
        // (unexpected) duplicate rather than throwing.
        (v1, v2) -> v1,
        // byte[] keys hash/compare by identity in the default HashMap, so a
        // lookup with an equal-but-distinct byte[] family name would always
        // miss. Use a content-ordered map, matching how the rest of the
        // codebase keys maps by family name.
        () -> new java.util.TreeMap<>(org.apache.hadoop.hbase.util.Bytes.BYTES_COMPARATOR))))
    .setUncompressedStoreFileSize(
      new Size(regionLoadPB.getStoreUncompressedSizeMB(), Size.Unit.MEGABYTE))
    .build();
}
// protoc-generated: do not hand-edit; regenerate from the .proto instead.
// Structural hash over set fields plus unknownFields, memoized with 0 as the
// "not yet computed" sentinel.
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasFamilyName()) { hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER; hash = (53 * hash) + getFamilyName().hashCode(); } if (hasSequenceId()) { hash = (37 * hash) + SEQUENCE_ID_FIELD_NUMBER; hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong( getSequenceId()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; }
// protoc-generated: do not hand-edit; regenerate from the .proto instead.
// Structural equality: both messages must agree on which fields are set, on
// each set field's value, and on unknownFields. Non-StoreSequenceId arguments
// are delegated to super.equals.
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId) obj; boolean result = true; result = result && (hasFamilyName() == other.hasFamilyName()); if (hasFamilyName()) { result = result && getFamilyName() .equals(other.getFamilyName()); } result = result && (hasSequenceId() == other.hasSequenceId()); if (hasSequenceId()) { result = result && (getSequenceId() == other.getSequenceId()); } result = result && unknownFields.equals(other.unknownFields); return result; }
// protoc-generated: newBuilderForType delegates to the static factory.
// NOTE(review): the body of newBuilder() continues beyond this chunk.
public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() {
// protoc-generated: do not hand-edit; regenerate from the .proto instead.
// Both family_name and sequence_id are required fields; the message is
// initialized only when both are set. Result is memoized as a byte:
// 1 = initialized, 0 = not, any other value = not yet computed.
public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasFamilyName()) { memoizedIsInitialized = 0; return false; } if (!hasSequenceId()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; }
// protoc-generated: do not hand-edit; regenerate from the .proto instead.
// Builds the message without an isInitialized() check, copying the builder's
// presence bits (bit 0 = family_name, bit 1 = sequence_id) and field values
// into the new instance.
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId buildPartial() { org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.familyName_ = familyName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.sequenceId_ = sequenceId_; result.bitField0_ = to_bitField0_; onBuilt(); return result; }
// protoc-generated: returns the shared immutable default instance.
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance(); }
/**
 * Returns the completed sequence id per store as protobuf messages, derived
 * from the wrapped metrics' family-to-sequence-id map.
 *
 * @return completed sequence id per store.
 * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
 *             Use {@link #getStoreSequenceId} instead.
 */
@Deprecated
public List<ClusterStatusProtos.StoreSequenceId> getStoreCompleteSequenceId() {
  return metrics.getStoreSequenceId().entrySet().stream()
      .map(entry -> {
        ClusterStatusProtos.StoreSequenceId.Builder builder =
            ClusterStatusProtos.StoreSequenceId.newBuilder();
        builder.setFamilyName(UnsafeByteOperations.unsafeWrap(entry.getKey()));
        builder.setSequenceId(entry.getValue());
        return builder.build();
      })
      .collect(Collectors.toList());
}
// protoc-generated: appends a new sub-builder (seeded from the default
// instance) to the repeated store_complete_sequence_id field.
/** * <pre> ** the most recent sequence Id of store from cache flush * </pre> * * <code>repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreCompleteSequenceIdBuilder() { return getStoreCompleteSequenceIdFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance()); } /**
// protoc-generated: appends a new sub-builder (seeded from the default
// instance) to the repeated store_sequence_id field.
/** * <code>repeated .hbase.pb.StoreSequenceId store_sequence_id = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreSequenceIdBuilder() { return getStoreSequenceIdFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance()); } /**
// protoc-generated: appends a new sub-builder (seeded from the default
// instance) to the repeated store_last_flushed_sequence_id field.
/** * <pre> ** the last WAL sequence id flushed from MemStore to HFile for stores of the region * </pre> * * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreLastFlushedSequenceIdBuilder() { return getStoreLastFlushedSequenceIdFieldBuilder().addBuilder( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance()); } /**
// protoc-generated: inserts a new sub-builder (seeded from the default
// instance) at the given index of the repeated
// store_last_flushed_sequence_id field.
/** * <pre> ** the last WAL sequence id flushed from MemStore to HFile for stores of the region * </pre> * * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreLastFlushedSequenceIdBuilder( int index) { return getStoreLastFlushedSequenceIdFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance()); } /**
// protoc-generated: do not hand-edit; regenerate from the .proto instead.
// Copies each field that is set on 'other' into this builder (overwriting any
// existing value), merges unknown fields, and no-ops on the default instance.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId other) { if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance()) return this; if (other.hasFamilyName()) { setFamilyName(other.getFamilyName()); } if (other.hasSequenceId()) { setSequenceId(other.getSequenceId()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; }
// protoc-generated: inserts a new sub-builder (seeded from the default
// instance) at the given index of the repeated store_complete_sequence_id
// field.
/** * <pre> ** the most recent sequence Id of store from cache flush * </pre> * * <code>repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreCompleteSequenceIdBuilder( int index) { return getStoreCompleteSequenceIdFieldBuilder().addBuilder( index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance()); } /**
// protoc-generated: clears the presence bit for family_name and resets the
// field to the default instance's value.
/** * <code>required bytes family_name = 1;</code> */ public Builder clearFamilyName() { bitField0_ = (bitField0_ & ~0x00000001); familyName_ = getDefaultInstance().getFamilyName(); onChanged(); return this; }
/**
 * Maps each (family name, sequence id) pair to a protobuf
 * {@code StoreSequenceId} message and collects the results into a list.
 */
private static List<ClusterStatusProtos.StoreSequenceId> toStoreSequenceId(
    Map<byte[], Long> ids) {
  return ids.entrySet().stream().map(familyToSeq -> {
    byte[] family = familyToSeq.getKey();
    long seqId = familyToSeq.getValue();
    return ClusterStatusProtos.StoreSequenceId.newBuilder()
        .setFamilyName(UnsafeByteOperations.unsafeWrap(family))
        .setSequenceId(seqId)
        .build();
  }).collect(Collectors.toList());
}