public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this; if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); if (other.hasTableName()) { setTableName(other.getTableName()); if (other.hasLogSequenceNumber()) { setLogSequenceNumber(other.getLogSequenceNumber()); if (other.hasWriteTime()) { setWriteTime(other.getWriteTime()); if (other.hasClusterId()) { mergeClusterId(other.getClusterId()); if (other.hasFollowingKvCount()) { setFollowingKvCount(other.getFollowingKvCount()); if (other.hasNonceGroup()) { setNonceGroup(other.getNonceGroup()); if (other.hasNonce()) { setNonce(other.getNonce()); if (other.hasOrigSequenceNumber()) { setOrigSequenceNumber(other.getOrigSequenceNumber()); this.mergeUnknownFields(other.getUnknownFields());
// NOTE(review): truncated fragment — only the prologue of the generated
// buildPartial() is visible. The remainder (copying each set field from the
// builder into `result`, translating from_bitField0_ bits into to_bitField0_,
// and returning `result`) was lost in extraction; restore from the generated
// source before editing.
public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() {
  // Build the message backed by this builder's current state.
  org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result =
      new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this);
  // Snapshot of the builder's has-bits; to_bitField0_ accumulates the message's has-bits.
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
/**
 * Value equality for {@code WALEntry}: two entries are equal when each optional
 * field has the same presence and (when present) the same value, the repeated
 * key-value-bytes lists are equal, and the unknown-field sets are equal.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Reflexive fast path.
  if (obj == this) {
    return true;
  }
  // Non-WALEntry objects fall back to the superclass definition.
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry other =
      (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) obj;
  // Optional field pattern: presence must match, and values must match when present.
  boolean sameKey = hasKey() == other.hasKey()
      && (!hasKey() || getKey().equals(other.getKey()));
  return sameKey
      && getKeyValueBytesList().equals(other.getKeyValueBytesList())
      && hasAssociatedCellCount() == other.hasAssociatedCellCount()
      && (!hasAssociatedCellCount()
          || getAssociatedCellCount() == other.getAssociatedCellCount())
      && getUnknownFields().equals(other.getUnknownFields());
}
// NOTE(review): truncated fragment of the region-server WAL-replay path.
// Extraction dropped code (the right-hand side of `coprocessorHost` and
// several closing braces), so this does not compile as-is — restore the
// missing statements from the original file before editing.
// All entries in a replay batch must target the region of the first entry.
ByteString regionName = entries.get(0).getKey().getEncodedRegionName();
Region region = regionServer.getRegionByEncodedName(regionName.toStringUtf8());
RegionCoprocessorHost coprocessorHost =
// Reject batches mixing entries from different regions.
if (!regionName.equals(entry.getKey().getEncodedRegionName())) {
  throw new NotServingRegionException("Replay request contains entries from multiple " +
      "regions. First region:" + regionName.toStringUtf8() + " , other region:" +
      entry.getKey().getEncodedRegionName());
// Nonces default to NO_NONCE when the WAL key carries none.
long nonceGroup = entry.getKey().hasNonceGroup() ? entry.getKey().getNonceGroup() : HConstants.NO_NONCE;
long nonce = entry.getKey().hasNonce() ? entry.getKey().getNonce() : HConstants.NO_NONCE;
// Report the replayed nonce so live requests cannot reuse it.
regionServer.nonceManager.reportOperationFromWal(nonceGroup, nonce, entry.getKey().getWriteTime());
// Prefer the original sequence number when present (set for split/merge replays).
long replaySeqId = (entry.getKey().hasOrigSequenceNumber()) ?
    entry.getKey().getOrigSequenceNumber() : entry.getKey().getLogSequenceNumber();
OperationStatus[] result = doReplayBatchOp(region, edits, replaySeqId);
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this; if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); if (other.hasTableName()) { setTableName(other.getTableName()); if (other.hasLogSequenceNumber()) { setLogSequenceNumber(other.getLogSequenceNumber()); if (other.hasWriteTime()) { setWriteTime(other.getWriteTime()); if (other.hasClusterId()) { mergeClusterId(other.getClusterId()); if (other.hasFollowingKvCount()) { setFollowingKvCount(other.getFollowingKvCount()); if (other.hasNonceGroup()) { setNonceGroup(other.getNonceGroup()); if (other.hasNonce()) { setNonce(other.getNonce()); if (other.hasOrigSequenceNumber()) { setOrigSequenceNumber(other.getOrigSequenceNumber()); this.mergeUnknownFields(other.getUnknownFields());
// NOTE(review): truncated fragment converting a WALEntry into replayable
// mutations; the surrounding loop/branch structure and several closing braces
// were lost in extraction — restore from the original file before editing.
// Prefer the original sequence number when present so replayed edits keep
// their pre-split/merge ordering.
long replaySeqId = (entry.getKey().hasOrigSequenceNumber()) ?
    entry.getKey().getOrigSequenceNumber() : entry.getKey().getLogSequenceNumber();
int count = entry.getAssociatedCellCount();
List<MutationReplay> mutations = new ArrayList<MutationReplay>();
m = new Put(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
// Nonces default to NO_NONCE when the WAL key carries none.
long nonceGroup = entry.getKey().hasNonceGroup() ? entry.getKey().getNonceGroup() : HConstants.NO_NONCE;
long nonce = entry.getKey().hasNonce() ? entry.getKey().getNonce() : HConstants.NO_NONCE;
mutations.add(new MutationReplay(MutationType.PUT, m, nonceGroup, nonce));
// Rebuild the replication cluster-id list from the WAL key.
List<UUID> clusterIds = new ArrayList<UUID>(walKeyProto.getClusterIdsCount());
for (HBaseProtos.UUID uuid : entry.getKey().getClusterIdsList()) {
  clusterIds.add(new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits()));
// Reconstruct the HLogKey using replaySeqId instead of the raw log sequence.
key = new HLogKey(walKeyProto.getEncodedRegionName().toByteArray(),
    TableName.valueOf(walKeyProto.getTableName().toByteArray()), replaySeqId,
    walKeyProto.getWriteTime(), clusterIds,
    walKeyProto.getNonceGroup(), walKeyProto.getNonce(), null);
logEntry.setFirst(key);
logEntry.setSecond(val);
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this; if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); if (other.hasTableName()) { setTableName(other.getTableName()); if (other.hasLogSequenceNumber()) { setLogSequenceNumber(other.getLogSequenceNumber()); if (other.hasWriteTime()) { setWriteTime(other.getWriteTime()); if (other.hasClusterId()) { mergeClusterId(other.getClusterId()); if (other.hasFollowingKvCount()) { setFollowingKvCount(other.getFollowingKvCount()); if (other.hasNonceGroup()) { setNonceGroup(other.getNonceGroup()); if (other.hasNonce()) { setNonce(other.getNonce()); if (other.hasOrigSequenceNumber()) { setOrigSequenceNumber(other.getOrigSequenceNumber()); this.mergeUnknownFields(other.getUnknownFields());
// NOTE(review): truncated fragment — only the prologue of the generated
// buildPartial() is visible. The remainder (copying each set field from the
// builder into `result`, translating from_bitField0_ bits into to_bitField0_,
// and returning `result`) was lost in extraction; restore from the generated
// source before editing.
public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() {
  // Build the message backed by this builder's current state.
  org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result =
      new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this);
  // Snapshot of the builder's has-bits; to_bitField0_ accumulates the message's has-bits.
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) { if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this; if (other.hasEncodedRegionName()) { setEncodedRegionName(other.getEncodedRegionName()); if (other.hasTableName()) { setTableName(other.getTableName()); if (other.hasLogSequenceNumber()) { setLogSequenceNumber(other.getLogSequenceNumber()); if (other.hasWriteTime()) { setWriteTime(other.getWriteTime()); if (other.hasClusterId()) { mergeClusterId(other.getClusterId()); if (other.hasFollowingKvCount()) { setFollowingKvCount(other.getFollowingKvCount()); if (other.hasNonceGroup()) { setNonceGroup(other.getNonceGroup()); if (other.hasNonce()) { setNonce(other.getNonce()); if (other.hasOrigSequenceNumber()) { setOrigSequenceNumber(other.getOrigSequenceNumber()); this.mergeUnknownFields(other.getUnknownFields());
// NOTE(review): truncated fragment of a replication-sink apply loop; closing
// braces and intermediate statements were lost in extraction — restore from
// the original file before editing.
for (WALEntry entry : entries) {
  TableName table = TableName.valueOf(entry.getKey().getTableName().toByteArray());
  Cell previousCell = null;
  Mutation m = null;
  // NOTE(review): this `new Put(...)` result is discarded here — presumably it
  // was assigned to `m` in the original file; confirm before relying on it.
  new Put(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
  // Carry the source cluster ids so edits are not replicated back in a loop.
  List<UUID> clusterIds = new ArrayList<UUID>();
  for(HBaseProtos.UUID clusterId : entry.getKey().getClusterIdsList()){
    clusterIds.add(toUUID(clusterId));
// Batch-level metrics, recorded after all entries are applied.
this.metrics.setAgeOfLastAppliedOp(entries.get(size - 1).getKey().getWriteTime());
this.metrics.applyBatch(size);
this.totalReplicatedEdits.addAndGet(totalReplicated);
// NOTE(review): truncated fragment — only the prologue of the generated
// buildPartial() is visible. The remainder (copying each set field from the
// builder into `result`, translating from_bitField0_ bits into to_bitField0_,
// and returning `result`) was lost in extraction; restore from the generated
// source before editing.
public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() {
  // Build the message backed by this builder's current state.
  org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result =
      new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this);
  // Snapshot of the builder's has-bits; to_bitField0_ accumulates the message's has-bits.
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
// NOTE(review): truncated fragment — only the prologue of the generated
// buildPartial() is visible. The remainder (copying each set field from the
// builder into `result`, translating from_bitField0_ bits into to_bitField0_,
// and returning `result`) was lost in extraction; restore from the generated
// source before editing.
public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() {
  // Build the message backed by this builder's current state.
  org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result =
      new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this);
  // Snapshot of the builder's has-bits; to_bitField0_ accumulates the message's has-bits.
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
// NOTE(review): interior of the generated WALKey.equals(Object). The method
// header, the `}` closing each `if`, and the trailing fields/return are
// outside this fragment — restore from the generated source before editing.
// Pattern per optional field: presence must match, value must match when present.
result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
if (hasEncodedRegionName()) {
  result = result && getEncodedRegionName() .equals(other.getEncodedRegionName());
result = result && (hasTableName() == other.hasTableName());
if (hasTableName()) {
  result = result && getTableName() .equals(other.getTableName());
result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
if (hasLogSequenceNumber()) {
  result = result && (getLogSequenceNumber() == other.getLogSequenceNumber());
result = result && (hasWriteTime() == other.hasWriteTime());
if (hasWriteTime()) {
  result = result && (getWriteTime() == other.getWriteTime());
result = result && (hasClusterId() == other.hasClusterId());
if (hasClusterId()) {
  result = result && getClusterId() .equals(other.getClusterId());
// Repeated field: list equality, no presence bit.
result = result && getScopesList() .equals(other.getScopesList());
result = result && (hasFollowingKvCount() == other.hasFollowingKvCount());
if (hasFollowingKvCount()) {
// NOTE(review): interior of the generated WALKey.equals(Object). The method
// header, the `}` closing each `if`, and the trailing fields/return are
// outside this fragment — restore from the generated source before editing.
// Pattern per optional field: presence must match, value must match when present.
result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
if (hasEncodedRegionName()) {
  result = result && getEncodedRegionName() .equals(other.getEncodedRegionName());
result = result && (hasTableName() == other.hasTableName());
if (hasTableName()) {
  result = result && getTableName() .equals(other.getTableName());
result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
if (hasLogSequenceNumber()) {
  result = result && (getLogSequenceNumber() == other.getLogSequenceNumber());
result = result && (hasWriteTime() == other.hasWriteTime());
if (hasWriteTime()) {
  result = result && (getWriteTime() == other.getWriteTime());
result = result && (hasClusterId() == other.hasClusterId());
if (hasClusterId()) {
  result = result && getClusterId() .equals(other.getClusterId());
// Repeated field: list equality, no presence bit.
result = result && getScopesList() .equals(other.getScopesList());
result = result && (hasFollowingKvCount() == other.hasFollowingKvCount());
if (hasFollowingKvCount()) {
// NOTE(review): interior of the generated WALKey.equals(Object). The method
// header, the `}` closing each `if`, and the trailing fields/return are
// outside this fragment — restore from the generated source before editing.
// Pattern per optional field: presence must match, value must match when present.
result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
if (hasEncodedRegionName()) {
  result = result && getEncodedRegionName() .equals(other.getEncodedRegionName());
result = result && (hasTableName() == other.hasTableName());
if (hasTableName()) {
  result = result && getTableName() .equals(other.getTableName());
result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
if (hasLogSequenceNumber()) {
  result = result && (getLogSequenceNumber() == other.getLogSequenceNumber());
result = result && (hasWriteTime() == other.hasWriteTime());
if (hasWriteTime()) {
  result = result && (getWriteTime() == other.getWriteTime());
result = result && (hasClusterId() == other.hasClusterId());
if (hasClusterId()) {
  result = result && getClusterId() .equals(other.getClusterId());
// Repeated field: list equality, no presence bit.
result = result && getScopesList() .equals(other.getScopesList());
result = result && (hasFollowingKvCount() == other.hasFollowingKvCount());
if (hasFollowingKvCount()) {
// NOTE(review): truncated fragment of a SEP event-building loop (Cell
// variant); statements between these lines were lost in extraction — restore
// from the original file before editing.
// Entries written before the subscription timestamp get a null table name —
// presumably filtered downstream; TODO confirm.
TableName tableName = (entry.getKey().getWriteTime() < subscriptionTimestamp) ? null :
    TableName.valueOf(entry.getKey().getTableName().toByteArray());
// Group cells and payloads by row key before scheduling events.
Multimap<ByteBuffer, Cell> keyValuesPerRowKey = ArrayListMultimap.create();
final Map<ByteBuffer, byte[]> payloadPerRowKey = Maps.newHashMap();
payloadPerRowKey.get(rowKeyBuffer));
eventExecutor.scheduleSepEvent(sepEvent);
// Track the newest write time seen so progress reporting is monotonic.
lastProcessedTimestamp = Math.max(lastProcessedTimestamp, entry.getKey().getWriteTime());
// NOTE(review): truncated fragment of a SEP event-building loop (KeyValue
// variant of the fragment above in this file); statements between these lines
// were lost in extraction — restore from the original file before editing.
// Entries written before the subscription timestamp get a null table name —
// presumably filtered downstream; TODO confirm.
TableName tableName = (entry.getKey().getWriteTime() < subscriptionTimestamp) ? null :
    TableName.valueOf(entry.getKey().getTableName().toByteArray());
// Group key-values and payloads by row key before scheduling events.
Multimap<ByteBuffer, KeyValue> keyValuesPerRowKey = ArrayListMultimap.create();
final Map<ByteBuffer, byte[]> payloadPerRowKey = Maps.newHashMap();
payloadPerRowKey.get(rowKeyBuffer));
eventExecutor.scheduleSepEvent(sepEvent);
// Track the newest write time seen so progress reporting is monotonic.
lastProcessedTimestamp = Math.max(lastProcessedTimestamp, entry.getKey().getWriteTime());
// NOTE(review): interior of the generated WALKey.hashCode(). The method
// header, the `}` closing each `if`, and the tail (memoization + return) are
// outside this fragment — restore from the generated source before editing.
hash = (19 * hash) + getDescriptorForType().hashCode();
// Each present field folds its tag number (37) and value hash (53) into the hash.
if (hasEncodedRegionName()) {
  hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getEncodedRegionName().hashCode();
if (hasTableName()) {
  hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getTableName().hashCode();
if (hasLogSequenceNumber()) {
  hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER;
  hash = (53 * hash) + hashLong(getLogSequenceNumber());
if (hasWriteTime()) {
  hash = (37 * hash) + WRITE_TIME_FIELD_NUMBER;
  hash = (53 * hash) + hashLong(getWriteTime());
if (hasClusterId()) {
  hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER;
  hash = (53 * hash) + getClusterId().hashCode();
// Repeated field: contributes only when non-empty.
if (getScopesCount() > 0) {
  hash = (37 * hash) + SCOPES_FIELD_NUMBER;
  hash = (53 * hash) + getScopesList().hashCode();
if (hasFollowingKvCount()) {
  hash = (37 * hash) + FOLLOWING_KV_COUNT_FIELD_NUMBER;
  hash = (53 * hash) + getFollowingKvCount();
// NOTE(review): interior of the generated WALKey.hashCode(). The method
// header, the `}` closing each `if`, and the tail (memoization + return) are
// outside this fragment — restore from the generated source before editing.
hash = (19 * hash) + getDescriptorForType().hashCode();
// Each present field folds its tag number (37) and value hash (53) into the hash.
if (hasEncodedRegionName()) {
  hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getEncodedRegionName().hashCode();
if (hasTableName()) {
  hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getTableName().hashCode();
if (hasLogSequenceNumber()) {
  hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER;
  hash = (53 * hash) + hashLong(getLogSequenceNumber());
if (hasWriteTime()) {
  hash = (37 * hash) + WRITE_TIME_FIELD_NUMBER;
  hash = (53 * hash) + hashLong(getWriteTime());
if (hasClusterId()) {
  hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER;
  hash = (53 * hash) + getClusterId().hashCode();
// Repeated field: contributes only when non-empty.
if (getScopesCount() > 0) {
  hash = (37 * hash) + SCOPES_FIELD_NUMBER;
  hash = (53 * hash) + getScopesList().hashCode();
if (hasFollowingKvCount()) {
  hash = (37 * hash) + FOLLOWING_KV_COUNT_FIELD_NUMBER;
  hash = (53 * hash) + getFollowingKvCount();
// NOTE(review): interior of the generated WALKey.hashCode(). The method
// header, the `}` closing each `if`, and the tail (memoization + return) are
// outside this fragment — restore from the generated source before editing.
hash = (19 * hash) + getDescriptorForType().hashCode();
// Each present field folds its tag number (37) and value hash (53) into the hash.
if (hasEncodedRegionName()) {
  hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getEncodedRegionName().hashCode();
if (hasTableName()) {
  hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getTableName().hashCode();
if (hasLogSequenceNumber()) {
  hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER;
  hash = (53 * hash) + hashLong(getLogSequenceNumber());
if (hasWriteTime()) {
  hash = (37 * hash) + WRITE_TIME_FIELD_NUMBER;
  hash = (53 * hash) + hashLong(getWriteTime());
if (hasClusterId()) {
  hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER;
  hash = (53 * hash) + getClusterId().hashCode();
// Repeated field: contributes only when non-empty.
if (getScopesCount() > 0) {
  hash = (37 * hash) + SCOPES_FIELD_NUMBER;
  hash = (53 * hash) + getScopesList().hashCode();
if (hasFollowingKvCount()) {
  hash = (37 * hash) + FOLLOWING_KV_COUNT_FIELD_NUMBER;
  hash = (53 * hash) + getFollowingKvCount();