public org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto buildPartial() {
  // Materialize the message from this builder, copying each field and
  // translating the builder's presence bits into the message's bits.
  org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto built =
      new org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto(this);
  int builderBits = bitField0_;
  int messageBits = 0;
  // action (presence bit 0x1)
  if ((builderBits & 0x00000001) == 0x00000001) {
    messageBits |= 0x00000001;
  }
  built.action_ = action_;
  // blockPoolId (presence bit 0x2)
  if ((builderBits & 0x00000002) == 0x00000002) {
    messageBits |= 0x00000002;
  }
  built.blockPoolId_ = blockPoolId_;
  // blockIds: freeze the repeated field so the built message is immutable;
  // drop the "mutable" bit so later builder use re-copies before writing.
  if ((bitField0_ & 0x00000004) == 0x00000004) {
    blockIds_ = java.util.Collections.unmodifiableList(blockIds_);
    bitField0_ = (bitField0_ & ~0x00000004);
  }
  built.blockIds_ = blockIds_;
  built.bitField0_ = messageBits;
  onBuilt();
  return built;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Messages are equal when field presence, field values, and unknown
  // fields all match.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto that =
      (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto) obj;
  if (hasAction() != that.hasAction()) {
    return false;
  }
  if (hasAction() && getAction() != that.getAction()) {
    return false;
  }
  if (hasBlockPoolId() != that.hasBlockPoolId()) {
    return false;
  }
  if (hasBlockPoolId() && !getBlockPoolId().equals(that.getBlockPoolId())) {
    return false;
  }
  if (!getBlockIdsList().equals(that.getBlockIdsList())) {
    return false;
  }
  return getUnknownFields().equals(that.getUnknownFields());
}
/**
 * Converts a protobuf {@code BlockIdCommandProto} into its DatanodeProtocol
 * {@link BlockIdCommand} equivalent.
 *
 * @param blkIdCmd the protobuf command to convert
 * @return a {@code BlockIdCommand} carrying the same action, block pool id,
 *         and block ids
 * @throws AssertionError if the proto carries an unrecognized action
 */
public static BlockIdCommand convert(BlockIdCommandProto blkIdCmd) {
  int numBlockIds = blkIdCmd.getBlockIdsCount();
  // Idiomatic Java array declaration (was C-style "long blockIds[]").
  long[] blockIds = new long[numBlockIds];
  for (int i = 0; i < numBlockIds; i++) {
    blockIds[i] = blkIdCmd.getBlockIds(i);
  }
  // No dead DNA_UNKNOWN initializer: the switch either assigns or throws,
  // and "final" lets the compiler verify definite assignment.
  final int action;
  switch (blkIdCmd.getAction()) {
  case CACHE:
    action = DatanodeProtocol.DNA_CACHE;
    break;
  case UNCACHE:
    action = DatanodeProtocol.DNA_UNCACHE;
    break;
  default:
    throw new AssertionError("Unknown action type: " + blkIdCmd.getAction());
  }
  return new BlockIdCommand(action, blkIdCmd.getBlockPoolId(), blockIds);
}
// NOTE(review): fragment of a builder method whose enclosing if/else is outside
// this view — presumably a clear-style method: resets the field to its default
// instance when no nested builder exists, else clears the builder. Verify
// against the full generated file.
blkIdCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto.getDefaultInstance(); } else { blkIdCmdBuilder_.clear();
// NOTE(review): fragment of an equals() implementation for a message containing
// a blkIdCmd field — compares the nested command only when it is present. The
// surrounding presence check and result accumulation are outside this view.
if (hasBlkIdCmd()) { result = result && getBlkIdCmd() .equals(other.getBlkIdCmd());
// NOTE(review): fragment of an equals() implementation (duplicate shaded/unshaded
// copy) — compares the nested blkIdCmd field only when present; the enclosing
// method body continues outside this view.
if (hasBlkIdCmd()) { result = result && getBlkIdCmd() .equals(other.getBlkIdCmd());
// NOTE(review): fragment of a builder method (duplicate shaded/unshaded copy) —
// resets blkIdCmd_ to its default instance when no nested builder exists, else
// clears the builder; the enclosing if/else is outside this view.
blkIdCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto.getDefaultInstance(); } else { blkIdCmdBuilder_.clear();
public int getSerializedSize() {
  // Serve the memoized size when already computed (-1 means "not yet").
  int cached = memoizedSerializedSize;
  if (cached != -1) {
    return cached;
  }
  int total = 0;
  if ((bitField0_ & 0x00000001) == 0x00000001) {
    total += io.prestosql.hadoop.$internal.com.google.protobuf.CodedOutputStream
        .computeEnumSize(1, action_.getNumber());
  }
  if ((bitField0_ & 0x00000002) == 0x00000002) {
    total += io.prestosql.hadoop.$internal.com.google.protobuf.CodedOutputStream
        .computeBytesSize(2, getBlockPoolIdBytes());
  }
  {
    // Packed repeated uint64: sum the element payloads, then (when non-empty)
    // add one tag byte plus the varint length prefix.
    int payload = 0;
    for (int idx = 0; idx < blockIds_.size(); idx++) {
      payload += io.prestosql.hadoop.$internal.com.google.protobuf.CodedOutputStream
          .computeUInt64SizeNoTag(blockIds_.get(idx));
    }
    total += payload;
    if (!getBlockIdsList().isEmpty()) {
      total += 1;
      total += io.prestosql.hadoop.$internal.com.google.protobuf.CodedOutputStream
          .computeInt32SizeNoTag(payload);
    }
    // Cache the payload size for writeTo()'s length prefix.
    blockIdsMemoizedSerializedSize = payload;
  }
  total += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = total;
  return total;
}
public int getSerializedSize() {
  // Serve the memoized size when already computed (-1 means "not yet").
  int cached = memoizedSerializedSize;
  if (cached != -1) {
    return cached;
  }
  int total = 0;
  if ((bitField0_ & 0x00000001) == 0x00000001) {
    total += com.google.protobuf.CodedOutputStream
        .computeEnumSize(1, action_.getNumber());
  }
  if ((bitField0_ & 0x00000002) == 0x00000002) {
    total += com.google.protobuf.CodedOutputStream
        .computeBytesSize(2, getBlockPoolIdBytes());
  }
  {
    // Packed repeated uint64: sum the element payloads, then (when non-empty)
    // add one tag byte plus the varint length prefix.
    int payload = 0;
    for (int idx = 0; idx < blockIds_.size(); idx++) {
      payload += com.google.protobuf.CodedOutputStream
          .computeUInt64SizeNoTag(blockIds_.get(idx));
    }
    total += payload;
    if (!getBlockIdsList().isEmpty()) {
      total += 1;
      total += com.google.protobuf.CodedOutputStream
          .computeInt32SizeNoTag(payload);
    }
    // Cache the payload size for writeTo()'s length prefix.
    blockIdsMemoizedSerializedSize = payload;
  }
  total += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = total;
  return total;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Equality requires matching presence bits, matching set values, and
  // matching unknown fields.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto rhs =
      (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto) obj;
  boolean sameAction =
      hasAction() == rhs.hasAction()
          && (!hasAction() || getAction() == rhs.getAction());
  if (!sameAction) {
    return false;
  }
  boolean samePool =
      hasBlockPoolId() == rhs.hasBlockPoolId()
          && (!hasBlockPoolId() || getBlockPoolId().equals(rhs.getBlockPoolId()));
  if (!samePool) {
    return false;
  }
  return getBlockIdsList().equals(rhs.getBlockIdsList())
      && getUnknownFields().equals(rhs.getUnknownFields());
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Equality requires matching presence bits, matching set values, and
  // matching unknown fields.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto rhs =
      (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto) obj;
  if (hasAction() != rhs.hasAction()) {
    return false;
  }
  if (hasAction() && getAction() != rhs.getAction()) {
    return false;
  }
  if (hasBlockPoolId() != rhs.hasBlockPoolId()) {
    return false;
  }
  if (hasBlockPoolId() && !getBlockPoolId().equals(rhs.getBlockPoolId())) {
    return false;
  }
  return getBlockIdsList().equals(rhs.getBlockIdsList())
      && getUnknownFields().equals(rhs.getUnknownFields());
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto other) {
  // Merging the default instance is a no-op.
  if (other == org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasAction()) {
    setAction(other.getAction());
  }
  if (other.hasBlockPoolId()) {
    // Share the other message's string and mark the field as set.
    bitField0_ |= 0x00000002;
    blockPoolId_ = other.blockPoolId_;
    onChanged();
  }
  if (!other.blockIds_.isEmpty()) {
    if (blockIds_.isEmpty()) {
      // Adopt the other list wholesale; clear the mutable bit since the
      // adopted list may be immutable.
      blockIds_ = other.blockIds_;
      bitField0_ &= ~0x00000004;
    } else {
      ensureBlockIdsIsMutable();
      blockIds_.addAll(other.blockIds_);
    }
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
public org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto buildPartial() {
  // Build the message from the builder's current state without requiring
  // that all fields be set.
  org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto msg =
      new org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto(this);
  int srcBits = bitField0_;
  int dstBits = 0;
  // Carry over the presence bit for action (0x1).
  if ((srcBits & 0x00000001) == 0x00000001) {
    dstBits |= 0x00000001;
  }
  msg.action_ = action_;
  // Carry over the presence bit for blockPoolId (0x2).
  if ((srcBits & 0x00000002) == 0x00000002) {
    dstBits |= 0x00000002;
  }
  msg.blockPoolId_ = blockPoolId_;
  // Hand the (frozen) blockIds list to the message; clearing the mutable
  // bit forces a copy on the next builder mutation.
  if ((bitField0_ & 0x00000004) == 0x00000004) {
    blockIds_ = java.util.Collections.unmodifiableList(blockIds_);
    bitField0_ = (bitField0_ & ~0x00000004);
  }
  msg.blockIds_ = blockIds_;
  msg.bitField0_ = dstBits;
  onBuilt();
  return msg;
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto other) {
  // Nothing to do when merging the default instance.
  if (other == org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasAction()) {
    setAction(other.getAction());
  }
  if (other.hasBlockPoolId()) {
    // Take the other message's string directly and flag the field as set.
    bitField0_ |= 0x00000002;
    blockPoolId_ = other.blockPoolId_;
    onChanged();
  }
  if (!other.blockIds_.isEmpty()) {
    if (blockIds_.isEmpty()) {
      // Reuse the incoming list as-is; drop the mutable bit because the
      // shared list must not be modified in place.
      blockIds_ = other.blockIds_;
      bitField0_ &= ~0x00000004;
    } else {
      // Append to our own (mutable) copy.
      ensureBlockIdsIsMutable();
      blockIds_.addAll(other.blockIds_);
    }
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * <code>optional .hadoop.hdfs.datanode.BlockIdCommandProto blkIdCmd = 8;</code>
 *
 * Merges {@code value} into the blkIdCmd field: when the field is already set
 * to a non-default message the two are merged field-by-field, otherwise
 * {@code value} replaces it outright.
 */
public Builder mergeBlkIdCmd(org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto value) {
  if (blkIdCmdBuilder_ == null) {
    boolean alreadySet = (bitField0_ & 0x00000080) == 0x00000080;
    if (alreadySet
        && blkIdCmd_ != org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto.getDefaultInstance()) {
      blkIdCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto
          .newBuilder(blkIdCmd_).mergeFrom(value).buildPartial();
    } else {
      blkIdCmd_ = value;
    }
    onChanged();
  } else {
    // Delegate to the nested builder when one exists.
    blkIdCmdBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000080;
  return this;
}
/**
/**
 * <code>optional .hadoop.hdfs.datanode.BlockIdCommandProto blkIdCmd = 8;</code>
 *
 * Merges {@code value} into the blkIdCmd field. An existing non-default
 * message is merged with the incoming one; otherwise the incoming message
 * simply becomes the field value.
 */
public Builder mergeBlkIdCmd(org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto value) {
  if (blkIdCmdBuilder_ != null) {
    // A nested builder exists: let it perform the merge.
    blkIdCmdBuilder_.mergeFrom(value);
  } else {
    if (((bitField0_ & 0x00000080) == 0x00000080)
        && blkIdCmd_ != org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto.getDefaultInstance()) {
      blkIdCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto
          .newBuilder(blkIdCmd_).mergeFrom(value).buildPartial();
    } else {
      blkIdCmd_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000080;
  return this;
}
/**
@java.lang.Override
public int hashCode() {
  // Serve the cached hash when already computed (0 means "not yet").
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptorForType().hashCode();
  // Fold in each field that is present, tagged by its field number.
  if (hasAction()) {
    h = (37 * h) + ACTION_FIELD_NUMBER;
    h = (53 * h) + hashEnum(getAction());
  }
  if (hasBlockPoolId()) {
    h = (37 * h) + BLOCKPOOLID_FIELD_NUMBER;
    h = (53 * h) + getBlockPoolId().hashCode();
  }
  if (getBlockIdsCount() > 0) {
    h = (37 * h) + BLOCKIDS_FIELD_NUMBER;
    h = (53 * h) + getBlockIdsList().hashCode();
  }
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
public org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto buildPartial() {
  // Assemble a message from whatever fields the builder currently holds.
  org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto out =
      new org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto(this);
  int fromBits = bitField0_;
  int toBits = 0;
  // action presence (0x1)
  if ((fromBits & 0x00000001) == 0x00000001) {
    toBits |= 0x00000001;
  }
  out.action_ = action_;
  // blockPoolId presence (0x2)
  if ((fromBits & 0x00000002) == 0x00000002) {
    toBits |= 0x00000002;
  }
  out.blockPoolId_ = blockPoolId_;
  // Freeze blockIds before handing it to the immutable message; clearing
  // the mutable bit (0x4) makes the builder copy-on-next-write.
  if ((bitField0_ & 0x00000004) == 0x00000004) {
    blockIds_ = java.util.Collections.unmodifiableList(blockIds_);
    bitField0_ = (bitField0_ & ~0x00000004);
  }
  out.blockIds_ = blockIds_;
  out.bitField0_ = toBits;
  onBuilt();
  return out;
}
@java.lang.Override
public int hashCode() {
  // 0 is the "not yet computed" sentinel for the memoized hash.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  // Mix in each present field, prefixed by its field number so identical
  // values in different fields hash differently.
  if (hasAction()) {
    hash = (37 * hash) + ACTION_FIELD_NUMBER;
    hash = (53 * hash) + hashEnum(getAction());
  }
  if (hasBlockPoolId()) {
    hash = (37 * hash) + BLOCKPOOLID_FIELD_NUMBER;
    hash = (53 * hash) + getBlockPoolId().hashCode();
  }
  if (getBlockIdsCount() > 0) {
    hash = (37 * hash) + BLOCKIDS_FIELD_NUMBER;
    hash = (53 * hash) + getBlockIdsList().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
/**
 * Converts a protobuf {@code BlockIdCommandProto} into its DatanodeProtocol
 * {@link BlockIdCommand} equivalent.
 *
 * @param blkIdCmd the protobuf command to convert
 * @return a {@code BlockIdCommand} carrying the same action, block pool id,
 *         and block ids
 * @throws AssertionError if the proto carries an unrecognized action
 */
public static BlockIdCommand convert(BlockIdCommandProto blkIdCmd) {
  int numBlockIds = blkIdCmd.getBlockIdsCount();
  // Idiomatic Java array declaration (was C-style "long blockIds[]").
  long[] blockIds = new long[numBlockIds];
  for (int i = 0; i < numBlockIds; i++) {
    blockIds[i] = blkIdCmd.getBlockIds(i);
  }
  // No dead DNA_UNKNOWN initializer: the switch either assigns or throws,
  // and "final" lets the compiler verify definite assignment.
  final int action;
  switch (blkIdCmd.getAction()) {
  case CACHE:
    action = DatanodeProtocol.DNA_CACHE;
    break;
  case UNCACHE:
    action = DatanodeProtocol.DNA_UNCACHE;
    break;
  default:
    throw new AssertionError("Unknown action type: " + blkIdCmd.getAction());
  }
  return new BlockIdCommand(action, blkIdCmd.getBlockPoolId(), blockIds);
}