public org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto buildPartial() {
  // Generated protobuf builder method: constructs the message from the
  // builder's current state WITHOUT requiring required fields to be set.
  org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto result = new org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto(this);
  int from_bitField0_ = bitField0_;  // snapshot (unused here; protoc emits it for all builders)
  if (blockECReconstructioninfoBuilder_ == null) {
    // Repeated field is stored inline in this builder. If the "list is
    // mutable" bit (0x00000001) is set, freeze the list so the built
    // message is immutable, then clear the bit so further builder
    // mutations copy-on-write a fresh list.
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      blockECReconstructioninfo_ = java.util.Collections.unmodifiableList(blockECReconstructioninfo_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.blockECReconstructioninfo_ = blockECReconstructioninfo_;
  } else {
    // Repeated field is managed by a nested repeated-field builder; delegate.
    result.blockECReconstructioninfo_ = blockECReconstructioninfoBuilder_.build();
  }
  onBuilt();
  return result;
}
@java.lang.Override
public int hashCode() {
  // Memoized message hash; 0 means "not yet computed".
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptorForType().hashCode();
  if (getBlockECReconstructioninfoCount() > 0) {
    // Fold in the repeated field only when non-empty, mirroring equals().
    h = (37 * h) + BLOCKECRECONSTRUCTIONINFO_FIELD_NUMBER;
    h = (53 * h) + getBlockECReconstructioninfoList().hashCode();
  }
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
blkECReconstructionCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto.getDefaultInstance(); } else { blkECReconstructionCmdBuilder_.clear();
if (hasBlkECReconstructionCmd()) { result = result && getBlkECReconstructionCmd() .equals(other.getBlkECReconstructionCmd());
hash = (53 * hash) + getBlkECReconstructionCmd().hashCode();
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto other) { if (other == org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto.getDefaultInstance()) return this; if (blockECReconstructioninfoBuilder_ == null) { if (!other.blockECReconstructioninfo_.isEmpty()) { this.mergeUnknownFields(other.getUnknownFields()); return this;
/**
 * <code>optional .hadoop.hdfs.datanode.BlockECReconstructionCommandProto blkECReconstructionCmd = 9;</code>
 */
public Builder mergeBlkECReconstructionCmd(org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto value) {
  if (blkECReconstructionCmdBuilder_ == null) {
    // No nested single-field builder: merge directly into the stored
    // message. If the field is already present (has-bit 0x00000100) and
    // is not the shared default instance, field-merge the two messages;
    // otherwise simply adopt the incoming value.
    if (((bitField0_ & 0x00000100) == 0x00000100) &&
        blkECReconstructionCmd_ != org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto.getDefaultInstance()) {
      blkECReconstructionCmd_ =
        org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto.newBuilder(blkECReconstructionCmd_).mergeFrom(value).buildPartial();
    } else {
      blkECReconstructionCmd_ = value;
    }
    onChanged();
  } else {
    // Delegate the merge to the nested builder.
    blkECReconstructionCmdBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000100;  // mark field 9 as present
  return this;
}
/**
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Generated value equality: identical type, identical repeated-field
  // contents, identical unknown fields.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto that =
      (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto) obj;
  if (!getBlockECReconstructioninfoList().equals(that.getBlockECReconstructioninfoList())) {
    return false;
  }
  return getUnknownFields().equals(that.getUnknownFields());
}
/**
 * <code>optional .hadoop.hdfs.datanode.BlockECReconstructionCommandProto blkECReconstructionCmd = 9;</code>
 */
public Builder clearBlkECReconstructionCmd() {
  if (blkECReconstructionCmdBuilder_ == null) {
    // Field stored inline in this builder: reset it to the shared default
    // instance and notify listeners of the change.
    blkECReconstructionCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto.getDefaultInstance();
    onChanged();
  } else {
    // Delegate to the nested single-field builder.
    blkECReconstructionCmdBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000100);  // clear the has-bit for field 9
  return this;
}
/**
public static BlockECReconstructionCommand convert(
    BlockECReconstructionCommandProto blkECReconstructionCmdProto) {
  // Translate each proto entry into its internal representation, then wrap
  // the batch in an erasure-coding reconstruction command.
  List<BlockECReconstructionInfoProto> protoInfos =
      blkECReconstructionCmdProto.getBlockECReconstructioninfoList();
  Collection<BlockECReconstructionInfo> infos =
      new ArrayList<>(protoInfos.size());
  for (BlockECReconstructionInfoProto proto : protoInfos) {
    infos.add(convertBlockECReconstructionInfo(proto));
  }
  return new BlockECReconstructionCommand(
      DatanodeProtocol.DNA_ERASURE_CODING_RECONSTRUCTION, infos);
}
public org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto getDefaultInstanceForType() {
  // Generated accessor: returns the shared immutable default (empty) instance.
  return org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto.getDefaultInstance();
}
public int getSerializedSize() {
  // Computes the wire size of this message, memoized in
  // memoizedSerializedSize (-1 means "not yet computed").
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  // Each repeated element is serialized as field number 1
  // (tag + length + payload).
  for (int i = 0; i < blockECReconstructioninfo_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(1, blockECReconstructioninfo_.get(i));
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
public final boolean isInitialized() {
  // Cached result: -1 = unknown, 0 = not initialized, 1 = initialized.
  byte cached = memoizedIsInitialized;
  if (cached != -1) {
    return cached == 1;
  }
  // This message is initialized only if every repeated element is.
  final int count = getBlockECReconstructioninfoCount();
  for (int idx = 0; idx < count; idx++) {
    if (!getBlockECReconstructioninfo(idx).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  memoizedIsInitialized = 1;
  return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Force the serialized size to be computed (and memoized) before any
  // bytes are written.
  getSerializedSize();
  final int n = blockECReconstructioninfo_.size();
  for (int idx = 0; idx < n; idx++) {
    output.writeMessage(1, blockECReconstructioninfo_.get(idx));
  }
  getUnknownFields().writeTo(output);
}
private void initFields() {
  // Generated initializer for DatanodeCommandProto: cmdType_ starts at the
  // first enum value, and every optional sub-command field starts as its
  // type's shared immutable default instance (never null).
  cmdType_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeCommandProto.Type.BalancerBandwidthCommand;
  balancerCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BalancerBandwidthCommandProto.getDefaultInstance();
  blkCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockCommandProto.getDefaultInstance();
  recoveryCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockRecoveryCommandProto.getDefaultInstance();
  finalizeCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.FinalizeCommandProto.getDefaultInstance();
  keyUpdateCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto.getDefaultInstance();
  registerCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.RegisterCommandProto.getDefaultInstance();
  blkIdCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockIdCommandProto.getDefaultInstance();
  blkECReconstructionCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto.getDefaultInstance();
}
// Initialization-check cache: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;