/**
 * <code>required .hadoop.hdfs.LocatedBlockProto block = 2;</code>
 *
 * <pre>
 * Block to be recovered
 * </pre>
 */
public Builder mergeBlock(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto value) {
  if (blockBuilder_ == null) {
    if (((bitField0_ & 0x00000002) == 0x00000002) &&
        block_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance()) {
      // Field already set: merge the incoming value into the existing message.
      block_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.newBuilder(block_).mergeFrom(value).buildPartial();
    } else {
      block_ = value;
    }
    onChanged();
  } else {
    blockBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000002;
  return this;
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto other) {
  if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance()) return this;
  if (other.hasB()) {
    mergeB(other.getB());
  }
  if (other.hasOffset()) {
    setOffset(other.getOffset());
  }
  if (other.hasCorrupt()) {
    setCorrupt(other.getCorrupt());
  }
  if (other.hasBlockToken()) {
    mergeBlockToken(other.getBlockToken());
  }
  // (merging of the repeated locs/isCached/storageTypes/storageIDs fields elided)
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
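// Usage sketch (not from the HDFS sources): merging two LocatedBlockProto
// messages through the generated builder API above. buildPartial() is used so
// the sketch does not trip the required-field check that build() performs.
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto;

class MergeSketch {
  static LocatedBlockProto merge(LocatedBlockProto base, LocatedBlockProto overlay) {
    // Scalar fields set in 'overlay' replace those in 'base'; sub-messages
    // (b, blockToken) are merged field-by-field; repeated fields concatenate.
    return LocatedBlockProto.newBuilder(base)
        .mergeFrom(overlay)
        .buildPartial();
  }
}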
int dataSize = 0;
dataSize = 1 * getIsCachedList().size();
size += dataSize;
if (!getIsCachedList().isEmpty()) {
  // Packed field: one byte of tag plus a varint length prefix.
  size += 1;
  size += com.google.protobuf.CodedOutputStream
      .computeInt32SizeNoTag(dataSize);
}
size += 1 * getStorageIDsList().size();
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
public static LocatedBlockProto convertLocatedBlock(LocatedBlock b) {
  if (b == null) return null;
  Builder builder = LocatedBlockProto.newBuilder();
  DatanodeInfo[] locs = b.getLocations();
  List<DatanodeInfo> cachedLocs =
      Lists.newLinkedList(Arrays.asList(b.getCachedLocations()));
public final boolean isInitialized() {
  if (!hasB()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasOffset()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasCorrupt()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasBlockToken()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!getB().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  for (int i = 0; i < getLocsCount(); i++) {
    if (!getLocs(i).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  if (!getBlockToken().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
public static LocatedBlock convertLocatedBlockProto(LocatedBlockProto proto) {
  if (proto == null) return null;
  List<DatanodeInfoProto> locs = proto.getLocsList();
  DatanodeInfo[] targets = new DatanodeInfo[locs.size()];
  for (int i = 0; i < locs.size(); i++) {
    targets[i] = PBHelperClient.convert(locs.get(i));
  }

  final StorageType[] storageTypes = convertStorageTypes(
      proto.getStorageTypesList(), locs.size());

  final int storageIDsCount = proto.getStorageIDsCount();
  final String[] storageIDs;
  if (storageIDsCount == 0) {
    storageIDs = null;
  } else {
    Preconditions.checkState(storageIDsCount == locs.size());
    storageIDs = proto.getStorageIDsList()
        .toArray(new String[storageIDsCount]);
  }

  byte[] indices = null;
  if (proto.hasBlockIndices()) {
    indices = proto.getBlockIndices().toByteArray();
  }

  // Set values from the isCached list, re-using references from targets.
  List<DatanodeInfo> cachedLocs = new ArrayList<>(locs.size());
  List<Boolean> isCachedList = proto.getIsCachedList();
  for (int i = 0; i < isCachedList.size(); i++) {
    if (isCachedList.get(i)) {
      cachedLocs.add(targets[i]);
    }
  }

  final LocatedBlock lb;
  if (indices == null) {
    lb = new LocatedBlock(PBHelperClient.convert(proto.getB()), targets,
        storageIDs, storageTypes, proto.getOffset(), proto.getCorrupt(),
        cachedLocs.toArray(new DatanodeInfo[cachedLocs.size()]));
  } else {
    lb = new LocatedStripedBlock(PBHelperClient.convert(proto.getB()),
        targets, storageIDs, storageTypes, indices, proto.getOffset(),
        proto.getCorrupt(), cachedLocs.toArray(new DatanodeInfo[cachedLocs.size()]));
  }
  lb.setBlockToken(PBHelperClient.convert(proto.getBlockToken()));
  return lb;
}
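// Round-trip usage sketch (not part of the HDFS sources): convert a
// LocatedBlock to its protobuf form and back using the helpers above.
// The pool/block values below are made up for illustration.
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;

class RoundTripSketch {
  static void demo() {
    LocatedBlock original = new LocatedBlock(
        new ExtendedBlock("demo-pool", 1L, 1024L, 100L),  // poolId, blockId, numBytes, genStamp
        new DatanodeInfo[0]);
    LocatedBlockProto proto = PBHelperClient.convertLocatedBlock(original);
    LocatedBlock back = PBHelperClient.convertLocatedBlockProto(proto);
    assert original.getBlock().equals(back.getBlock());
  }
}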
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasB()) {
    hash = (37 * hash) + B_FIELD_NUMBER;
    hash = (53 * hash) + getB().hashCode();
  }
  if (hasOffset()) {
    hash = (37 * hash) + OFFSET_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getOffset());
  }
  if (getLocsCount() > 0) {
    hash = (37 * hash) + LOCS_FIELD_NUMBER;
    hash = (53 * hash) + getLocsList().hashCode();
  }
  if (hasCorrupt()) {
    hash = (37 * hash) + CORRUPT_FIELD_NUMBER;
    hash = (53 * hash) + hashBoolean(getCorrupt());
  }
  if (hasBlockToken()) {
    hash = (37 * hash) + BLOCKTOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getBlockToken().hashCode();
  }
  if (getIsCachedCount() > 0) {
    hash = (37 * hash) + ISCACHED_FIELD_NUMBER;
    hash = (53 * hash) + getIsCachedList().hashCode();
  }
  if (getStorageTypesCount() > 0) {
    hash = (37 * hash) + STORAGETYPES_FIELD_NUMBER;
    hash = (53 * hash) + hashEnumList(getStorageTypesList());
  }
  if (getStorageIDsCount() > 0) {
    hash = (37 * hash) + STORAGEIDS_FIELD_NUMBER;
    hash = (53 * hash) + getStorageIDsList().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static LocatedBlockProto convert(LocatedBlock b) {
  if (b == null) return null;
  Builder builder = LocatedBlockProto.newBuilder();
  DatanodeInfo[] locs = b.getLocations();
  List<DatanodeInfo> cachedLocs =
      Lists.newLinkedList(Arrays.asList(b.getCachedLocations()));
boolean result = true;
result = result && (hasB() == other.hasB());
if (hasB()) {
  result = result && getB().equals(other.getB());
}
result = result && (hasOffset() == other.hasOffset());
if (hasOffset()) {
  result = result && (getOffset() == other.getOffset());
}
result = result && getLocsList().equals(other.getLocsList());
result = result && (hasCorrupt() == other.hasCorrupt());
if (hasCorrupt()) {
  result = result && (getCorrupt() == other.getCorrupt());
}
result = result && (hasBlockToken() == other.hasBlockToken());
if (hasBlockToken()) {
  result = result && getBlockToken().equals(other.getBlockToken());
}
result = result && getIsCachedList().equals(other.getIsCachedList());
result = result && getStorageTypesList().equals(other.getStorageTypesList());
result = result && getStorageIDsList().equals(other.getStorageIDsList());
result = result && getUnknownFields().equals(other.getUnknownFields());
return result;
public Builder clear() {
  super.clear();
  fileLength_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000001);
  if (blocksBuilder_ == null) {
    blocks_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000002);
  } else {
    blocksBuilder_.clear();
  }
  underConstruction_ = false;
  bitField0_ = (bitField0_ & ~0x00000004);
  if (lastBlockBuilder_ == null) {
    lastBlock_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance();
  } else {
    lastBlockBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000008);
  isLastBlockComplete_ = false;
  bitField0_ = (bitField0_ & ~0x00000010);
  if (fileEncryptionInfoBuilder_ == null) {
    fileEncryptionInfo_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.FileEncryptionInfoProto.getDefaultInstance();
  } else {
    fileEncryptionInfoBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000020);
  return this;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeMessage(1, b_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeUInt64(2, offset_);
  }
  for (int i = 0; i < locs_.size(); i++) {
    output.writeMessage(3, locs_.get(i));
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    output.writeBool(4, corrupt_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    output.writeMessage(5, blockToken_);
  }
  if (getIsCachedList().size() > 0) {
    // Packed field: tag (field 6, wire type 2) followed by the payload length.
    output.writeRawVarint32(50);
    output.writeRawVarint32(isCachedMemoizedSerializedSize);
  }
  for (int i = 0; i < isCached_.size(); i++) {
    output.writeBoolNoTag(isCached_.get(i));
  }
  for (int i = 0; i < storageTypes_.size(); i++) {
    output.writeEnum(7, storageTypes_.get(i).getNumber());
  }
  for (int i = 0; i < storageIDs_.size(); i++) {
    output.writeBytes(8, storageIDs_.getByteString(i));
  }
  getUnknownFields().writeTo(output);
}
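// Serialization sketch (not from the HDFS sources): the generated writeTo()
// above is normally driven through the standard protobuf-java surface.
// 'proto' here is an already-built LocatedBlockProto.
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto;

class SerializeSketch {
  static LocatedBlockProto roundTrip(LocatedBlockProto proto)
      throws InvalidProtocolBufferException {
    byte[] bytes = proto.toByteArray();         // invokes writeTo() internally
    return LocatedBlockProto.parseFrom(bytes);  // equals(proto) holds after parsing
  }
}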
public static LocatedBlock convert(LocatedBlockProto proto) {
  if (proto == null) return null;
  List<DatanodeInfoProto> locs = proto.getLocsList();
  DatanodeInfo[] targets = new DatanodeInfo[locs.size()];
  for (int i = 0; i < locs.size(); i++) {
    targets[i] = PBHelper.convert(locs.get(i));
  }

  final StorageType[] storageTypes = convertStorageTypes(
      proto.getStorageTypesList(), locs.size());

  final int storageIDsCount = proto.getStorageIDsCount();
  final String[] storageIDs;
  if (storageIDsCount == 0) {
    storageIDs = null;
  } else {
    Preconditions.checkState(storageIDsCount == locs.size());
    storageIDs = proto.getStorageIDsList().toArray(new String[storageIDsCount]);
  }

  // Set values from the isCached list, re-using references from targets.
  List<DatanodeInfo> cachedLocs = new ArrayList<DatanodeInfo>(locs.size());
  List<Boolean> isCachedList = proto.getIsCachedList();
  for (int i = 0; i < isCachedList.size(); i++) {
    if (isCachedList.get(i)) {
      cachedLocs.add(targets[i]);
    }
  }

  LocatedBlock lb = new LocatedBlock(PBHelper.convert(proto.getB()), targets,
      storageIDs, storageTypes, proto.getOffset(), proto.getCorrupt(),
      cachedLocs.toArray(new DatanodeInfo[0]));
  lb.setBlockToken(PBHelper.convert(proto.getBlockToken()));
  return lb;
}
@Test
public void testConvertBlockRecoveryCommand() {
  DatanodeInfo di1 = DFSTestUtil.getLocalDatanodeInfo();
  DatanodeInfo di2 = DFSTestUtil.getLocalDatanodeInfo();
  DatanodeInfo[] dnInfo = new DatanodeInfo[] { di1, di2 };

  List<RecoveringBlock> blks = ImmutableList.of(
      new RecoveringBlock(getExtendedBlock(1), dnInfo, 3),
      new RecoveringBlock(getExtendedBlock(2), dnInfo, 3));

  BlockRecoveryCommand cmd = new BlockRecoveryCommand(blks);
  BlockRecoveryCommandProto proto = PBHelper.convert(cmd);
  assertEquals(1, proto.getBlocks(0).getBlock().getB().getBlockId());
  assertEquals(2, proto.getBlocks(1).getBlock().getB().getBlockId());

  BlockRecoveryCommand cmd2 = PBHelper.convert(proto);
  List<RecoveringBlock> cmd2Blks = Lists.newArrayList(cmd2.getRecoveringBlocks());
  assertEquals(blks.get(0).getBlock(), cmd2Blks.get(0).getBlock());
  assertEquals(blks.get(1).getBlock(), cmd2Blks.get(1).getBlock());
  assertEquals(Joiner.on(",").join(blks), Joiner.on(",").join(cmd2Blks));
  assertEquals(cmd.toString(), cmd2.toString());
}
/**
 * <code>required .hadoop.hdfs.LocatedBlockProto block = 2;</code>
 *
 * <pre>
 * Block to be recovered
 * </pre>
 */
public Builder clearBlock() {
  if (blockBuilder_ == null) {
    block_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance();
    onChanged();
  } else {
    blockBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000002);
  return this;
}
/**
 * <code>optional .hadoop.hdfs.LocatedBlockProto lastBlock = 4;</code>
 */
public Builder mergeLastBlock(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto value) {
  if (lastBlockBuilder_ == null) {
    if (((bitField0_ & 0x00000008) == 0x00000008) &&
        lastBlock_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance()) {
      lastBlock_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.newBuilder(lastBlock_).mergeFrom(value).buildPartial();
    } else {
      lastBlock_ = value;
    }
    onChanged();
  } else {
    lastBlockBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000008;
  return this;
}
public Builder toBuilder() { return newBuilder(this); }
public Builder clear() {
  super.clear();
  newGenStamp_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000001);
  if (blockBuilder_ == null) {
    block_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto.getDefaultInstance();
  } else {
    blockBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000002);
  if (truncateBlockBuilder_ == null) {
    truncateBlock_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance();
  } else {
    truncateBlockBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000004);
  return this;
}