// NOTE(review): fragment — iterates the file inode's BlockProto list, opening an
// XML element per block and emitting its id, generation stamp and byte length.
// The loop body's closing brace and closing tag lie outside this view; left as-is.
for (BlockProto b : f.getBlocksList()) { out.print("<" + INODE_SECTION_BLOCK + ">"); o(SECTION_ID, b.getBlockId()) .o(INODE_SECTION_GENSTAMP, b.getGenStamp()) .o(INODE_SECTION_NUM_BYTES, b.getNumBytes());
// NOTE(review): fragment (duplicate of the loop above) — emits one XML block
// element per BlockProto with id, genstamp and numBytes child elements.
// The loop close is outside this view; code left byte-identical.
for (BlockProto b : f.getBlocksList()) { out.print("<" + INODE_SECTION_BLOCK + ">"); o(SECTION_ID, b.getBlockId()) .o(INODE_SECTION_GENSTAMP, b.getGenStamp()) .o(INODE_SECTION_NUM_BYTES, b.getNumBytes());
@Override public ReplicaRecoveryInfo initReplicaRecovery(RecoveringBlock rBlock) throws IOException { InitReplicaRecoveryRequestProto req = InitReplicaRecoveryRequestProto .newBuilder().setBlock(PBHelper.convert(rBlock)).build(); InitReplicaRecoveryResponseProto resp; try { resp = rpcProxy.initReplicaRecovery(NULL_CONTROLLER, req); } catch (ServiceException e) { throw ProtobufHelper.getRemoteException(e); } if (!resp.getReplicaFound()) { // No replica found on the remote node. return null; } else { if (!resp.hasBlock() || !resp.hasState()) { throw new IOException("Replica was found but missing fields. " + "Req: " + req + "\n" + "Resp: " + resp); } } BlockProto b = resp.getBlock(); return new ReplicaRecoveryInfo(b.getBlockId(), b.getNumBytes(), b.getGenStamp(), PBHelper.convert(resp.getState())); }
// NOTE(review): fragment — resolves storage `s` into its BlockProtos, records
// each on builder `b`, writes the block data at running offset `off`, then
// advances `off` by the block length. The loop close is outside this view.
for (BlockProto block : blk.resolve(s)) { b.addBlocks(block); writeBlock(block.getBlockId(), off, block.getNumBytes(), block.getGenStamp(), pathHandle, out); off += block.getNumBytes();
/**
 * Structural equality for {@code BlockProto}: two messages are equal when
 * they agree on field presence, on the value of every present field, and
 * on their unknown-field sets.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Identity: an object always equals itself.
  if (obj == this) {
    return true;
  }
  // Different message type: defer to the superclass definition.
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto other =
      (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto) obj;
  // Each optional field must match in presence, and in value when present.
  if (hasBlockId() != other.hasBlockId()) {
    return false;
  }
  if (hasBlockId() && getBlockId() != other.getBlockId()) {
    return false;
  }
  if (hasGenStamp() != other.hasGenStamp()) {
    return false;
  }
  if (hasGenStamp() && getGenStamp() != other.getGenStamp()) {
    return false;
  }
  if (hasNumBytes() != other.hasNumBytes()) {
    return false;
  }
  if (hasNumBytes() && getNumBytes() != other.getNumBytes()) {
    return false;
  }
  return getUnknownFields().equals(other.getUnknownFields());
}
@Override public ReplicaRecoveryInfo initReplicaRecovery(RecoveringBlock rBlock) throws IOException { InitReplicaRecoveryRequestProto req = InitReplicaRecoveryRequestProto .newBuilder().setBlock(PBHelper.convert(rBlock)).build(); InitReplicaRecoveryResponseProto resp; try { resp = rpcProxy.initReplicaRecovery(NULL_CONTROLLER, req); } catch (ServiceException e) { throw ProtobufHelper.getRemoteException(e); } if (!resp.getReplicaFound()) { // No replica found on the remote node. return null; } else { if (!resp.hasBlock() || !resp.hasState()) { throw new IOException("Replica was found but missing fields. " + "Req: " + req + "\n" + "Resp: " + resp); } } BlockProto b = resp.getBlock(); return new ReplicaRecoveryInfo(b.getBlockId(), b.getNumBytes(), b.getGenStamp(), PBHelper.convert(resp.getState())); }
/**
 * Hash code over the message descriptor, every present field, and the
 * unknown-field set. The result is memoized; 0 marks "not yet computed".
 */
@java.lang.Override
public int hashCode() {
  // Fast path: reuse the cached value when it was computed before.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = 19 * hash + getDescriptorForType().hashCode();
  // Fold in each optional field only when it is present, tagging each
  // contribution with its field number so distinct fields hash apart.
  if (hasBlockId()) {
    hash = 37 * hash + BLOCKID_FIELD_NUMBER;
    hash = 53 * hash + hashLong(getBlockId());
  }
  if (hasGenStamp()) {
    hash = 37 * hash + GENSTAMP_FIELD_NUMBER;
    hash = 53 * hash + hashLong(getGenStamp());
  }
  if (hasNumBytes()) {
    hash = 37 * hash + NUMBYTES_FIELD_NUMBER;
    hash = 53 * hash + hashLong(getNumBytes());
  }
  hash = 29 * hash + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
/**
 * Writes a file inode as XML: scalar attributes first, then the block list
 * (when non-empty), then the under-construction feature (when present).
 *
 * @param f the protobuf-decoded file inode; output goes to {@code out}
 */
private void dumpINodeFile(INodeSection.INodeFile f) {
  o("replication", f.getReplication()).o("mtime", f.getModificationTime())
      .o("atime", f.getAccessTime())
      // Fix: element name was misspelled "perferredBlockSize".
      .o("preferredBlockSize", f.getPreferredBlockSize())
      .o("permission", dumpPermission(f.getPermission()));
  if (f.getBlocksCount() > 0) {
    out.print("<blocks>");
    for (BlockProto b : f.getBlocksList()) {
      out.print("<block>");
      o("id", b.getBlockId()).o("genstamp", b.getGenStamp()).o("numBytes",
          b.getNumBytes());
      out.print("</block>\n");
    }
    out.print("</blocks>\n");
  }
  if (f.hasFileUC()) {
    // A lease exists: the file is still open for write by this client.
    INodeSection.FileUnderConstructionFeature u = f.getFileUC();
    out.print("<file-under-construction>");
    o("clientName", u.getClientName()).o("clientMachine", u.getClientMachine());
    out.print("</file-under-construction>\n");
  }
}
@Override public ReplicaRecoveryInfo initReplicaRecovery(RecoveringBlock rBlock) throws IOException { InitReplicaRecoveryRequestProto req = InitReplicaRecoveryRequestProto .newBuilder().setBlock(PBHelper.convert(rBlock)).build(); InitReplicaRecoveryResponseProto resp; try { resp = rpcProxy.initReplicaRecovery(NULL_CONTROLLER, req); } catch (ServiceException e) { throw ProtobufHelper.getRemoteException(e); } if (!resp.getReplicaFound()) { // No replica found on the remote node. return null; } else { if (!resp.hasBlock() || !resp.hasState()) { throw new IOException("Replica was found but missing fields. " + "Req: " + req + "\n" + "Resp: " + resp); } } BlockProto b = resp.getBlock(); return new ReplicaRecoveryInfo(b.getBlockId(), b.getNumBytes(), b.getGenStamp(), PBHelper.convert(resp.getState())); }
/**
 * Writes a file inode as XML: scalar attributes first, then the block list
 * (when non-empty), then the under-construction feature (when present).
 *
 * @param f the protobuf-decoded file inode; output goes to {@code out}
 */
private void dumpINodeFile(INodeSection.INodeFile f) {
  o("replication", f.getReplication()).o("mtime", f.getModificationTime())
      .o("atime", f.getAccessTime())
      // Fix: element name was misspelled "perferredBlockSize".
      .o("preferredBlockSize", f.getPreferredBlockSize())
      .o("permission", dumpPermission(f.getPermission()));
  if (f.getBlocksCount() > 0) {
    out.print("<blocks>");
    for (BlockProto b : f.getBlocksList()) {
      out.print("<block>");
      o("id", b.getBlockId()).o("genstamp", b.getGenStamp()).o("numBytes",
          b.getNumBytes());
      out.print("</block>\n");
    }
    out.print("</blocks>\n");
  }
  if (f.hasFileUC()) {
    // A lease exists: the file is still open for write by this client.
    INodeSection.FileUnderConstructionFeature u = f.getFileUC();
    out.print("<file-under-construction>");
    o("clientName", u.getClientName()).o("clientMachine", u.getClientMachine());
    out.print("</file-under-construction>\n");
  }
}
/**
 * Translates a protobuf {@code BlockProto} into its HDFS {@link Block}
 * counterpart, carrying over block id, length in bytes, and generation stamp.
 *
 * @param b the wire-format block message
 * @return an equivalent {@code Block} instance
 */
public static Block convert(BlockProto b) {
  final long blockId = b.getBlockId();
  final long numBytes = b.getNumBytes();
  final long genStamp = b.getGenStamp();
  return new Block(blockId, numBytes, genStamp);
}
/**
 * Translates a protobuf {@code BlockProto} into its HDFS {@link Block}
 * counterpart, carrying over block id, length in bytes, and generation stamp.
 *
 * @param b the wire-format block message
 * @return an equivalent {@code Block} instance
 */
public static Block convert(BlockProto b) {
  final long blockId = b.getBlockId();
  final long numBytes = b.getNumBytes();
  final long genStamp = b.getGenStamp();
  return new Block(blockId, numBytes, genStamp);
}
/**
 * Translates a protobuf {@code BlockProto} into its HDFS {@link Block}
 * counterpart, carrying over block id, length in bytes, and generation stamp.
 *
 * @param b the wire-format block message
 * @return an equivalent {@code Block} instance
 */
public static Block convert(BlockProto b) {
  final long blockId = b.getBlockId();
  final long numBytes = b.getNumBytes();
  final long genStamp = b.getGenStamp();
  return new Block(blockId, numBytes, genStamp);
}
/**
 * Merges the set fields of {@code other} into this builder: every field
 * present on {@code other} overwrites this builder's value, and unknown
 * fields are merged in. Merging the default instance is a no-op.
 *
 * @param other the message to merge from
 * @return this builder, for chaining
 */
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto other) {
  // Skip all work when merging from the default (empty) instance.
  if (other != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance()) {
    if (other.hasBlockId()) {
      setBlockId(other.getBlockId());
    }
    if (other.hasGenStamp()) {
      setGenStamp(other.getGenStamp());
    }
    if (other.hasNumBytes()) {
      setNumBytes(other.getNumBytes());
    }
    this.mergeUnknownFields(other.getUnknownFields());
  }
  return this;
}