private void writeSuccessWithChecksumInfo(BlockSender blockSender,
    DataOutputStream out) throws IOException {
  ReadOpChecksumInfoProto ckInfo = ReadOpChecksumInfoProto.newBuilder()
      .setChecksum(DataTransferProtoUtil.toProto(blockSender.getChecksum()))
      .setChunkOffset(blockSender.getOffset())
      .build();

  BlockOpResponseProto response = BlockOpResponseProto.newBuilder()
      .setStatus(SUCCESS)
      .setReadOpChecksumInfo(ckInfo)
      .build();
  response.writeDelimitedTo(out);
  out.flush();
}
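For context, a minimal sketch of the receiving side of this exchange, assuming the response was written with `writeDelimitedTo` as above: the reader parses the length-delimited `BlockOpResponseProto` from the input stream and, on `SUCCESS`, pulls the checksum description out of the embedded `ReadOpChecksumInfoProto`. The class name, the method name `readChecksumInfo`, and the error handling here are illustrative assumptions, not part of the original code.

```java
import java.io.DataInputStream;
import java.io.IOException;

import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;

// Hypothetical client-side counterpart (sketch): read the delimited response
// written by writeSuccessWithChecksumInfo and extract the checksum info.
final class ReadResponseSketch {
  private ReadResponseSketch() {}

  static ReadOpChecksumInfoProto readChecksumInfo(DataInputStream in)
      throws IOException {
    BlockOpResponseProto response = BlockOpResponseProto.parseDelimitedFrom(in);
    if (response == null || response.getStatus() != Status.SUCCESS) {
      throw new IOException("Unexpected or missing block op response");
    }
    // readOpChecksumInfo is an optional field (tag 4), so check before use.
    if (!response.hasReadOpChecksumInfo()) {
      throw new IOException("Response carries no checksum info");
    }
    ReadOpChecksumInfoProto ckInfo = response.getReadOpChecksumInfo();
    // ckInfo.getChecksum() describes the checksum in use (required field 1);
    // ckInfo.getChunkOffset() is the offset of the first chunk the sender
    // will stream (field 2).
    return ckInfo;
  }
}
```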
// Fragment of the generated wire-parsing code: a repeated occurrence of
// field 4 is merged into the existing readOpChecksumInfo_ value.
subBuilder.mergeFrom(readOpChecksumInfo_);
readOpChecksumInfo_ = subBuilder.buildPartial();
/**
 * <code>required .hadoop.hdfs.ChecksumProto checksum = 1;</code>
 */
public org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto.Builder getChecksumBuilder() {
  bitField0_ |= 0x00000001;
  onChanged();
  return getChecksumFieldBuilder().getBuilder();
}
.setStatus(Status.SUCCESS)
.setReadOpChecksumInfo(ReadOpChecksumInfoProto.newBuilder()
    .setChecksum(DataTransferProtoUtil.toProto(DEFAULT_CHECKSUM))
    .setChunkOffset(0L))
.build()
.writeDelimitedTo(recvOut);
/**
 * <code>optional .hadoop.hdfs.ReadOpChecksumInfoProto readOpChecksumInfo = 4;</code>
 */
public Builder mergeReadOpChecksumInfo(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto value) {
  if (readOpChecksumInfoBuilder_ == null) {
    if (((bitField0_ & 0x00000008) == 0x00000008) &&
        readOpChecksumInfo_ != org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto.getDefaultInstance()) {
      readOpChecksumInfo_ =
        org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto.newBuilder(readOpChecksumInfo_).mergeFrom(value).buildPartial();
    } else {
      readOpChecksumInfo_ = value;
    }
    onChanged();
  } else {
    readOpChecksumInfoBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000008;
  return this;
}
/**
 * <code>required .hadoop.hdfs.ChecksumProto checksum = 1;</code>
 */
private io.prestosql.hadoop.$internal.com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto,
    org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto.Builder,
    org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProtoOrBuilder>
    getChecksumFieldBuilder() {
  if (checksumBuilder_ == null) {
    checksumBuilder_ = new io.prestosql.hadoop.$internal.com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto,
        org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto.Builder,
        org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProtoOrBuilder>(
            checksum_,
            getParentForChildren(),
            isClean());
    checksum_ = null;
  }
  return checksumBuilder_;
}
/**
 * <code>required .hadoop.hdfs.ChecksumProto checksum = 1;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto,
    org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto.Builder,
    org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProtoOrBuilder>
    getChecksumFieldBuilder() {
  if (checksumBuilder_ == null) {
    checksumBuilder_ = new com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto,
        org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto.Builder,
        org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProtoOrBuilder>(
            checksum_,
            getParentForChildren(),
            isClean());
    checksum_ = null;
  }
  return checksumBuilder_;
}
/**
 * <code>optional .hadoop.hdfs.ReadOpChecksumInfoProto readOpChecksumInfo = 4;</code>
 */
public Builder setReadOpChecksumInfo(
    org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto.Builder builderForValue) {
  if (readOpChecksumInfoBuilder_ == null) {
    readOpChecksumInfo_ = builderForValue.build();
    onChanged();
  } else {
    readOpChecksumInfoBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000008;
  return this;
}
private static Builder create() { return new Builder(); }
public Builder clone() { return create().mergeFrom(buildPartial()); }