/**
 * Sets the {@code marker} field from an already-built message.
 *
 * <code>optional .hadoop.hdfs.BlockProto marker = 1;</code>
 */
public Builder setMarker(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
  if (markerBuilder_ != null) {
    // A nested field builder exists; delegate storage to it.
    markerBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    marker_ = value;
    onChanged();
  }
  // Record that the field is now present.
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * Returns a mutable builder for the {@code marker} field, marking the field
 * as present as a side effect.
 *
 * <code>optional .hadoop.hdfs.BlockProto marker = 1;</code>
 */
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder getMarkerBuilder() {
  // Set the has-bit first; the field builder is created lazily below.
  bitField0_ |= 0x00000001;
  onChanged();
  return getMarkerFieldBuilder().getBuilder();
}
/**
/**
 * Resets the {@code marker} field to its default value and clears its has-bit.
 *
 * <code>optional .hadoop.hdfs.BlockProto marker = 1;</code>
 */
public Builder clearMarker() {
  if (markerBuilder_ != null) {
    markerBuilder_.clear();
  } else {
    // No nested builder: drop back to the shared default instance.
    marker_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance();
    onChanged();
  }
  // Clear the presence bit for this field.
  bitField0_ = (bitField0_ & ~0x00000001);
  return this;
}
/**
/**
 * Merges {@code value} into the {@code marker} field. If a non-default
 * message is already set, the two are combined via
 * {@code mergeFrom(...).buildPartial()}; otherwise {@code value} replaces it.
 *
 * <code>optional .hadoop.hdfs.BlockProto marker = 1;</code>
 */
public Builder mergeMarker(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
  if (markerBuilder_ != null) {
    markerBuilder_.mergeFrom(value);
  } else {
    // Merge only when the field is flagged present AND holds a
    // non-default message; otherwise a plain assignment suffices.
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000001) == 0x00000001)
            && marker_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance();
    if (mergeIntoExisting) {
      marker_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto
          .newBuilder(marker_).mergeFrom(value).buildPartial();
    } else {
      marker_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * Sets the {@code marker} field from a builder, building the message first.
 *
 * <code>optional .hadoop.hdfs.BlockProto marker = 1;</code>
 */
public Builder setMarker(
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder builderForValue) {
  // Build once, then route the result to whichever storage is active.
  org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto built = builderForValue.build();
  if (markerBuilder_ != null) {
    markerBuilder_.setMessage(built);
  } else {
    marker_ = built;
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**