/** * <code>optional .hadoop.mapreduce.JobIdProto job_id = 1;</code> */ public org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder getJobIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getJobIdFieldBuilder().getBuilder(); } /**
/**
 * Lazily creates the nested single-field builder for {@code job_id}.
 *
 * <code>optional .hadoop.mapreduce.JobIdProto job_id = 1;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>
    getJobIdFieldBuilder() {
  if (jobIdBuilder_ != null) {
    return jobIdBuilder_;
  }
  // First access: wrap the current message (possibly null) in a nested builder
  // and clear the plain-field reference so the builder is the single source.
  jobIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto,
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder,
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
          jobId_, getParentForChildren(), isClean());
  jobId_ = null;
  return jobIdBuilder_;
}
/**
 * Parses a {@code GetCountersRequestProto} from the stream and merges it into this builder.
 *
 * @param input             the encoded message bytes
 * @param extensionRegistry registry used to resolve extensions while parsing
 * @return this builder, for call chaining
 * @throws java.io.IOException if the stream cannot be read or the data is malformed
 */
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parsed = null;
  try {
    parsed = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Keep whatever was decoded before the failure so it still gets merged below.
    parsed = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto)
        e.getUnfinishedMessage();
    throw e;
  } finally {
    if (parsed != null) {
      mergeFrom(parsed);
    }
  }
  return this;
}

private int bitField0_;
/**
 * Parses a {@code GetCountersRequestProto} from the stream and merges it into this builder.
 *
 * @param input             the encoded message bytes
 * @param extensionRegistry registry used to resolve extensions while parsing
 * @return this builder, for call chaining
 * @throws java.io.IOException if the stream cannot be read or the data is malformed
 */
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parsed = null;
  try {
    parsed = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Keep whatever was decoded before the failure so it still gets merged below.
    parsed = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto)
        e.getUnfinishedMessage();
    throw e;
  } finally {
    if (parsed != null) {
      mergeFrom(parsed);
    }
  }
  return this;
}

private int bitField0_;
/**
 * Parses a {@code GetCountersRequestProto} from the stream and merges it into this builder.
 *
 * @param input             the encoded message bytes
 * @param extensionRegistry registry used to resolve extensions while parsing
 * @return this builder, for call chaining
 * @throws java.io.IOException if the stream cannot be read or the data is malformed
 */
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parsed = null;
  try {
    parsed = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Keep whatever was decoded before the failure so it still gets merged below.
    parsed = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto)
        e.getUnfinishedMessage();
    throw e;
  } finally {
    if (parsed != null) {
      mergeFrom(parsed);
    }
  }
  return this;
}

private int bitField0_;
/**
 * Parses a {@code GetCountersRequestProto} from the stream and merges it into this builder.
 *
 * @param input             the encoded message bytes
 * @param extensionRegistry registry used to resolve extensions while parsing
 * @return this builder, for call chaining
 * @throws java.io.IOException if the stream cannot be read or the data is malformed
 */
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto parsed = null;
  try {
    parsed = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Keep whatever was decoded before the failure so it still gets merged below.
    parsed = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto)
        e.getUnfinishedMessage();
    throw e;
  } finally {
    if (parsed != null) {
      mergeFrom(parsed);
    }
  }
  return this;
}

private int bitField0_;
// Factory for a fresh, empty Builder instance.
private static Builder create() {
  return new Builder();
}
// Private: callers obtain instances through create()/newBuilder().
private Builder() {
  maybeForceBuilderInitialization();
}
// Factory for a fresh, empty Builder instance.
private static Builder create() {
  return new Builder();
}
// Private: callers obtain instances through create()/newBuilder().
private Builder() {
  maybeForceBuilderInitialization();
}
/**
 * Lazily creates the nested single-field builder for {@code job_id}.
 *
 * <code>optional .hadoop.mapreduce.JobIdProto job_id = 1;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>
    getJobIdFieldBuilder() {
  if (jobIdBuilder_ != null) {
    return jobIdBuilder_;
  }
  // First access: wrap the current message (possibly null) in a nested builder
  // and clear the plain-field reference so the builder is the single source.
  jobIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto,
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder,
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
          jobId_, getParentForChildren(), isClean());
  jobId_ = null;
  return jobIdBuilder_;
}
/**
 * Lazily creates the nested single-field builder for {@code job_id}.
 *
 * <code>optional .hadoop.mapreduce.JobIdProto job_id = 1;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>
    getJobIdFieldBuilder() {
  if (jobIdBuilder_ != null) {
    return jobIdBuilder_;
  }
  // First access: wrap the current message (possibly null) in a nested builder
  // and clear the plain-field reference so the builder is the single source.
  jobIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto,
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder,
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
          jobId_, getParentForChildren(), isClean());
  jobId_ = null;
  return jobIdBuilder_;
}
// Private: callers obtain instances through create()/newBuilder().
private Builder() {
  maybeForceBuilderInitialization();
}
// Private: callers obtain instances through create()/newBuilder().
private Builder() {
  maybeForceBuilderInitialization();
}
/** Returns a deep copy of this builder via a partial-build / merge round trip. */
public Builder clone() {
  Builder copy = create();
  copy.mergeFrom(buildPartial());
  return copy;
}
/**
 * Lazily creates the nested single-field builder for {@code job_id}.
 *
 * <code>optional .hadoop.mapreduce.JobIdProto job_id = 1;</code>
 */
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>
    getJobIdFieldBuilder() {
  if (jobIdBuilder_ != null) {
    return jobIdBuilder_;
  }
  // First access: wrap the current message (possibly null) in a nested builder
  // and clear the plain-field reference so the builder is the single source.
  jobIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto,
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto.Builder,
      org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder>(
          jobId_, getParentForChildren(), isClean());
  jobId_ = null;
  return jobIdBuilder_;
}
/** Returns a deep copy of this builder via a partial-build / merge round trip. */
public Builder clone() {
  Builder copy = create();
  copy.mergeFrom(buildPartial());
  return copy;
}
/** Returns a new builder pre-populated with the fields of {@code prototype}. */
public static Builder newBuilder(
    org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto prototype) {
  return newBuilder().mergeFrom(prototype);
}

/** Returns a builder initialized with this message's current field values. */
public Builder toBuilder() {
  return newBuilder(this);
}
/** Returns a new builder pre-populated with the fields of {@code prototype}. */
public static Builder newBuilder(
    org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto prototype) {
  return newBuilder().mergeFrom(prototype);
}

/** Returns a builder initialized with this message's current field values. */
public Builder toBuilder() {
  return newBuilder(this);
}
/** Returns a new builder pre-populated with the fields of {@code prototype}. */
public static Builder newBuilder(
    org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto prototype) {
  return newBuilder().mergeFrom(prototype);
}

/** Returns a builder initialized with this message's current field values. */
public Builder toBuilder() {
  return newBuilder(this);
}