/**
 * <code>repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;</code>
 *
 * Inserts the message built from {@code builderForValue} at {@code index}.
 */
public Builder addTaskReports(
    int index,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto built = builderForValue.build();
  if (taskReportsBuilder_ != null) {
    // A repeated-field builder is active and owns the list.
    taskReportsBuilder_.addMessage(index, built);
  } else {
    ensureTaskReportsIsMutable();
    taskReports_.add(index, built);
    onChanged();
  }
  return this;
}
/**
// NOTE(review): statement fragments from generated protobuf parse/merge logic —
// each merges the previously stored task_report into subBuilder and writes the
// partial result back. The enclosing method is outside this chunk; verify in
// full context before changing.
subBuilder.mergeFrom(taskReport_); taskReport_ = subBuilder.buildPartial();
subBuilder.mergeFrom(taskReport_); taskReport_ = subBuilder.buildPartial();
subBuilder.mergeFrom(taskReport_); taskReport_ = subBuilder.buildPartial();
subBuilder.mergeFrom(taskReport_); taskReport_ = subBuilder.buildPartial();
/**
 * Rebuilds the running_attempts list in the proto builder from the local
 * {@code runningAttempts} collection. No-op beyond clearing when the local
 * list is null.
 */
private void addRunningAttemptsToProto() {
  maybeInitBuilder();
  builder.clearRunningAttempts();
  if (runningAttempts == null) {
    return;
  }
  // Lazily converts each TaskAttemptId to proto form as the protobuf
  // builder iterates; avoids materializing an intermediate list.
  Iterable<TaskAttemptIdProto> protoIterable = new Iterable<TaskAttemptIdProto>() {
    @Override
    public Iterator<TaskAttemptIdProto> iterator() {
      return new Iterator<TaskAttemptIdProto>() {
        private final Iterator<TaskAttemptId> attemptIter = runningAttempts.iterator();

        @Override
        public boolean hasNext() {
          return attemptIter.hasNext();
        }

        @Override
        public TaskAttemptIdProto next() {
          return convertToProtoFormat(attemptIter.next());
        }

        @Override
        public void remove() {
          throw new UnsupportedOperationException();
        }
      };
    }
  };
  builder.addAllRunningAttempts(protoIterable);
}
@Override
/**
 * Rebuilds the running_attempts list in the proto builder from the local
 * {@code runningAttempts} collection. No-op beyond clearing when the local
 * list is null.
 */
private void addRunningAttemptsToProto() {
  maybeInitBuilder();
  builder.clearRunningAttempts();
  if (runningAttempts == null) {
    return;
  }
  // Lazily converts each TaskAttemptId to proto form as the protobuf
  // builder iterates; avoids materializing an intermediate list.
  Iterable<TaskAttemptIdProto> protoIterable = new Iterable<TaskAttemptIdProto>() {
    @Override
    public Iterator<TaskAttemptIdProto> iterator() {
      return new Iterator<TaskAttemptIdProto>() {
        private final Iterator<TaskAttemptId> attemptIter = runningAttempts.iterator();

        @Override
        public boolean hasNext() {
          return attemptIter.hasNext();
        }

        @Override
        public TaskAttemptIdProto next() {
          return convertToProtoFormat(attemptIter.next());
        }

        @Override
        public void remove() {
          throw new UnsupportedOperationException();
        }
      };
    }
  };
  builder.addAllRunningAttempts(protoIterable);
}
@Override
/**
 * Rebuilds the running_attempts list in the proto builder from the local
 * {@code runningAttempts} collection. No-op beyond clearing when the local
 * list is null.
 */
private void addRunningAttemptsToProto() {
  maybeInitBuilder();
  builder.clearRunningAttempts();
  if (runningAttempts == null) {
    return;
  }
  // Lazily converts each TaskAttemptId to proto form as the protobuf
  // builder iterates; avoids materializing an intermediate list.
  Iterable<TaskAttemptIdProto> protoIterable = new Iterable<TaskAttemptIdProto>() {
    @Override
    public Iterator<TaskAttemptIdProto> iterator() {
      return new Iterator<TaskAttemptIdProto>() {
        private final Iterator<TaskAttemptId> attemptIter = runningAttempts.iterator();

        @Override
        public boolean hasNext() {
          return attemptIter.hasNext();
        }

        @Override
        public TaskAttemptIdProto next() {
          return convertToProtoFormat(attemptIter.next());
        }

        @Override
        public void remove() {
          throw new UnsupportedOperationException();
        }
      };
    }
  };
  builder.addAllRunningAttempts(protoIterable);
}
@Override
/**
 * Rebuilds the running_attempts list in the proto builder from the local
 * {@code runningAttempts} collection. No-op beyond clearing when the local
 * list is null.
 */
private void addRunningAttemptsToProto() {
  maybeInitBuilder();
  builder.clearRunningAttempts();
  if (runningAttempts == null) {
    return;
  }
  // Lazily converts each TaskAttemptId to proto form as the protobuf
  // builder iterates; avoids materializing an intermediate list.
  Iterable<TaskAttemptIdProto> protoIterable = new Iterable<TaskAttemptIdProto>() {
    @Override
    public Iterator<TaskAttemptIdProto> iterator() {
      return new Iterator<TaskAttemptIdProto>() {
        private final Iterator<TaskAttemptId> attemptIter = runningAttempts.iterator();

        @Override
        public boolean hasNext() {
          return attemptIter.hasNext();
        }

        @Override
        public TaskAttemptIdProto next() {
          return convertToProtoFormat(attemptIter.next());
        }

        @Override
        public void remove() {
          throw new UnsupportedOperationException();
        }
      };
    }
  };
  builder.addAllRunningAttempts(protoIterable);
}
@Override
/**
 * <code>optional .hadoop.mapreduce.TaskReportProto task_report = 1;</code>
 *
 * Merges {@code value} into the task_report field, following the standard
 * generated-protobuf merge contract for optional message fields.
 */
public Builder mergeTaskReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
  if (taskReportBuilder_ != null) {
    // A nested field builder owns the message; delegate the merge to it.
    taskReportBuilder_.mergeFrom(value);
  } else {
    // Reference comparison with the shared default instance is intentional
    // in generated protobuf code: it cheaply detects "field never set".
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000001) == 0x00000001)
            && taskReport_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance();
    if (mergeIntoExisting) {
      taskReport_ =
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.newBuilder(taskReport_)
              .mergeFrom(value).buildPartial();
    } else {
      taskReport_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskReportProto task_report = 1;</code>
 *
 * Merges {@code value} into the task_report field, following the standard
 * generated-protobuf merge contract for optional message fields.
 */
public Builder mergeTaskReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
  if (taskReportBuilder_ != null) {
    // A nested field builder owns the message; delegate the merge to it.
    taskReportBuilder_.mergeFrom(value);
  } else {
    // Reference comparison with the shared default instance is intentional
    // in generated protobuf code: it cheaply detects "field never set".
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000001) == 0x00000001)
            && taskReport_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance();
    if (mergeIntoExisting) {
      taskReport_ =
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.newBuilder(taskReport_)
              .mergeFrom(value).buildPartial();
    } else {
      taskReport_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskReportProto task_report = 1;</code>
 *
 * Merges {@code value} into the task_report field, following the standard
 * generated-protobuf merge contract for optional message fields.
 */
public Builder mergeTaskReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
  if (taskReportBuilder_ != null) {
    // A nested field builder owns the message; delegate the merge to it.
    taskReportBuilder_.mergeFrom(value);
  } else {
    // Reference comparison with the shared default instance is intentional
    // in generated protobuf code: it cheaply detects "field never set".
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000001) == 0x00000001)
            && taskReport_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance();
    if (mergeIntoExisting) {
      taskReport_ =
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.newBuilder(taskReport_)
              .mergeFrom(value).buildPartial();
    } else {
      taskReport_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskReportProto task_report = 1;</code>
 *
 * Merges {@code value} into the task_report field, following the standard
 * generated-protobuf merge contract for optional message fields.
 */
public Builder mergeTaskReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto value) {
  if (taskReportBuilder_ != null) {
    // A nested field builder owns the message; delegate the merge to it.
    taskReportBuilder_.mergeFrom(value);
  } else {
    // Reference comparison with the shared default instance is intentional
    // in generated protobuf code: it cheaply detects "field never set".
    boolean mergeIntoExisting =
        ((bitField0_ & 0x00000001) == 0x00000001)
            && taskReport_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.getDefaultInstance();
    if (mergeIntoExisting) {
      taskReport_ =
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.newBuilder(taskReport_)
              .mergeFrom(value).buildPartial();
    } else {
      taskReport_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;</code>
 *
 * Appends the message built from {@code builderForValue}.
 */
public Builder addTaskReports(
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto built = builderForValue.build();
  if (taskReportsBuilder_ != null) {
    // A repeated-field builder is active and owns the list.
    taskReportsBuilder_.addMessage(built);
  } else {
    ensureTaskReportsIsMutable();
    taskReports_.add(built);
    onChanged();
  }
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskReportProto task_report = 1;</code>
 *
 * Replaces the task_report field with the message built from
 * {@code builderForValue} and marks the field as set.
 */
public Builder setTaskReport(
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto built = builderForValue.build();
  if (taskReportBuilder_ != null) {
    // A nested field builder owns the message; route the update through it.
    taskReportBuilder_.setMessage(built);
  } else {
    taskReport_ = built;
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;</code>
 *
 * Replaces the element at {@code index} with the message built from
 * {@code builderForValue}.
 */
public Builder setTaskReports(
    int index,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto built = builderForValue.build();
  if (taskReportsBuilder_ != null) {
    // A repeated-field builder is active and owns the list.
    taskReportsBuilder_.setMessage(index, built);
  } else {
    ensureTaskReportsIsMutable();
    taskReports_.set(index, built);
    onChanged();
  }
  return this;
}
/**
/**
 * <code>repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;</code>
 *
 * Inserts the message built from {@code builderForValue} at {@code index}.
 */
public Builder addTaskReports(
    int index,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto built = builderForValue.build();
  if (taskReportsBuilder_ != null) {
    // A repeated-field builder is active and owns the list.
    taskReportsBuilder_.addMessage(index, built);
  } else {
    ensureTaskReportsIsMutable();
    taskReports_.add(index, built);
    onChanged();
  }
  return this;
}
/**
/**
 * <code>repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;</code>
 *
 * Replaces the element at {@code index} with the message built from
 * {@code builderForValue}.
 */
public Builder setTaskReports(
    int index,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto built = builderForValue.build();
  if (taskReportsBuilder_ != null) {
    // A repeated-field builder is active and owns the list.
    taskReportsBuilder_.setMessage(index, built);
  } else {
    ensureTaskReportsIsMutable();
    taskReports_.set(index, built);
    onChanged();
  }
  return this;
}
/**
/**
 * <code>repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;</code>
 *
 * Inserts the message built from {@code builderForValue} at {@code index}.
 */
public Builder addTaskReports(
    int index,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto built = builderForValue.build();
  if (taskReportsBuilder_ != null) {
    // A repeated-field builder is active and owns the list.
    taskReportsBuilder_.addMessage(index, built);
  } else {
    ensureTaskReportsIsMutable();
    taskReports_.add(index, built);
    onChanged();
  }
  return this;
}
/**
/**
 * <code>repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;</code>
 *
 * Replaces the element at {@code index} with the message built from
 * {@code builderForValue}.
 */
public Builder setTaskReports(
    int index,
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto.Builder builderForValue) {
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskReportProto built = builderForValue.build();
  if (taskReportsBuilder_ != null) {
    // A repeated-field builder is active and owns the list.
    taskReportsBuilder_.setMessage(index, built);
  } else {
    ensureTaskReportsIsMutable();
    taskReports_.set(index, built);
    onChanged();
  }
  return this;
}
/**