// Body of the Builder copy constructor: each field that is valid on `other`
// is deep-copied and its "field set" flag is recorded.
if (isValidValue(fields()[0], other.taskid)) {
  this.taskid = data().deepCopy(fields()[0].schema(), other.taskid);
  fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.taskType)) {
  this.taskType = data().deepCopy(fields()[1].schema(), other.taskType);
  fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.finishTime)) {
  this.finishTime = data().deepCopy(fields()[2].schema(), other.finishTime);
  fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.error)) {
  this.error = data().deepCopy(fields()[3].schema(), other.error);
  fieldSetFlags()[3] = true;
}
if (isValidValue(fields()[4], other.failedDueToAttempt)) {
  this.failedDueToAttempt = data().deepCopy(fields()[4].schema(), other.failedDueToAttempt);
  fieldSetFlags()[4] = true;
}
if (isValidValue(fields()[5], other.status)) {
  this.status = data().deepCopy(fields()[5].schema(), other.status);
  fieldSetFlags()[5] = true;
}
if (isValidValue(fields()[6], other.counters)) {
  this.counters = data().deepCopy(fields()[6].schema(), other.counters);
  fieldSetFlags()[6] = true;
}
@Override
public TaskFailed build() {
  try {
    TaskFailed record = new TaskFailed();
    record.taskid = fieldSetFlags()[0] ? this.taskid : (java.lang.CharSequence) defaultValue(fields()[0]);
    record.taskType = fieldSetFlags()[1] ? this.taskType : (java.lang.CharSequence) defaultValue(fields()[1]);
    record.finishTime = fieldSetFlags()[2] ? this.finishTime : (java.lang.Long) defaultValue(fields()[2]);
    record.error = fieldSetFlags()[3] ? this.error : (java.lang.CharSequence) defaultValue(fields()[3]);
    record.failedDueToAttempt = fieldSetFlags()[4] ? this.failedDueToAttempt : (java.lang.CharSequence) defaultValue(fields()[4]);
    record.status = fieldSetFlags()[5] ? this.status : (java.lang.CharSequence) defaultValue(fields()[5]);
    record.counters = fieldSetFlags()[6] ? this.counters : (org.apache.hadoop.mapreduce.jobhistory.JhCounters) defaultValue(fields()[6]);
    return record;
  } catch (Exception e) {
    throw new org.apache.avro.AvroRuntimeException(e);
  }
}
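/*
 * A minimal usage sketch (not part of the generated source): building a
 * TaskFailed event with the fluent builder. The set<Field> setters used
 * below are assumed from Avro's generated-builder naming convention and do
 * not appear in this excerpt; any field left unset falls back to its schema
 * default via defaultValue(...) when build() runs, as the method above shows.
 */
TaskFailed failure = TaskFailed.newBuilder()
    .setTaskid("task_1370000000000_0001_m_000007")   // hypothetical task id
    .setTaskType("MAP")
    .setFinishTime(System.currentTimeMillis())
    .setError("Container killed on request")
    .setStatus("FAILED")
    .build();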
/** Checks whether the 'status' field has been set */ public boolean hasStatus() { return fieldSetFlags()[5]; }
/** Checks whether the 'taskid' field has been set */ public boolean hasTaskid() { return fieldSetFlags()[0]; }
/** Checks whether the 'error' field has been set */ public boolean hasError() { return fieldSetFlags()[3]; }
/** Creates a new TaskFailed RecordBuilder by copying an existing Builder */ public static org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder newBuilder(org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder other) { return new org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder(other); }
/** Creates a new TaskFailed RecordBuilder */ public static org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder newBuilder() { return new org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder(); }
/** Creates a new TaskFailed RecordBuilder by copying an existing TaskFailed instance */ public static org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder newBuilder(org.apache.hadoop.mapreduce.jobhistory.TaskFailed other) { return new org.apache.hadoop.mapreduce.jobhistory.TaskFailed.Builder(other); }
/** Checks whether the 'failedDueToAttempt' field has been set */ public boolean hasFailedDueToAttempt() { return fieldSetFlags()[4]; }
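/*
 * A sketch (assumptions noted inline) of the copy-and-modify path: given an
 * existing TaskFailed instance `original`, newBuilder(original) deep-copies
 * every valid field into a fresh Builder, so the has<Field> checks above
 * report which fields were carried over. setStatus is assumed from Avro's
 * set<Field> naming convention; it is not shown in this excerpt.
 */
TaskFailed.Builder copy = TaskFailed.newBuilder(original);
if (!copy.hasStatus()) {
  copy.setStatus("FAILED");   // supply a status only if none was copied over
}
TaskFailed adjusted = copy.build();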