/** Exposes the task's {@link State} (via {@code getTaskState()}) as the execution metadata. */
@Override public State getExecutionMetadata() { return getTaskState(); }
/** Exposes the task's {@link State} (via {@code getTaskState()}) as the persistent state. */
@Override public State getPersistentState() { return getTaskState(); }
/** Delegates to the underlying task state's {@link WorkUnitState.WorkingState}. */
@Override public WorkUnitState.WorkingState getWorkingState() { return getTaskState().getWorkingState(); }
@Override public void onTaskCommitCompletion(Task task) { if (GobblinMetrics.isEnabled(task.getTaskState().getWorkunit())) { // Update record-level metrics after the task is done task.updateRecordMetrics(); task.updateByteMetrics(); } // Cancel the task state reporter associated with this task. The reporter might // not be found for the given task because the task fails before the task is // registered. So we need to make sure the reporter exists before calling cancel. if (this.scheduledReporters.containsKey(task.getTaskId())) { this.scheduledReporters.remove(task.getTaskId()).cancel(false); } LOGGER.info(String .format("Task %s completed in %dms with state %s", task.getTaskId(), task.getTaskState().getTaskDuration(), task.getTaskState().getWorkingState())); }
/** * Retry a failed {@link Task}. * * @param task failed {@link Task} to be retried */ public void retry(Task task) { if (GobblinMetrics.isEnabled(task.getTaskState().getWorkunit()) && task.getTaskState().contains(ConfigurationKeys.FORK_BRANCHES_KEY)) { // Adjust metrics to clean up numbers from the failed task task.getTaskState() .adjustJobMetricsOnRetry(task.getTaskState().getPropAsInt(ConfigurationKeys.FORK_BRANCHES_KEY)); } // Task retry interval increases linearly with number of retries long interval = task.getRetryCount() * this.retryIntervalInSeconds; // Schedule the retry of the failed task this.taskExecutor.schedule(new TrackingTask(task, interval, TimeUnit.SECONDS), interval, TimeUnit.SECONDS); LOG.info(String.format("Scheduled retry of failed task %s to run in %d seconds", task.getTaskId(), interval)); task.incrementRetryCount(); }
// NOTE(review): this method appears truncated in this chunk — the try block is never
// closed and the trailing "task.getTaskState().getTaskDuration(), ...));" is an argument
// list detached from its enclosing statement (presumably a log call). Recover the full
// method body from the original file before relying on this text. What IS visible:
// on commit completion it adds the task's state to this.jobState and posts a
// NewTaskCompletionEvent carrying that state on this.eventBus.
@Override public void onTaskCommitCompletion(Task task) { try { if (GobblinMetrics.isEnabled(task.getTaskState().getWorkunit())) { this.jobState.addTaskState(task.getTaskState()); this.eventBus.post(new NewTaskCompletionEvent(ImmutableList.of(task.getTaskState()))); task.getTaskState().getTaskDuration(), task.getTaskState().getWorkingState()));
/**
 * Callback invoked once a {@link Task} has committed: refreshes record/byte metrics,
 * optionally mirrors them into MR counters, and logs the completion.
 *
 * @param task the committed {@link Task}
 */
@Override
public void onTaskCommitCompletion(Task task) {
  WorkUnit workUnit = task.getTaskState().getWorkunit();
  if (GobblinMetrics.isEnabled(workUnit)) {
    // Refresh record/byte counts now that the task has finished.
    task.updateRecordMetrics();
    task.updateByteMetrics();
    // Optionally report the metrics as Hadoop counters as well.
    if (workUnit.getPropAsBoolean(ConfigurationKeys.MR_REPORT_METRICS_AS_COUNTERS_KEY,
        ConfigurationKeys.DEFAULT_MR_REPORT_METRICS_AS_COUNTERS)) {
      updateCounters(task);
    }
  }
  LOG.info(String.format("Task %s completed running in %dms with state %s", task.getTaskId(),
      task.getTaskState().getTaskDuration(), task.getTaskState().getWorkingState()));
}
// NOTE(review): this is a fragment of a larger method — the two if-blocks are never
// closed in this excerpt, so it cannot stand alone; verify against the full source.
// Visible behavior: persists the task state to taskStateStore under
// "<taskId> + TASK_STATE_STORE_TABLE_SUFFIX"; on a FAILED working state it sets
// hasTaskFailure and logs the recorded failure exception (if present); on
// SUCCESSFUL/COMMITTED it additionally writes a success-marker entry
// ("<taskId> + TASK_STATE_STORE_SUCCESS_MARKER_SUFFIX").
log.info("Writing task state for task " + task.getTaskId()); taskStateStore.put(task.getJobId(), task.getTaskId() + AbstractJobLauncher.TASK_STATE_STORE_TABLE_SUFFIX, task.getTaskState()); if (task.getTaskState().getWorkingState() == WorkUnitState.WorkingState.FAILED) { hasTaskFailure = true; if (task.getTaskState().contains(ConfigurationKeys.TASK_FAILURE_EXCEPTION_KEY)) { log.error(String.format("Task %s failed due to exception: %s", task.getTaskId(), task.getTaskState().getProp(ConfigurationKeys.TASK_FAILURE_EXCEPTION_KEY))); if (task.getTaskState().getWorkingState() == WorkUnitState.WorkingState.SUCCESSFUL || task.getTaskState().getWorkingState() == WorkUnitState.WorkingState.COMMITTED) { taskStateStore.put(task.getJobId(), task.getTaskId() + TASK_STATE_STORE_SUCCESS_MARKER_SUFFIX, task.getTaskState());
@Override public void onTaskRunCompletion(Task task) { try { // Check the task state and handle task retry if task failed and // it has not reached the maximum number of retries WorkUnitState.WorkingState state = task.getTaskState().getWorkingState(); if (state == WorkUnitState.WorkingState.FAILED && task.getRetryCount() < this.maxTaskRetries) { this.taskExecutor.retry(task); return; } } catch (Throwable t) { LOG.error("Failed to process a task completion callback", t); } // Mark the completion of this task task.markTaskCompletion(); }
/**
 * Refreshes the tracked task's record- and byte-level metrics, but only when
 * metrics collection is enabled for the task's work unit.
 */
protected void updateTaskMetrics() {
  // Nothing to do when metrics are disabled for this work unit.
  if (!GobblinMetrics.isEnabled(this.task.getTaskState().getWorkunit())) {
    return;
  }
  this.task.updateRecordMetrics();
  this.task.updateByteMetrics();
}
}
@Override protected void updateTaskMetrics() { super.updateTaskMetrics(); WorkUnit workUnit = this.task.getTaskState().getWorkunit(); if (GobblinMetrics.isEnabled(workUnit)) { if (workUnit.getPropAsBoolean(ConfigurationKeys.MR_REPORT_METRICS_AS_COUNTERS_KEY, ConfigurationKeys.DEFAULT_MR_REPORT_METRICS_AS_COUNTERS)) { updateCounters(this.task); } } // Tell the TaskTracker it's making progress this.context.progress(); } }
// NOTE(review): these two assertions expect contradictory working states (FAILED then
// SUCCESSFUL) for what reads as the same task — they are almost certainly excerpts from
// two different test cases collapsed together; verify against the original test methods.
Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.FAILED); Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);
// Verify the task finished in the SUCCESSFUL working state.
Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);
/**
 * Verifies that records flow through converter and writer untouched while
 * {@link BasicTestControlMessage}s are delivered to both stages.
 */
@Test
public void testControlMessages() throws Exception {
  StreamEntity[] stream = new StreamEntity[]{
      new RecordEnvelope<>("a"), new BasicTestControlMessage("1"),
      new RecordEnvelope<>("b"), new BasicTestControlMessage("2")};
  MyExtractor extractor = new MyExtractor(stream);
  MyConverter converter = new MyConverter();
  MyDataWriter writer = new MyDataWriter();

  Task task = setupTask(extractor, writer, converter);
  task.run();
  task.commit();

  Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);
  Assert.assertEquals(converter.records, Lists.newArrayList("a", "b"));
  Assert.assertEquals(converter.messages,
      Lists.newArrayList(new BasicTestControlMessage("1"), new BasicTestControlMessage("2")));
  Assert.assertEquals(writer.records, Lists.newArrayList("a", "b"));
  Assert.assertEquals(writer.messages,
      Lists.newArrayList(new BasicTestControlMessage("1"), new BasicTestControlMessage("2")));
}
/**
 * Verifies that {@link FlushControlMessage}s reach the converter and that each one
 * triggers a flush on the writer.
 */
@Test
public void testFlushControlMessages() throws Exception {
  StreamEntity[] stream = new StreamEntity[]{
      new RecordEnvelope<>("a"), FlushControlMessage.builder().flushReason("flush1").build(),
      new RecordEnvelope<>("b"), FlushControlMessage.builder().flushReason("flush2").build()};
  MyExtractor extractor = new MyExtractor(stream);
  MyConverter converter = new MyConverter();
  MyFlushDataWriter writer = new MyFlushDataWriter();

  Task task = setupTask(extractor, writer, converter);
  task.run();
  task.commit();

  Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);
  Assert.assertEquals(converter.records, Lists.newArrayList("a", "b"));
  Assert.assertEquals(converter.messages, Lists.newArrayList(
      FlushControlMessage.builder().flushReason("flush1").build(),
      FlushControlMessage.builder().flushReason("flush2").build()));
  Assert.assertEquals(writer.records, Lists.newArrayList("a", "b"));
  Assert.assertEquals(writer.flush_messages, Lists.newArrayList("flush called", "flush called"));
}
/**
 * Test the injection of {@link ControlMessage}s: the injector detects schema changes
 * in the record stream and the converter reacts to the injected metadata updates.
 *
 * @throws Exception on task failure
 */
@Test
public void testInjectedControlMessages() throws Exception {
  MyExtractor extractor = new MyExtractor(new StreamEntity[]{new RecordEnvelope<>("schema:a"),
      new RecordEnvelope<>("schema:b"), new RecordEnvelope<>("schema1:c"), new RecordEnvelope<>("schema2:d")});
  SchemaChangeDetectionInjector injector = new SchemaChangeDetectionInjector();
  SchemaAppendConverter converter = new SchemaAppendConverter();
  MyDataWriter writer = new MyDataWriterWithSchemaCheck();

  // Collections.emptyList() instead of the raw Collections.EMPTY_LIST: type-safe and
  // avoids an unchecked-conversion warning at this call site.
  Task task = setupTask(extractor, writer, Collections.emptyList(), Lists.newArrayList(injector, converter));
  task.run();
  task.commit();

  Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);
  Assert.assertEquals(converter.records, Lists.newArrayList("a:schema", "b:schema", "c:schema1", "d:schema2"));
  Assert.assertEquals(converter.messages, Lists.newArrayList(
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("schema1").build()),
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("schema2").build())));
  Assert.assertEquals(writer.records, Lists.newArrayList("a:schema", "b:schema", "c:schema1", "d:schema2"));
  Assert.assertEquals(writer.messages, Lists.newArrayList(
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("schema1").build()),
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("schema2").build())));
}
/**
 * Test with the converter configured in the list of {@link RecordStreamProcessor}s.
 *
 * @throws Exception on task failure
 */
@Test
public void testMetadataUpdateWithStreamProcessors() throws Exception {
  MyExtractor extractor = new MyExtractor(new StreamEntity[]{new RecordEnvelope<>("a"),
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema1").build()),
      new RecordEnvelope<>("b"),
      // Diamond added: the original raw MetadataUpdateControlMessage defeated generic type checking.
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema2").build())});
  SchemaAppendConverter converter = new SchemaAppendConverter();
  MyDataWriter writer = new MyDataWriter();

  // Collections.emptyList() instead of the raw Collections.EMPTY_LIST: type-safe and
  // avoids an unchecked-conversion warning at this call site.
  Task task = setupTask(extractor, writer, Collections.emptyList(), Lists.newArrayList(converter));
  task.run();
  task.commit();

  Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);
  Assert.assertEquals(converter.records, Lists.newArrayList("a:schema", "b:Schema1"));
  Assert.assertEquals(converter.messages, Lists.newArrayList(
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema1").build()),
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema2").build())));
  Assert.assertEquals(writer.records, Lists.newArrayList("a:schema", "b:Schema1"));
  Assert.assertEquals(writer.messages, Lists.newArrayList(
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema1").build()),
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema2").build())));
}
/**
 * Test of metadata update control messages that signal the converters to change schemas
 *
 * @throws Exception on task failure
 */
@Test
public void testMetadataUpdateControlMessages() throws Exception {
  MyExtractor extractor = new MyExtractor(new StreamEntity[]{new RecordEnvelope<>("a"),
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema1").build()),
      new RecordEnvelope<>("b"),
      // Diamond added: the original raw MetadataUpdateControlMessage defeated generic type checking.
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema2").build())});
  SchemaAppendConverter converter = new SchemaAppendConverter();
  MyDataWriter writer = new MyDataWriter();

  Task task = setupTask(extractor, writer, converter);
  task.run();
  task.commit();

  Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);
  Assert.assertEquals(converter.records, Lists.newArrayList("a:schema", "b:Schema1"));
  Assert.assertEquals(converter.messages, Lists.newArrayList(
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema1").build()),
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema2").build())));
  Assert.assertEquals(writer.records, Lists.newArrayList("a:schema", "b:Schema1"));
  Assert.assertEquals(writer.messages, Lists.newArrayList(
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema1").build()),
      new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema2").build())));
}
/** Exposes the task's {@link State} (via {@code getTaskState()}) as the execution metadata. */
@Override public State getExecutionMetadata() { return getTaskState(); }
/**
 * Refreshes the tracked task's record- and byte-level metrics, but only when
 * metrics collection is enabled for the task's work unit.
 */
protected void updateTaskMetrics() { if (GobblinMetrics.isEnabled(this.task.getTaskState().getWorkunit())) { this.task.updateRecordMetrics(); this.task.updateByteMetrics(); } } }