@Override
public int[] getStageIds() {
  SparkJobInfo sparkJobInfo = getJobInfo();
  return sparkJobInfo == null ? new int[0] : sparkJobInfo.stageIds();
}
@Override
public Map<String, SparkStageProgress> getSparkStageProgress() {
  Map<String, SparkStageProgress> stageProgresses = new HashMap<String, SparkStageProgress>();
  for (int stageId : getStageIds()) {
    SparkStageInfo sparkStageInfo = getStageInfo(stageId);
    if (sparkStageInfo != null) {
      int runningTaskCount = sparkStageInfo.numActiveTasks();
      int completedTaskCount = sparkStageInfo.numCompletedTasks();
      int failedTaskCount = sparkStageInfo.numFailedTasks();
      int totalTaskCount = sparkStageInfo.numTasks();
      SparkStageProgress sparkStageProgress = new SparkStageProgress(
          totalTaskCount, completedTaskCount, runningTaskCount, failedTaskCount);
      // Key by "<stageId>_<attemptId>" so each retried stage attempt is tracked separately.
      stageProgresses.put(String.valueOf(sparkStageInfo.stageId()) + "_"
          + sparkStageInfo.currentAttemptId(), sparkStageProgress);
    }
  }
  return stageProgresses;
}
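// Hedged sketch of consuming the map built above. Only the "<stageId>_<attemptId>"
// key format is taken from getSparkStageProgress(); `status` (a LocalSparkJobStatus
// instance) and the log line are illustrative assumptions, not code from this class.
for (Map.Entry<String, SparkStageProgress> entry : status.getSparkStageProgress().entrySet()) {
  String[] parts = entry.getKey().split("_");
  int stageId = Integer.parseInt(parts[0]);
  int attemptId = Integer.parseInt(parts[1]);
  LOG.info("Stage " + stageId + " (attempt " + attemptId + "): " + entry.getValue());
}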
LocalSparkJobStatus sparkJobStatus = new LocalSparkJobStatus(
    sc, jobId, jobMetricsListener, sparkCounters, plan.getCachedRDDIds(), future);
return new LocalSparkJobRef(Integer.toString(jobId), hiveConf, sparkJobStatus, sc);
@Override
public SparkStatistics getSparkStatistics() {
  SparkStatisticsBuilder sparkStatisticsBuilder = new SparkStatisticsBuilder();
  // Add Hive operator level statistics.
  sparkStatisticsBuilder.add(sparkCounters);
  // Add Spark job metrics.
  String jobIdentifier = "Spark Job[" + jobId + "] Metrics";
  Map<String, List<TaskMetrics>> jobMetric = jobMetricsListener.getJobMetric(jobId);
  if (jobMetric == null) {
    return null;
  }
  Map<String, Long> flatJobMetric = combineJobLevelMetrics(jobMetric);
  for (Map.Entry<String, Long> entry : flatJobMetric.entrySet()) {
    sparkStatisticsBuilder.add(jobIdentifier, entry.getKey(), Long.toString(entry.getValue()));
  }
  return sparkStatisticsBuilder.build();
}
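// combineJobLevelMetrics(...) is not shown in this excerpt. A minimal sketch of
// the flattening it implies: sum each Spark TaskMetrics field across every stage
// and task of the job. The method name here and the "ExecutorRunTime" key are
// assumptions; TaskMetrics.executorRunTime() is Spark's real accessor
// (org.apache.spark.executor.TaskMetrics).
private Map<String, Long> combineJobLevelMetricsSketch(Map<String, List<TaskMetrics>> jobMetric) {
  long executorRunTime = 0L;
  for (List<TaskMetrics> stageMetrics : jobMetric.values()) {
    for (TaskMetrics taskMetrics : stageMetrics) {
      executorRunTime += taskMetrics.executorRunTime();
    }
  }
  Map<String, Long> flat = new HashMap<String, Long>();
  flat.put("ExecutorRunTime", executorRunTime);
  return flat;
}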
// Variant of getSparkStageProgress() keyed by SparkStage objects instead of
// "<stageId>_<attemptId>" strings.
@Override
public Map<SparkStage, SparkStageProgress> getSparkStageProgress() {
  Map<SparkStage, SparkStageProgress> stageProgresses = new HashMap<SparkStage, SparkStageProgress>();
  for (int stageId : getStageIds()) {
    SparkStageInfo sparkStageInfo = getStageInfo(stageId);
    if (sparkStageInfo != null) {
      int runningTaskCount = sparkStageInfo.numActiveTasks();
      int completedTaskCount = sparkStageInfo.numCompletedTasks();
      int failedTaskCount = sparkStageInfo.numFailedTasks();
      int totalTaskCount = sparkStageInfo.numTasks();
      SparkStageProgress sparkStageProgress = new SparkStageProgress(
          totalTaskCount, completedTaskCount, runningTaskCount, failedTaskCount);
      SparkStage stage = new SparkStage(sparkStageInfo.stageId(), sparkStageInfo.currentAttemptId());
      stageProgresses.put(stage, sparkStageProgress);
    }
  }
  return stageProgresses;
}
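// Hedged sketch: looking up one stage attempt in the SparkStage-keyed map. The
// SparkStage constructor arguments mirror the ones used above; using it as a
// HashMap key presumes SparkStage implements equals()/hashCode(), which the
// method above already relies on. `status`, `stageId`, and `attemptId` are
// illustrative assumptions.
SparkStage stage = new SparkStage(stageId, attemptId);
SparkStageProgress progress = status.getSparkStageProgress().get(stage);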
@Override
public JobExecutionStatus getState() {
  SparkJobInfo sparkJobInfo = getJobInfo();
  // A Spark job over empty source data is never actually submitted, so we never
  // receive JobStart/JobEnd events in JobStateListener; fall back to the
  // JavaFutureAction to derive the current job state.
  if (sparkJobInfo == null && future.isDone()) {
    try {
      future.get();
    } catch (Exception e) {
      LOG.error("Failed to run job " + jobId, e);
      return JobExecutionStatus.FAILED;
    }
    return JobExecutionStatus.SUCCEEDED;
  }
  return sparkJobInfo == null ? null : sparkJobInfo.status();
}
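// Hedged sketch: how a caller might poll getState() until the job finishes. This
// helper and the one-second interval are illustrative assumptions, not Hive's
// actual job monitor; JobExecutionStatus is Spark's real enum
// (org.apache.spark.JobExecutionStatus).
static JobExecutionStatus waitForCompletion(LocalSparkJobStatus status)
    throws InterruptedException {
  while (true) {
    JobExecutionStatus state = status.getState();
    // getState() returns null until Spark has registered the job (or, for an
    // empty-source job, until the future completes), so keep polling on null.
    if (state == JobExecutionStatus.SUCCEEDED || state == JobExecutionStatus.FAILED) {
      return state;
    }
    Thread.sleep(1000);
  }
}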