/**
 * Fetches the status tracker's current view of the given stage.
 * NOTE(review): sibling code null-checks tracker results — presumably this
 * can return null for a stage the tracker does not know about; confirm.
 *
 * @param stageId Spark stage id to look up
 * @return stage info as reported by the context's status tracker
 */
private SparkStageInfo getStageInfo(int stageId) {
  return sparkContext.statusTracker().getStageInfo(stageId);
}
// Closes the enclosing class (its header is outside this snippet).
}
/**
 * Delegates to the Spark context's status tracker for the given stage.
 * NOTE(review): the tracker may return null for unknown stages — callers
 * elsewhere guard against null; verify before relying on non-null here.
 *
 * @param stageId Spark stage id to look up
 * @return stage info from the status tracker
 */
private SparkStageInfo getStageInfo(int stageId) {
  return sparkContext.statusTracker().getStageInfo(stageId);
}
// Closing brace of the enclosing class, whose header is not in this snippet.
}
/**
 * Looks up status for this object's captured {@code jobId} via the Spark
 * context's status tracker.
 *
 * @return job info as reported by the status tracker
 */
private SparkJobInfo getJobInfo() {
  return sparkContext.statusTracker().getJobInfo(jobId);
}
/**
 * Queries the context's status tracker for the job identified by the
 * captured {@code jobId} field.
 *
 * @return the tracker's current job info
 */
private SparkJobInfo getJobInfo() {
  return sparkContext.statusTracker().getJobInfo(jobId);
}
/**
 * Returns the status tracker's info for the captured {@code stageId},
 * evaluated on the side that owns the given {@link JobContext}.
 *
 * @param jc job context providing access to the Spark context
 * @return stage info from the status tracker
 */
@Override
public SparkStageInfo call(JobContext jc) throws Exception {
  return jc.sc().statusTracker().getStageInfo(stageId);
}
// Closes the enclosing (anonymous) class — its declaration is outside this snippet.
}
@Override public ArrayList<SparkStageInfo> call(JobContext jc) throws Exception { SparkJobInfo jobInfo = jc.sc().statusTracker().getJobInfo(sparkJobId); if (jobInfo == null) { ArrayList<JavaFutureAction<?>> list = new ArrayList<>(jc.getMonitoredJobs().get(clientJobId)); int[] stageIds = jobInfo.stageIds(); for(Integer stageid : stageIds) { SparkStageInfo stageInfo = jc.sc().statusTracker().getStageInfo(stageid); sparkStageInfos.add(stageInfo);
/**
 * Resolves status for the Spark job {@code sparkJobId}. Prefers the status
 * tracker's answer; when the tracker has no record, falls back to the single
 * monitored future for {@code clientJobId} (if it has completed), and
 * otherwise reports UNKNOWN.
 *
 * @param jc job context providing the Spark context and monitored jobs
 * @return a non-null job info, synthesized via getDefaultJobInfo when the
 *         tracker cannot supply one
 */
@Override
public SparkJobInfo call(JobContext jc) throws Exception {
  // Fast path: the tracker already knows this job.
  SparkJobInfo jobInfo = jc.sc().statusTracker().getJobInfo(sparkJobId);
  if (jobInfo != null) {
    return jobInfo;
  }
  // Fall back to the monitored future, but only when there is exactly one
  // and it has already finished — otherwise we cannot conclude anything.
  List<JavaFutureAction<?>> monitored = jc.getMonitoredJobs().get(clientJobId);
  if (monitored != null && monitored.size() == 1) {
    JavaFutureAction<?> future = monitored.get(0);
    if (future.isDone()) {
      JobExecutionStatus status = JobExecutionStatus.SUCCEEDED;
      try {
        future.get();
      } catch (Exception e) {
        LOG.error("Failed to run job " + sparkJobId, e);
        status = JobExecutionStatus.FAILED;
      }
      return getDefaultJobInfo(sparkJobId, status);
    }
  }
  // Nothing conclusive either way.
  return getDefaultJobInfo(sparkJobId, JobExecutionStatus.UNKNOWN);
}
// Closes the enclosing (anonymous) class declared outside this snippet.
}
/**
 * Returns status for Spark job {@code sparkJobId}, never null: tracker
 * result if available; else a synthesized SUCCEEDED/FAILED entry derived
 * from the lone completed monitored future for {@code clientJobId}; else a
 * synthesized UNKNOWN entry.
 *
 * @param jc job context providing the Spark context and monitored jobs
 * @return resolved or synthesized job info
 */
@Override
public SparkJobInfo call(JobContext jc) throws Exception {
  SparkJobInfo tracked = jc.sc().statusTracker().getJobInfo(sparkJobId);
  if (tracked != null) {
    return tracked;
  }
  // The tracker has no record — consult the client-side monitored future.
  List<JavaFutureAction<?>> futures = jc.getMonitoredJobs().get(clientJobId);
  boolean singleFuture = futures != null && futures.size() == 1;
  if (singleFuture) {
    JavaFutureAction<?> action = futures.get(0);
    if (action.isDone()) {
      // get() rethrows the job's failure, if any; map that to FAILED.
      JobExecutionStatus outcome = JobExecutionStatus.SUCCEEDED;
      try {
        action.get();
      } catch (Exception e) {
        LOG.error("Failed to run job " + sparkJobId, e);
        outcome = JobExecutionStatus.FAILED;
      }
      return getDefaultJobInfo(sparkJobId, outcome);
    }
  }
  // Still running, or no usable future to inspect.
  return getDefaultJobInfo(sparkJobId, JobExecutionStatus.UNKNOWN);
}
// Closing brace of the enclosing (anonymous) class from outside this snippet.
}
// Look up the job in the status tracker; only proceed when the tracker knows
// the job AND it has at least one stage recorded. (The body of this guard is
// truncated in this snippet — the enclosing method continues elsewhere.)
SparkJobInfo sparkJobInfo = jc.sc().statusTracker().getJobInfo(sparkJobId); if (sparkJobInfo != null && sparkJobInfo.stageIds() != null && sparkJobInfo.stageIds().length > 0) {
/**
 * Thin accessor: asks the context's status tracker for the given stage.
 * NOTE(review): presumably returns null for stages the tracker has dropped
 * or never saw — confirm against callers.
 *
 * @param stageId Spark stage id to look up
 * @return the tracker's stage info
 */
private SparkStageInfo getStageInfo(int stageId) {
  return sparkContext.statusTracker().getStageInfo(stageId);
}
// Ends the enclosing class whose header lies outside this snippet.
}
/**
 * Fetches job info from the given Spark context's status tracker, failing
 * fast when the tracker has no record of the job.
 *
 * @param jobID        Spark job id to look up
 * @param sparkContext context whose status tracker is queried
 * @return non-null job info for {@code jobID}
 * @throws RuntimeException when the tracker returns no info for the id
 */
private static SparkJobInfo getJobInfo(int jobID, JavaSparkContext sparkContext) {
  final SparkJobInfo info = sparkContext.statusTracker().getJobInfo(jobID);
  if (info != null) {
    return info;
  }
  throw new RuntimeException("No jobInfo available for jobID " + jobID);
}
/**
 * Convenience wrapper: status-tracker lookup for the stored {@code jobId}.
 *
 * @return job info from the Spark context's status tracker
 */
private SparkJobInfo getJobInfo() {
  return sparkContext.statusTracker().getJobInfo(jobId);
}
/**
 * Looks up the captured {@code stageId} in the status tracker reachable
 * through the supplied job context.
 *
 * @param jc job context providing access to the Spark context
 * @return stage info reported by the status tracker
 */
@Override
public SparkStageInfo call(JobContext jc) throws Exception {
  return jc.sc().statusTracker().getStageInfo(stageId);
}
// Terminates the enclosing (anonymous) class begun outside this snippet.
}
/**
 * Returns the Spark job ids in this job group that have not been seen before.
 * <p/>
 * In Spark, currently only async actions return a job id; there is no async
 * equivalent of actions like saveAsNewAPIHadoopFile(). The only other way to
 * get a job id is to register a "job group ID" with the Spark context and
 * request all job ids corresponding to that job group via getJobIdsForGroup().
 * <p/>
 * However, getJobIdsForGroup() does not guarantee the order of the elements
 * in its result, so this method simply diffs against the already-seen set.
 *
 * @param seenJobIDs job ids in the job group that are already seen; mutated
 *                   in place — the newly returned ids are added to it
 * @return Spark job ids not seen before
 */
private List<Integer> getJobIDs(Set<Integer> seenJobIDs) {
  // Snapshot of every job id the tracker currently reports for this group
  // (boxed via ArrayUtils so it can live in a Set<Integer>).
  Set<Integer> groupjobIDs = new HashSet<Integer>(
      Arrays.asList(ArrayUtils.toObject(sparkContext.statusTracker()
          .getJobIdsForGroup(jobGroupID))));
  // Drop the ids the caller has already processed.
  groupjobIDs.removeAll(seenJobIDs);
  List<Integer> unseenJobIDs = new ArrayList<Integer>(groupjobIDs);
  // Record them so the next call reports only genuinely new jobs.
  seenJobIDs.addAll(unseenJobIDs);
  return unseenJobIDs;
}
/**
 * Produces a non-null status for Spark job {@code sparkJobId}: the status
 * tracker's record when present, otherwise a default record inferred from
 * the single finished monitored future for {@code clientJobId}, otherwise a
 * default UNKNOWN record.
 *
 * @param jc job context providing the Spark context and monitored jobs
 * @return resolved or synthesized job info, never null
 */
@Override
public SparkJobInfo call(JobContext jc) throws Exception {
  SparkJobInfo fromTracker = jc.sc().statusTracker().getJobInfo(sparkJobId);
  if (fromTracker != null) {
    return fromTracker;
  }
  // No tracker record — see whether the client registered exactly one
  // future for this job and whether it already completed.
  List<JavaFutureAction<?>> registered = jc.getMonitoredJobs().get(clientJobId);
  if (registered != null && registered.size() == 1) {
    JavaFutureAction<?> only = registered.get(0);
    if (only.isDone()) {
      JobExecutionStatus result;
      try {
        // A failed job surfaces here as an exception from get().
        only.get();
        result = JobExecutionStatus.SUCCEEDED;
      } catch (Exception e) {
        LOG.error("Failed to run job " + sparkJobId, e);
        result = JobExecutionStatus.FAILED;
      }
      return getDefaultJobInfo(sparkJobId, result);
    }
  }
  // Job state cannot be determined from either source.
  return getDefaultJobInfo(sparkJobId, JobExecutionStatus.UNKNOWN);
}
// Closes the enclosing (anonymous) class opened outside this snippet.
}