    break;
} else if (i >= MAX_RETRY_TIMES - 1) {
    // last retry has failed: if YARN already reports the application as FINISHED,
    // close out every MR job still tracked for it
    if (app.getState().equals(Constants.AppState.FINISHED.toString())) {
        mrJobEntityMap.keySet().forEach(this::finishMRJob);
appInfo.setState(Constants.AppState.FINISHED.toString());
apps.add(appInfo);
    this.sparkRunningJobManager.update(app.getId(), id, attemptEntity);
} else {
    attemptEntity.setYarnState(Constants.AppState.FINISHED.toString());
    attemptEntity.setYarnStatus(Constants.AppStatus.FAILED.toString());
appInfo.setState(Constants.AppState.FINISHED.toString());
apps.add(appInfo);
    // drop jobs that have already reached a terminal state from the running-job store
    .filter(jobId -> mrJobEntityMap.get(jobId).getInternalState() != null)
    .filter(jobId -> mrJobEntityMap.get(jobId).getInternalState().equals(Constants.AppState.FINISHED.toString())
        || mrJobEntityMap.get(jobId).getInternalState().equals(Constants.AppState.FAILED.toString()))
    .forEach(jobId -> this.runningJobManager.delete(app.getId(), jobId));
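// The runningJobManager used above is the bookkeeping component that tracks which jobs are still
// in flight for each YARN application; delete(appId, jobId) forgets a job once it reaches a
// terminal state. Below is a minimal, purely illustrative in-memory sketch of that contract.
// It is a hypothetical stand-in, not the project's actual RunningJobManager, which keeps this
// state in an external store rather than in a map.
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

public class InMemoryRunningJobStoreSketch {
    private final Map<String, Set<String>> jobsByApp = new ConcurrentHashMap<>();

    // remember that jobId is currently running under the given YARN application
    public void track(String appId, String jobId) {
        jobsByApp.computeIfAbsent(appId, k -> ConcurrentHashMap.newKeySet()).add(jobId);
    }

    // forget a job once it has finished or failed; drop the application entry when none remain
    public void delete(String appId, String jobId) {
        Set<String> jobs = jobsByApp.get(appId);
        if (jobs != null) {
            jobs.remove(jobId);
            if (jobs.isEmpty()) {
                jobsByApp.remove(appId);
            }
        }
    }
}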
});

if (appInfo.getState().equals(Constants.AppState.FINISHED.toString())
        || applicationParser.status() == SparkApplicationParser.ParserStatus.FINISHED) {
    applicationParser.setStatus(SparkApplicationParser.ParserStatus.RUNNING);
@Override
public void execute(Tuple tuple) {
    AppInfo appInfo = (AppInfo) tuple.getValue(1);
    Map<String, JobExecutionAPIEntity> mrJobs = (Map<String, JobExecutionAPIEntity>) tuple.getValue(2);

    LOG.debug("get mr yarn application " + appInfo.getId());

    // reuse the parser already tracking this application, or create a new one for it
    MRJobParser applicationParser;
    if (!runningMRParsers.containsKey(appInfo.getId())) {
        applicationParser = new MRJobParser(endpointConfig, eagleServiceConfig,
            appInfo, mrJobs, runningJobManager, configKeys, this.config);
        runningMRParsers.put(appInfo.getId(), applicationParser);
        LOG.info("create application parser for {}", appInfo.getId());
    } else {
        applicationParser = runningMRParsers.get(appInfo.getId());
        applicationParser.setAppInfo(appInfo);
    }

    // evict parsers whose application has completely finished
    Set<String> runningParserIds = new HashSet<>(runningMRParsers.keySet());
    runningParserIds.stream()
        .filter(appId -> runningMRParsers.get(appId).status() == MRJobParser.ParserStatus.APP_FINISHED)
        .forEach(appId -> {
            runningMRParsers.remove(appId);
            LOG.info("remove parser {}", appId);
        });

    // schedule a parse pass only when the parser is not already running
    if (appInfo.getState().equals(Constants.AppState.FINISHED.toString())
            || applicationParser.status() == MRJobParser.ParserStatus.FINISHED) {
        applicationParser.setStatus(MRJobParser.ParserStatus.RUNNING);
        executorService.execute(applicationParser);
    }
}
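// The executorService.execute(applicationParser) call above implies that MRJobParser is a Runnable
// whose status() drives scheduling: FINISHED means idle and ready for another pass, RUNNING means a
// pass is in flight, and APP_FINISHED means the application is gone and the parser can be evicted.
// The following is a hypothetical, trimmed-down sketch of that lifecycle for illustration only; the
// real parser fetches and persists job details from the YARN/MR REST endpoints inside run().
public class RunningJobParserLifecycleSketch implements Runnable {
    public enum ParserStatus { RUNNING, FINISHED, APP_FINISHED }

    private volatile ParserStatus status = ParserStatus.FINISHED;

    public ParserStatus status() {
        return status;
    }

    public void setStatus(ParserStatus status) {
        this.status = status;
    }

    @Override
    public void run() {
        try {
            // fetch and persist running-job details here (omitted in this sketch)
        } finally {
            // flip back to FINISHED unless the whole application has finished,
            // so the bolt can schedule another pass when the next tuple arrives
            if (status != ParserStatus.APP_FINISHED) {
                setStatus(ParserStatus.FINISHED);
            }
        }
    }
}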
private void finishSparkApp(String sparkAppId) {
    SparkAppEntity attemptEntity = sparkAppEntityMap.get(sparkAppId);
    attemptEntity.setYarnState(Constants.AppState.FINISHED.toString());
    attemptEntity.setYarnStatus(Constants.AppStatus.FAILED.toString());

    sparkJobConfigs.remove(sparkAppId);
    if (sparkJobConfigs.size() == 0) {
        this.parserStatus = ParserStatus.APP_FINISHED;
    }
    stagesTime.clear();
    LOG.info("spark application {} has been finished", sparkAppId);
}
private void finishMRJob(String mrJobId) {
    JobExecutionAPIEntity jobExecutionAPIEntity = mrJobEntityMap.get(mrJobId);
    jobExecutionAPIEntity.setInternalState(Constants.AppState.FINISHED.toString());
    jobExecutionAPIEntity.setCurrentState(Constants.AppState.RUNNING.toString());
    // set an estimated job finish time because it's hard to get the specific one
    jobExecutionAPIEntity.setEndTime(System.currentTimeMillis());

    mrJobConfigs.remove(mrJobId);
    if (mrJobConfigs.size() == 0) {
        this.parserStatus = ParserStatus.APP_FINISHED;
    }
    LOG.info("mr job {} has been finished", mrJobId);
}