@Override protected void onExecuteStart(ExecutableContext executableContext) { final Output output = getOutput(); if (output.getExtra().containsKey(START_TIME)) { final String sparkJobID = output.getExtra().get(ExecutableConstants.SPARK_JOB_ID); if (sparkJobID == null) { getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null); return; } try { String status = getAppState(sparkJobID); if (status == null || status.equals("FAILED") || status.equals("KILLED")) { //remove previous mr job info super.onExecuteStart(executableContext); } else { getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null); } } catch (IOException e) { logger.warn("error get hadoop status"); super.onExecuteStart(executableContext); } } else { super.onExecuteStart(executableContext); } }
// NOTE(review): orphaned fragment — an if/else plus a catch tail with no enclosing method,
// no opening try, and unbalanced braces; it is not compilable as it stands. It looks like
// a patched excerpt of the MR onExecuteStart (null-checks the job and re-interrupts on
// InterruptedException via Thread.currentThread().interrupt()) — confirm against the full
// file and either merge it into the real method or delete it.
if (job == null || job.getJobState() == JobStatus.State.FAILED) { super.onExecuteStart(executableContext); } else { getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null); super.onExecuteStart(executableContext); } catch (InterruptedException e) { Thread.currentThread().interrupt(); logger.warn("error get hadoop status"); super.onExecuteStart(executableContext); super.onExecuteStart(executableContext);
// NOTE(review): orphaned fragment — a bare method call and two dangling Throwable
// declarations with no enclosing scope; not compilable standalone. Presumably excerpted
// from an execute(...) implementation that distinguishes a caught wrapper exception from
// its real cause — verify against the full file and remove or reintegrate.
onExecuteStart(executableContext); Throwable catchedException; Throwable realException;
@Override protected void onExecuteStart(ExecutableContext executableContext) { final Output output = executableManager.getOutput(getId()); if (output.getExtra().containsKey(START_TIME)) { final String mrJobId = output.getExtra().get(ExecutableConstants.MR_JOB_ID); if (mrJobId == null) { executableManager.updateJobOutput(getId(), ExecutableState.RUNNING, null, null); return; } try { Job job = new Cluster(new Configuration()).getJob(JobID.forName(mrJobId)); if (job.getJobState() == JobStatus.State.FAILED) { //remove previous mr job info super.onExecuteStart(executableContext); } else { executableManager.updateJobOutput(getId(), ExecutableState.RUNNING, null, null); } } catch (IOException e) { logger.warn("error get hadoop status"); super.onExecuteStart(executableContext); } catch (InterruptedException e) { logger.warn("error get hadoop status"); super.onExecuteStart(executableContext); } } else { super.onExecuteStart(executableContext); } }
@Override public final ExecuteResult execute(ExecutableContext executableContext) throws ExecuteException { //print a eye-catching title in log LogTitlePrinter.printTitle(this.getName()); Preconditions.checkArgument(executableContext instanceof DefaultContext); ExecuteResult result; try { onExecuteStart(executableContext); result = doWork(executableContext); } catch (Throwable e) { logger.error("error running Executable", e); onExecuteError(e, executableContext); throw new ExecuteException(e); } onExecuteFinished(result, executableContext); return result; }
@Override protected void onExecuteStart(ExecutableContext executableContext) { final Output output = getOutput(); if (output.getExtra().containsKey(START_TIME)) { final String sparkJobID = output.getExtra().get(ExecutableConstants.SPARK_JOB_ID); if (sparkJobID == null) { getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null); return; } try { String status = getAppState(sparkJobID); if (status == null || status.equals("FAILED") || status.equals("KILLED")) { //remove previous mr job info super.onExecuteStart(executableContext); } else { getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null); } } catch (IOException e) { logger.warn("error get hadoop status"); super.onExecuteStart(executableContext); } } else { super.onExecuteStart(executableContext); } }
// NOTE(review): orphaned fragment — an if/else plus a catch tail with no enclosing method,
// no opening try, and unbalanced braces; it is not compilable as it stands. It looks like
// a patched excerpt of the MR onExecuteStart (null-checks the job and re-interrupts on
// InterruptedException via Thread.currentThread().interrupt()) — confirm against the full
// file and either merge it into the real method or delete it.
if (job == null || job.getJobState() == JobStatus.State.FAILED) { super.onExecuteStart(executableContext); } else { getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null); super.onExecuteStart(executableContext); } catch (InterruptedException e) { Thread.currentThread().interrupt(); logger.warn("error get hadoop status"); super.onExecuteStart(executableContext); super.onExecuteStart(executableContext);
@Override protected void onExecuteStart(ExecutableContext executableContext) { final Output output = executableManager.getOutput(getId()); if (output.getExtra().containsKey(START_TIME)) { final String mrJobId = output.getExtra().get(ExecutableConstants.MR_JOB_ID); if (mrJobId == null) { executableManager.updateJobOutput(getId(), ExecutableState.RUNNING, null, null); return; } try { Configuration conf = HadoopUtil.getCurrentConfiguration(); Job job = new Cluster(conf).getJob(JobID.forName(mrJobId)); if (job.getJobState() == JobStatus.State.FAILED) { //remove previous mr job info super.onExecuteStart(executableContext); } else { executableManager.updateJobOutput(getId(), ExecutableState.RUNNING, null, null); } } catch (IOException e) { logger.warn("error get hadoop status"); super.onExecuteStart(executableContext); } catch (InterruptedException e) { logger.warn("error get hadoop status"); super.onExecuteStart(executableContext); } } else { super.onExecuteStart(executableContext); } }
@Override public final ExecuteResult execute(ExecutableContext executableContext) throws ExecuteException { //print a eye-catching title in log LogTitlePrinter.printTitle(this.getName()); Preconditions.checkArgument(executableContext instanceof DefaultContext); ExecuteResult result; try { onExecuteStart(executableContext); result = doWork(executableContext); } catch (Throwable e) { logger.error("error running Executable", e); onExecuteError(e, executableContext); throw new ExecuteException(e); } onExecuteFinished(result, executableContext); return result; }
// NOTE(review): orphaned fragment — a bare method call and two dangling Throwable
// declarations with no enclosing scope; not compilable standalone. Presumably excerpted
// from an execute(...) implementation that distinguishes a caught wrapper exception from
// its real cause — verify against the full file and remove or reintegrate.
onExecuteStart(executableContext); Throwable catchedException; Throwable realException;