// NOTE(review): this fragment is truncated — the remainder of logJob() is not
// visible in this chunk; only the lookup and tracking-URL capture are shown.
private void logJob(String logDir, String jobID, PrintWriter listWriter) throws IOException {
    // Resolve the job handle from its string ID, then grab its tracking URL
    // (presumably used below to fetch/log job pages — TODO confirm in full file).
    RunningJob rj = jobClient.getJob(JobID.forName(jobID));
    String jobURLString = rj.getTrackingURL();
/**
 * Merges MapReduce statistics and live job state into the shared {@code statsJSON} object.
 *
 * Both arguments are optional: a null {@code stats} skips the counter/mapper/reducer
 * fields, and a null {@code rj} skips the job-identity and progress fields. The JSON
 * object is created lazily on first use.
 *
 * @param stats aggregated map/reduce statistics, or null if not yet available
 * @param rj    the live running job, or null if not yet submitted
 * @throws IOException   if querying the running job fails
 * @throws JSONException if a JSON field cannot be written
 */
private void updateMapRedStatsJson(MapRedStats stats, RunningJob rj) throws IOException, JSONException {
    // Lazily allocate the accumulator so repeated calls keep updating one object.
    if (statsJSON == null) {
        statsJSON = new JSONObject();
    }

    if (stats != null) {
        // Negative task counts mean "unknown"; only record known values.
        if (stats.getNumMap() >= 0) {
            statsJSON.put(NUMBER_OF_MAPPERS, stats.getNumMap());
        }
        if (stats.getNumReduce() >= 0) {
            statsJSON.put(NUMBER_OF_REDUCERS, stats.getNumReduce());
        }
        if (stats.getCounters() != null) {
            statsJSON.put(COUNTERS, getCountersJson(stats.getCounters()));
        }
    }

    if (rj != null) {
        // Identity and location of the job.
        statsJSON.put(JOB_ID, rj.getID().toString());
        statsJSON.put(JOB_FILE, rj.getJobFile());
        statsJSON.put(TRACKING_URL, rj.getTrackingURL());
        // Progress values come back as fractions in [0, 1]; store them as whole percentages.
        statsJSON.put(MAP_PROGRESS, Math.round(rj.mapProgress() * 100));
        statsJSON.put(REDUCE_PROGRESS, Math.round(rj.reduceProgress() * 100));
        statsJSON.put(CLEANUP_PROGRESS, Math.round(rj.cleanupProgress() * 100));
        statsJSON.put(SETUP_PROGRESS, Math.round(rj.setupProgress() * 100));
        // Terminal-state flags.
        statsJSON.put(COMPLETE, rj.isComplete());
        statsJSON.put(SUCCESSFUL, rj.isSuccessful());
    }
}
// Only surface the tracking URL when running against a real JobTracker;
// the "local" runner has no web UI to point at.
// NOTE(review): fragment truncated — the enclosing method and the rest of
// this if-block are not visible in this chunk.
if (!conf.get("mapred.job.tracker", "local").equals("local")) { console.printError("Job Tracking URL: " + rj.getTrackingURL());
// Duplicate of the fragment above: print the tracking URL only for non-local
// (real JobTracker) execution. Fragment truncated in this chunk.
if (!conf.get("mapred.job.tracker", "local").equals("local")) { console.printError("Job Tracking URL: " + rj.getTrackingURL());
public void run(JobConf conf) throws Exception { _runningJob = new JobClient(conf).submitJob(conf); info("See " + _runningJob.getTrackingURL() + " for details."); _runningJob.waitForCompletion(); if(!_runningJob.isSuccessful()) { throw new Exception("Hadoop job:" + getId() + " failed!"); } // dump all counters Counters counters = _runningJob.getCounters(); for(String groupName: counters.getGroupNames()) { Counters.Group group = counters.getGroup(groupName); info("Group: " + group.getDisplayName()); for(Counter counter: group) info(counter.getDisplayName() + ":\t" + counter.getValue()); } }
// Snapshot the live job's identity and status into local fields.
// NOTE(review): fragment — the enclosing method is not visible; assumes
// runningJob is non-null here (TODO confirm the caller guarantees that).
jobId = runningJob.getID().toString(); jobName = runningJob.getJobName(); trackingURL = runningJob.getTrackingURL(); isComplete = runningJob.isComplete(); isSuccessful = runningJob.isSuccessful();
/**
 * Logs where the submitted job can be tracked and how it can be killed.
 * (Adapted from StreamJob.java.)
 *
 * In local mode there is no tracker UI, so only an in-process notice is printed.
 * Otherwise the Hadoop job id is recorded in the Hive session history and the
 * tracking URL plus a ready-to-paste kill command are printed to the console.
 *
 * @param rj the job that was just submitted
 */
public void jobInfo(RunningJob rj) {
    // Local-mode jobs have no JobTracker UI; nothing further to report.
    if (ShimLoader.getHadoopShims().isLocalMode(job)) {
        console.printInfo("Job running in-process (local Hadoop)");
        return;
    }

    // Record the Hadoop job id against this task in the session's query history.
    SessionState ss = SessionState.get();
    if (ss != null) {
        ss.getHiveHistory().setTaskProperty(queryId, getId(), Keys.TASK_HADOOP_ID, rj.getID().toString());
    }

    console.printInfo(getJobStartMsg(rj.getID()) + ", Tracking URL = " + rj.getTrackingURL());
    console.printInfo("Kill Command = " + HiveConf.getVar(job, ConfVars.MAPREDBIN) + " job -kill " + rj.getID());
}
/**
 * Prints job-tracking details for a freshly submitted job and, when a session
 * exists, stores the Hadoop job id in the Hive query history.
 * (Adapted from StreamJob.java.)
 *
 * @param rj the submitted job to report on
 */
public void jobInfo(RunningJob rj) {
    boolean localMode = ShimLoader.getHadoopShims().isLocalMode(job);
    if (localMode) {
        // No tracker UI exists for in-process execution.
        console.printInfo("Job running in-process (local Hadoop)");
    } else {
        if (SessionState.get() != null) {
            SessionState.get().getHiveHistory()
                .setTaskProperty(queryId, getId(), Keys.TASK_HADOOP_ID, rj.getID().toString());
        }
        // Tell the user where to watch the job and how to kill it.
        console.printInfo(getJobStartMsg(rj.getID()) + ", Tracking URL = " + rj.getTrackingURL());
        console.printInfo("Kill Command = "
            + HiveConf.getVar(job, HiveConf.ConfVars.HADOOPBIN)
            + " job -kill " + rj.getID());
    }
}
/**
 * Get the URL where some job progress information will be displayed.
 *
 * <p>Delegates to the underlying job status; requires the job to already be
 * running — {@code ensureState} enforces that precondition.
 *
 * @return the URL where some job progress information will be displayed.
 */
public String getTrackingURL() { ensureState(JobState.RUNNING); return info.getTrackingURL(); }
/**
 * Submits this flow step's Hadoop job without blocking on completion, then logs
 * the job id and (when available) its tracking URL.
 *
 * @throws IOException if the job client cannot be created or submission fails
 */
protected void internalNonBlockingStart() throws IOException
  {
  jobClient = new JobClient( jobConfiguration );
  runningJob = internalNonBlockingSubmit();

  flowStep.logInfo( "submitted hadoop job: " + runningJob.getID() );

  // A tracking URL may be absent (e.g. local mode); only log it when present.
  if( runningJob.getTrackingURL() != null )
    flowStep.logInfo( "tracking url: " + runningJob.getTrackingURL() );
  }
// Duplicate of the fragment above.
// Submit the job asynchronously and log its id; the tracking URL can be null
// (e.g. local mode), so it is logged only when present.
protected void internalNonBlockingStart() throws IOException { jobClient = new JobClient( jobConfiguration ); runningJob = internalNonBlockingSubmit(); flowStep.logInfo( "submitted hadoop job: " + runningJob.getID() ); if( runningJob.getTrackingURL() != null ) flowStep.logInfo( "tracking url: " + runningJob.getTrackingURL() ); }
// Duplicate of the truncated logJob() fragment above — remainder not visible
// in this chunk.
private void logJob(String logDir, String jobID, PrintWriter listWriter) throws IOException {
    // Look up the job by ID and capture its tracking URL.
    RunningJob rj = jobClient.getJob(JobID.forName(jobID));
    String jobURLString = rj.getTrackingURL();
// Another duplicate: print the tracking URL only when a real (non-local)
// JobTracker is configured. Fragment truncated in this chunk.
if (!conf.get("mapred.job.tracker", "local").equals("local")) { console.printError("Job Tracking URL: " + rj.getTrackingURL());
@Override public void onStepStarting(FlowStep flowStep) { LOG.info("Step starting"); try { HadoopStepStats hdStepStats = (HadoopStepStats)flowStep.getFlowStepStats(); RunningJob job = hdStepStats.getRunningJob(); persister.onRunning(new LaunchedJob(job.getID().toString(), job.getJobName(), job.getTrackingURL()) ); } catch (NullPointerException | IOException e) { // no op } }
// Derive the job-configuration page URL from the tracking URL by swapping the
// JOBDETAILS path segment for JOBCONF (constants presumably defined elsewhere
// in this file — TODO confirm). NOTE(review): fragment — enclosing method not
// visible in this chunk.
String jobUrlStr = rj.getTrackingURL(); URL jobUrl = new URL(jobUrlStr); URL jobConfUrl = new URL(jobUrlStr.replace(JOBDETAILS, JOBCONF));
/**
 * Reports tracking and kill information for a submitted job.
 * (Adapted from StreamJob.java.)
 *
 * <p>Local-mode jobs get a simple in-process notice. For cluster jobs, the
 * Hadoop job id is stored in the session's query history and the console shows
 * the tracking URL plus a kill command targeting the configured JobTracker.
 *
 * @param rj the submitted job
 */
private void jobInfo(RunningJob rj) {
    // "local" means no JobTracker — there is nothing to track or kill remotely.
    if (job.get("mapred.job.tracker", "local").equals("local")) {
        console.printInfo("Job running in-process (local Hadoop)");
        return;
    }

    String tracker = job.get("mapred.job.tracker");
    if (SessionState.get() != null) {
        SessionState.get().getHiveHistory().setTaskProperty(
            SessionState.get().getQueryId(), getId(), Keys.TASK_HADOOP_ID, rj.getJobID());
    }

    console.printInfo(ExecDriver.getJobStartMsg(rj.getJobID()) + ", Tracking URL = " + rj.getTrackingURL());
    // The kill command must point at the same tracker the job was submitted to.
    console.printInfo("Kill Command = " + HiveConf.getVar(job, HiveConf.ConfVars.HADOOPBIN)
        + " job -Dmapred.job.tracker=" + tracker + " -kill " + rj.getJobID());
}
// Remember a kill URI for this job, built by appending the kill action to its
// tracking URL. NOTE(review): fragment — enclosing method not visible here.
runningJobKillURIs.put(rj.getJobID(), rj.getTrackingURL() + "&action=kill");
/**
 * Announces a submitted job's tracking URL and kill command on the console,
 * recording its Hadoop job id in the session query history when one exists.
 * (Adapted from StreamJob.java.)
 *
 * @param rj the job that was just submitted
 */
public void jobInfo(RunningJob rj) {
    if (ShimLoader.getHadoopShims().isLocalMode(job)) {
        // In-process execution: no tracker, nothing else to announce.
        console.printInfo("Job running in-process (local Hadoop)");
        return;
    }

    SessionState session = SessionState.get();
    if (session != null) {
        session.getHiveHistory().setTaskProperty(
            session.getQueryId(), getId(), Keys.TASK_HADOOP_ID, rj.getID().toString());
    }

    console.printInfo(getJobStartMsg(rj.getID()) + ", Tracking URL = " + rj.getTrackingURL());
    console.printInfo("Kill Command = " + HiveConf.getVar(job, HiveConf.ConfVars.HADOOPBIN)
        + " job -kill " + rj.getID());
}