private void computeReducerTimeStatsPerJob(RunningJob rj) throws IOException {
  TaskCompletionEvent[] taskCompletions = rj.getTaskCompletionEvents(0);
  List<Integer> reducersRunTimes = new ArrayList<Integer>();

  for (TaskCompletionEvent taskCompletion : taskCompletions) {
    if (!taskCompletion.isMapTask()) {
      reducersRunTimes.add(Integer.valueOf(taskCompletion.getTaskRunTime()));
    }
  }
  // Compute the reducers' run time statistics for the job
  ReducerTimeStatsPerJob reducerTimeStatsPerJob = new ReducerTimeStatsPerJob(reducersRunTimes);
  // Add the reducers' run time statistics for the job to the QueryPlan
  this.task.getQueryPlan().getReducerTimeStatsPerJobList().add(reducerTimeStatsPerJob);
}
private void getTaskInfos() throws IOException, MalformedURLException {
  int startIndex = 0;
  while (true) {
    TaskCompletionEvent[] taskCompletions = rj.getTaskCompletionEvents(startIndex);
    if (taskCompletions.length == 0) {
      break; // no more events to fetch
    }
    // ... per-event processing elided in the original snippet ...
    startIndex += taskCompletions.length;
  }
}
@Override
public TaskCompletionEvent[] getTaskCompletionEvents(int index) throws IOException {
  return job.getTaskCompletionEvents(index);
}
/**
 * Get events indicating completion (success/failure) of component tasks.
 *
 * @param startFrom index to start fetching events from
 * @return an array of {@link TaskCompletionEvent}s
 * @throws IOException
 */
public TaskCompletionEvent[] getTaskCompletionEvents(int startFrom) throws IOException {
  ensureState(JobState.RUNNING);
  return info.getTaskCompletionEvents(startFrom);
}
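The startFrom parameter above implies that completion events are returned in pages, so callers poll in a loop until an empty array comes back (the eventCounter snippets further down follow this pattern). A minimal sketch of that polling loop, assuming a RunningJob handle named rj and treating an empty page as end-of-stream:

import java.io.IOException;

import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TaskCompletionEvent;

public class TaskEventPoller {

  // Pages through all completion events published so far for a job.
  // The variable `rj` and the empty-page termination check are assumptions
  // for this sketch, not a definitive client implementation.
  static void logAllEvents(RunningJob rj) throws IOException {
    int startIndex = 0;
    while (true) {
      TaskCompletionEvent[] events = rj.getTaskCompletionEvents(startIndex);
      if (events.length == 0) {
        break; // no further events available yet
      }
      for (TaskCompletionEvent event : events) {
        System.out.println("event = " + event);
      }
      startIndex += events.length; // advance past the page just read
    }
  }
}

Note that for a job that is still running, an empty page may only mean no new events have arrived yet, so a real poller would typically also check rj.isComplete() before exiting the loop.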
@Override
public TaskCompletionEvent[] getTaskCompletionEvents( int startIndex ) throws IOException {
  org.apache.hadoop.mapred.TaskCompletionEvent[] events = delegate.getTaskCompletionEvents( startIndex );
  TaskCompletionEvent[] wrapped = new TaskCompletionEvent[ events.length ];

  for( int i = 0; i < wrapped.length; i++ ) {
    wrapped[ i ] = new TaskCompletionEventProxy( events[ i ] );
  }

  return wrapped;
}
TaskCompletionEvent[] taskCompletions = rj.getTaskCompletionEvents(startIndex);
TaskCompletionEvent[] taskComplEvents = rj.getTaskCompletionEvents(0);
TaskCompletionEvent[] events = running.getTaskCompletionEvents(eventCounter);
eventCounter += events.length;
for (TaskCompletionEvent event : events) {
  // ... per-event handling elided in the original snippet ...
}
protected void dumpDebugInfo() {
  try {
    if( runningJob == null )
      return;

    Integer jobState = getJobStateSafe(); // returns null if the underlying call throws an NPE internally

    if( jobState == null )
      return;

    flowStep.logWarn( "hadoop job " + runningJob.getID() + " state at " + JobStatus.getJobRunState( jobState ) );
    flowStep.logWarn( "failure info: " + runningJob.getFailureInfo() );

    TaskCompletionEvent[] events = runningJob.getTaskCompletionEvents( 0 );
    flowStep.logWarn( "task completion events identify failed tasks" );
    flowStep.logWarn( "task completion events count: " + events.length );

    for( TaskCompletionEvent event : events )
      flowStep.logWarn( "event = " + event );
  }
  catch( Throwable throwable ) {
    flowStep.logError( "failed reading task completion events", throwable );
  }
}
public void testPersistency() throws Exception {
  Properties config = new Properties();
  config.setProperty(JTConfig.JT_PERSIST_JOBSTATUS, "true");
  config.setProperty(JTConfig.JT_PERSIST_JOBSTATUS_HOURS, "1");
  startCluster(true, config);
  JobID jobId = runJob();
  JobClient jc = new JobClient(createJobConf());
  RunningJob rj0 = jc.getJob(jobId);
  assertNotNull(rj0);
  boolean successful0 = rj0.isSuccessful();
  String jobName0 = rj0.getJobName();
  Counters counters0 = rj0.getCounters();
  TaskCompletionEvent[] events0 = rj0.getTaskCompletionEvents(0);

  stopCluster();
  startCluster(false, config);

  jc = new JobClient(createJobConf());
  RunningJob rj1 = jc.getJob(jobId);
  assertNotNull(rj1);
  assertEquals(successful0, rj1.isSuccessful());
  assertEquals(jobName0, rj1.getJobName()); // compare against the restarted job, not rj0 itself
  assertEquals(counters0.size(), rj1.getCounters().size());

  TaskCompletionEvent[] events1 = rj1.getTaskCompletionEvents(0);
  assertEquals(events0.length, events1.length);
  for (int i = 0; i < events0.length; i++) {
    assertEquals(events0[i].getTaskAttemptId(), events1[i].getTaskAttemptId());
    assertEquals(events0[i].getTaskStatus(), events1[i].getTaskStatus());
  }
}
TaskCompletionEvent[] events = job.getTaskCompletionEvents(eventCounter);
eventCounter += events.length;
for (TaskCompletionEvent event : events) {
  // ... per-event handling elided in the original snippet ...
}