"=" + suggestedTargetChunkSize); } else { logger.error("Job Failed: " + runningJob.getFailureInfo());
/**
 * Submits the given job and blocks until it completes, printing progress to the console.
 *
 * @param job the fully configured job to submit
 * @return the handle of the successfully completed job
 * @throws IOException if the job completes unsuccessfully; the message carries the
 *                     job's failure info
 * @throws Exception   if submission or monitoring fails
 */
public static RunningJob myCustomRunJob(JobConf job) throws Exception {
    JobClient client = new JobClient(job);
    RunningJob running = client.submitJob(job);

    boolean succeeded = client.monitorAndPrintJob(job, running);
    if (!succeeded) {
        throw new IOException("Job failed with info: " + running.getFailureInfo());
    }

    return running;
}
/**
 * Configures and runs a two-reducer sort job over key/value text input.
 *
 * @param args args[0] is the input path, args[1] is the output path
 * @throws IOException if the job completes unsuccessfully; the message carries the
 *                     job's failure info
 * @throws Exception   if job setup, submission, or monitoring fails
 */
public static void runSortJob(String... args) throws Exception {
    Path input = new Path(args[0]);
    Path output = new Path(args[1]);

    JobConf job = new JobConf();
    job.setNumReduceTasks(2);
    job.setInputFormat(KeyValueTextInputFormat.class);
    job.setOutputFormat(TextOutputFormat.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);
    FileInputFormat.setInputPaths(job, input);
    FileOutputFormat.setOutputPath(job, output);
    job.setJarByClass(SampleJob.class);

    // Remove any stale output so the job does not fail on an existing path.
    output.getFileSystem(job).delete(output, true);

    JobClient jc = new JobClient(job);
    JobClient.setTaskOutputFilter(job, JobClient.TaskStatusFilter.ALL);
    RunningJob rj = jc.submitJob(job);
    try {
        if (!jc.monitorAndPrintJob(job, rj)) {
            // FIX: carry the failure diagnostics in the exception itself instead of
            // only printing them to stdout and throwing a message-less "Job failed!".
            throw new IOException("Job failed with info: " + rj.getFailureInfo());
        }
    } catch (InterruptedException ie) {
        // Restore the interrupt flag and return without a result, preserving the
        // original best-effort behavior on interruption.
        Thread.currentThread().interrupt();
    }
}
protected void dumpDebugInfo() { try { if( runningJob == null ) return; Integer jobState = getJobStateSafe(); // if call throws an NPE internally if( jobState == null ) return; flowStep.logWarn( "hadoop job " + runningJob.getID() + " state at " + JobStatus.getJobRunState( jobState ) ); flowStep.logWarn( "failure info: " + runningJob.getFailureInfo() ); TaskCompletionEvent[] events = runningJob.getTaskCompletionEvents( 0 ); flowStep.logWarn( "task completion events identify failed tasks" ); flowStep.logWarn( "task completion events count: " + events.length ); for( TaskCompletionEvent event : events ) flowStep.logWarn( "event = " + event ); } catch( Throwable throwable ) { flowStep.logError( "failed reading task completion events", throwable ); } }
protected void dumpDebugInfo() { try { if( runningJob == null ) return; Integer jobState = getJobStateSafe(); // if call throws an NPE internally if( jobState == null ) return; flowStep.logWarn( "hadoop job " + runningJob.getID() + " state at " + JobStatus.getJobRunState( jobState ) ); flowStep.logWarn( "failure info: " + runningJob.getFailureInfo() ); TaskCompletionEvent[] events = runningJob.getTaskCompletionEvents( 0 ); flowStep.logWarn( "task completion events identify failed tasks" ); flowStep.logWarn( "task completion events count: " + events.length ); for( TaskCompletionEvent event : events ) flowStep.logWarn( "event = " + event ); } catch( Throwable throwable ) { flowStep.logError( "failed reading task completion events", throwable ); } }