try {
  // Look up and invoke HBase's token helper reflectively so HBase is not a
  // hard compile-time dependency.
  Method obtainAuthTokenForJob =
      clazz.getMethod("obtainAuthTokenForJob", Configuration.class, Job.class);
  obtainAuthTokenForJob.invoke(user, configuration, job);
  log.info("Obtained HBase Auth Token from ZooKeeper quorum {} for job {}",
      configuration.get(quorumCfgKey), job.getJobName());
} catch (ClassNotFoundException e) {
  log.error("Failed to generate or store HBase auth token", e);
  log.info("Not obtaining HBase Auth Token for MapReduce job " + job.getJobName());
}
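// A minimal sketch of how the 'clazz' handle above could be obtained; the
// fully-qualified class name is an assumption (HBase's TokenUtil), not
// something stated in this snippet.
Class<?> clazz = Class.forName("org.apache.hadoop.hbase.security.token.TokenUtil");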
protected int waitForCompletion(Job job)
    throws IOException, InterruptedException, ClassNotFoundException {
  int retVal = 0;
  long start = System.nanoTime();
  if (isAsync) {
    job.submit();
  } else {
    job.waitForCompletion(true);
    retVal = job.isSuccessful() ? 0 : 1;
    logger.debug("Job '" + job.getJobName() + "' finished "
        + (job.isSuccessful() ? "successfully in " : "with failures. Time taken ")
        + formatTime((System.nanoTime() - start) / 1000000L));
  }
  return retVal;
}
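// Hypothetical formatTime helper assumed by waitForCompletion above; the
// original implementation is not shown, so this is only a sketch that renders
// elapsed milliseconds as minutes and seconds.
private static String formatTime(long millis) {
  long seconds = millis / 1000;
  return String.format("%d min %d sec (%d ms)", seconds / 60, seconds % 60, millis);
}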
public static String getFailureMessage(Job failedJob, ObjectMapper jsonMapper) {
  try {
    Map<String, String> taskDiagsMap = new HashMap<>();
    TaskCompletionEvent[] completionEvents = failedJob.getTaskCompletionEvents(0, 100);
    for (TaskCompletionEvent tce : completionEvents) {
      String[] taskDiags = failedJob.getTaskDiagnostics(tce.getTaskAttemptId());
      // Concatenate all diagnostics for this attempt into a single string.
      StringBuilder combinedTaskDiags = new StringBuilder();
      for (String taskDiag : taskDiags) {
        combinedTaskDiags.append(taskDiag);
      }
      taskDiagsMap.put(tce.getTaskAttemptId().toString(), combinedTaskDiags.toString());
    }
    return jsonMapper.writeValueAsString(taskDiagsMap);
  } catch (IOException | InterruptedException ie) {
    log.error(ie, "couldn't get failure cause for job [%s]", failedJob.getJobName());
    return null;
  }
}
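// Illustrative call site for getFailureMessage (the surrounding names are
// assumptions, not from the original source): serialize the per-attempt
// diagnostics of a failed job before surfacing the error.
if (!job.isSuccessful()) {
  String failureJson = getFailureMessage(job, new ObjectMapper());
  log.error("Job [%s] failed with diagnostics: %s", job.getJobName(), failureJson);
}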
protected void runJob(String jobName, Configuration c, List<Scan> scans)
    throws IOException, InterruptedException, ClassNotFoundException {
  Job job = new Job(c, jobName);
  initJob(scans, job);
  job.setReducerClass(ScanReducer.class);
  job.setNumReduceTasks(1); // one to get final "first" and "last" key
  FileOutputFormat.setOutputPath(job, new Path(job.getJobName()));
  LOG.info("Started " + job.getJobName());
  job.waitForCompletion(true);
  assertTrue(job.isSuccessful());
  LOG.info("After map/reduce completion - job " + jobName);
}
SequenceFileOutputFormat.setOutputPath(job, new Path(dictOutputPath));
logger.info("Starting: " + job.getJobName());
job.getConfiguration().set(BatchConstants.CFG_CUBE_SEGMENT_ID, segmentID);
logger.info("Starting: " + job.getJobName());
job.getConfiguration().set(BatchConstants.CFG_CUBE_SEGMENT_ID, segmentID);
job.getConfiguration().set(BatchConstants.CFG_STATISTICS_SAMPLING_PERCENT, statistics_sampling_percent);
logger.info("Starting: " + job.getJobName());
CubeSegment originalSegment = cube.getOriginalSegmentToOptimize(optSegment);
logger.info("Starting: " + job.getJobName());
public static void cleanup(Job job) throws IOException {
  final Path jobDir = getJobPath(job.getJobID(), job.getWorkingDirectory());
  final FileSystem fs = jobDir.getFileSystem(job.getConfiguration());
  RuntimeException e = null;
  try {
    JobHelper.deleteWithRetry(fs, jobDir, true);
  } catch (RuntimeException ex) {
    e = ex;
  }
  try {
    JobHelper.deleteWithRetry(fs, getJobClassPathDir(job.getJobName(), job.getWorkingDirectory()), true);
  } catch (RuntimeException ex) {
    // Keep the first failure as primary and attach later ones as suppressed,
    // so both deletion errors surface in a single stack trace.
    if (e == null) {
      e = ex;
    } else {
      e.addSuppressed(ex);
    }
  }
  if (e != null) {
    throw e;
  }
}
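// Illustrative usage of cleanup (the job-running step is hypothetical): delete
// the temporary job and classpath directories even when the job itself throws.
try {
  runJob(job); // hypothetical job-running step
} finally {
  cleanup(job);
}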
job.getConfiguration().set(BatchConstants.CFG_STATISTICS_SAMPLING_PERCENT, statistics_sampling_percent);
logger.info("Starting: " + job.getJobName());
/**
 * Tests an MR Scan initialized from properties set in the Configuration.
 */
protected void testScanFromConfiguration(String start, String stop, String last)
    throws IOException, InterruptedException, ClassNotFoundException {
  String jobName = "ScanFromConfig"
      + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty")
      + "To" + (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
  Configuration c = new Configuration(TEST_UTIL.getConfiguration());
  c.set(TableInputFormat.INPUT_TABLE, TABLE_NAME.getNameAsString());
  c.set(TableInputFormat.SCAN_COLUMN_FAMILY,
      Bytes.toString(INPUT_FAMILYS[0]) + ", " + Bytes.toString(INPUT_FAMILYS[1]));
  c.set(KEY_STARTROW, start != null ? start : "");
  c.set(KEY_LASTROW, last != null ? last : "");
  if (start != null) {
    c.set(TableInputFormat.SCAN_ROW_START, start);
  }
  if (stop != null) {
    c.set(TableInputFormat.SCAN_ROW_STOP, stop);
  }
  Job job = Job.getInstance(c, jobName);
  job.setMapperClass(ScanMapper.class);
  job.setReducerClass(ScanReducer.class);
  job.setMapOutputKeyClass(ImmutableBytesWritable.class);
  job.setMapOutputValueClass(ImmutableBytesWritable.class);
  job.setInputFormatClass(TableInputFormat.class);
  job.setNumReduceTasks(1);
  FileOutputFormat.setOutputPath(job, new Path(job.getJobName()));
  TableMapReduceUtil.addDependencyJars(job);
  assertTrue(job.waitForCompletion(true));
}
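// Example invocations (row keys are illustrative placeholders, not the
// original test fixtures): a full-table scan, then a scan bounded on both
// ends where "yyx" is the row expected to be read last.
testScanFromConfiguration(null, null, null);
testScanFromConfiguration("bbb", "yyy", "yyx");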
log.info("Job %s submitted, status available at %s", job.getJobName(), job.getTrackingURL()); log.info("No counters found for job [%s]", job.getJobName()); } else { Counter invalidRowCount = counters.findCounter(HadoopDruidIndexerConfig.IndexJobCounters.INVALID_ROW_COUNTER); jobStats.setInvalidRowCount(invalidRowCount.getValue()); } else { log.info("No invalid row counter found for job [%s]", job.getJobName());
job.setReducerClass(ScanReducer.class);
job.setNumReduceTasks(1); // one to get final "first" and "last" key
FileOutputFormat.setOutputPath(job, new Path(job.getJobName()));
LOG.info("Started " + job.getJobName());
assertTrue(job.waitForCompletion(true));
LOG.info("After map/reduce completion - job " + jobName);
JobHelper.distributedClassPath(getJobClassPathDir(job.getJobName(), jobConf.getWorkingDirectory())),
    job
);
try {
  job.submit();
  log.info("Job %s submitted, status available at %s", job.getJobName(), job.getTrackingURL());
  final boolean success = job.waitForCompletion(true);
  if (!success) {
@Override
public void run() {
  try {
    Job job = createJob();
    if (job == null) {
      log.info("No MR job created. Skipping.");
      this.workingState = WorkUnitState.WorkingState.SUCCESSFUL;
      this.eventSubmitter.submit(Events.MR_JOB_SKIPPED);
      onSkippedMRJob();
      return;
    }
    job.submit();
    log.info("MR tracking URL {} for job {}", job.getTrackingURL(), job.getJobName());
    this.eventSubmitter.submit(Events.MR_JOB_STARTED_EVENT, Events.JOB_URL, job.getTrackingURL());
    job.waitForCompletion(false);
    this.mrJob = job;
    if (job.isSuccessful()) {
      this.eventSubmitter.submit(Events.MR_JOB_SUCCESSFUL, Events.JOB_URL, job.getTrackingURL());
      this.onMRTaskComplete(true, null);
    } else {
      this.eventSubmitter.submit(Events.MR_JOB_FAILED, Events.JOB_URL, job.getTrackingURL());
      this.onMRTaskComplete(false,
          new IOException(String.format("MR Job:%s is not successful", job.getTrackingURL())));
    }
  } catch (Throwable t) {
    log.error("Failed to run MR job.", t);
    this.eventSubmitter.submit(Events.MR_JOB_FAILED, Events.FAILURE_CONTEXT, t.getMessage());
    this.onMRTaskComplete(false, t);
  }
}
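// A minimal createJob override sketch (illustrative; the real implementation
// is subclass-specific). Returning null exercises the "skipped" branch in
// run() above.
@Override
protected Job createJob() throws IOException {
  if (nothingToDo()) { // hypothetical guard
    return null;       // run() marks the work unit successful and skips the MR job
  }
  return Job.getInstance(new Configuration(), "example-mr-task");
}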
log.info("Job %s submitted, status available at: %s", groupByJob.getJobName(), groupByJob.getTrackingURL());
log.info("Job %s submitted, status available at: %s", groupByJob.getJobName(), groupByJob.getTrackingURL()); log.info( "Job %s submitted, status available at: %s", dimSelectionJob.getJobName(), dimSelectionJob.getTrackingURL() );
LOG.info("Launching Hadoop MR job " + this.job.getJobName()); this.job.submit(); this.hadoopJobSubmitted = true;
configurer.addOutputFormat("out3", NullOutputFormat.class, Text.class, IntWritable.class);
Path outDir = new Path(workDir.getPath(), job.getJobName());
FileOutputFormat.setOutputPath(configurer.getJob("out1"), new Path(outDir, "out1"));
FileOutputFormat.setOutputPath(configurer.getJob("out2"), new Path(outDir, "out2"));