props.put(entry.getKey(), entry.getValue());

return new HadoopDefaultJobInfo(jobConf.getJobName(), jobConf.getUser(),
    hasCombiner, numReduces, props, credentials);
job.getJobName() + "' to " + job.getQueueName() + " queue. " +
    "(current delta dirs count=" + curDirNumber +
    ", obsolete delta dirs count=" + obsoleteDirNumber +
    ". TxnIdRange[" + minTxn + "," + maxTxn + "]");
RunningJob rj = new JobClient(job).submitJob(job);
LOG.info("Submitted compaction job '" + job.getJobName() +
    "' with jobID=" + rj.getID() + " compaction ID=" + id);
txnHandler.setHadoopJobId(rj.getID().toString(), id);
rj.waitForCompletion();
job.getJobName() + "' to " + job.getQueueName() + " queue. " +
    "(current delta dirs count=" + curDirNumber +
    ", obsolete delta dirs count=" + obsoleteDirNumber +
    ". TxnIdRange[" + minTxn + "," + maxTxn + "]");
jc = new JobClient(job);
RunningJob rj = jc.submitJob(job);
LOG.info("Submitted compaction job '" + job.getJobName() +
    "' with jobID=" + rj.getID() + " compaction ID=" + id);
try {
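// A minimal, self-contained sketch of the submit-and-wait pattern the two truncated
// fragments above share, using only the classic org.apache.hadoop.mapred API. The
// class name and job name are illustrative placeholders, not from the original
// sources; a real run would also need input/output paths and a mapper configured.
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;

public class SubmitAndWaitSketch {
  public static void main(String[] args) throws Exception {
    JobConf job = new JobConf();
    job.setJobName("example-job"); // echoed back later via job.getJobName()

    // submitJob() returns immediately with a handle to the running job
    RunningJob rj = new JobClient(job).submitJob(job);
    System.out.println("Submitted '" + job.getJobName() + "' with jobID=" + rj.getID());

    rj.waitForCompletion(); // block until the job finishes
    if (!rj.isSuccessful()) {
      throw new RuntimeException("Job " + rj.getID() + " failed");
    }
  }
}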
@Override
protected void runJob(String jobName, Configuration c, List<Scan> scans)
    throws IOException, InterruptedException, ClassNotFoundException {
  JobConf job = new JobConf(TEST_UTIL.getConfiguration());

  job.setJobName(jobName);
  job.setMapperClass(Mapper.class);
  job.setReducerClass(Reducer.class);

  TableMapReduceUtil.initMultiTableSnapshotMapperJob(getSnapshotScanMapping(scans),
      Mapper.class, ImmutableBytesWritable.class, ImmutableBytesWritable.class,
      job, true, restoreDir);

  TableMapReduceUtil.addDependencyJars(job);

  job.setNumReduceTasks(1); // one to get final "first" and "last" key

  FileOutputFormat.setOutputPath(job, new Path(job.getJobName()));
  LOG.info("Started " + job.getJobName());

  RunningJob runningJob = JobClient.runJob(job);
  runningJob.waitForCompletion();
  assertTrue(runningJob.isSuccessful());
  LOG.info("After map/reduce completion - job " + jobName);
}
@Test
@SuppressWarnings({ "deprecation" })
public void shouldCreateAndRunSubmittableJob() throws Exception {
  RowCounter rCounter = new RowCounter();
  rCounter.setConf(HBaseConfiguration.create());
  String[] args = new String[] { "\temp", "tableA", "column1", "column2", "column3" };

  JobConf jobConfig = rCounter.createSubmittableJob(args);

  assertNotNull(jobConfig);
  assertEquals(0, jobConfig.getNumReduceTasks());
  assertEquals("rowcounter", jobConfig.getJobName());
  assertEquals(Result.class, jobConfig.getMapOutputValueClass());
  assertEquals(RowCounterMapper.class, jobConfig.getMapperClass());
  assertEquals(Joiner.on(' ').join("column1", "column2", "column3"),
      jobConfig.get(TableInputFormat.COLUMN_LIST));
  assertEquals(ImmutableBytesWritable.class, jobConfig.getMapOutputKeyClass());
}
/**
 * Get the user-specified job name. This is only used to identify the
 * job to the user.
 *
 * @return the job's name, defaulting to "".
 */
public String getJobName() {
  return conf.getJobName();
}
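// A minimal round-trip sketch (class name is illustrative, not from the original
// sources): the name set via JobConf.setJobName(String) is what getJobName()
// returns, and an unnamed job yields the empty string documented above.
import org.apache.hadoop.mapred.JobConf;

public class JobNameRoundTrip {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    System.out.println("[" + conf.getJobName() + "]"); // prints [] -- defaults to ""

    conf.setJobName("word-count");
    System.out.println(conf.getJobName()); // prints word-count
  }
}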
when(jobConf.getJobName()).thenReturn(TEST_JOB_NAME);
/**
 * Constructor MapReduceFlow creates a new MapReduceFlow instance.
 *
 * @param jobConf of type JobConf
 */
@ConstructorProperties({"jobConf"})
public MapReduceFlow( JobConf jobConf )
  {
  this( jobConf.getJobName(), jobConf, false );
  }
/**
 * Get the job name from the job conf
 */
static String getJobName(JobConf jobConf) {
  String jobName = jobConf.getJobName();
  if (jobName == null || jobName.length() == 0) {
    jobName = "NA";
  }
  return jobName;
}
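// Hypothetical usage sketch of the fallback helper above (the class and job names
// are illustrative, and the helper is inlined here only so the sketch compiles on
// its own): unnamed jobs display as "NA" rather than as an empty string, since
// JobConf.getJobName() returns "" when no name was set.
import org.apache.hadoop.mapred.JobConf;

public class JobNameFallbackDemo {
  public static void main(String[] args) {
    JobConf unnamed = new JobConf();
    System.out.println(getJobName(unnamed)); // prints NA

    JobConf named = new JobConf();
    named.setJobName("nightly-etl");
    System.out.println(getJobName(named)); // prints nightly-etl
  }

  // Copy of the helper shown above.
  static String getJobName(JobConf jobConf) {
    String jobName = jobConf.getJobName();
    if (jobName == null || jobName.length() == 0) {
      jobName = "NA";
    }
    return jobName;
  }
}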
/**
 * Constructor MapReduceFlow creates a new MapReduceFlow instance.
 *
 * @param jobConf of type JobConf
 * @param deleteSinkOnInit of type boolean
 */
@ConstructorProperties({"jobConf", "deleteSinkOnInit"})
public MapReduceFlow( JobConf jobConf, boolean deleteSinkOnInit )
  {
  this( jobConf.getJobName(), jobConf, deleteSinkOnInit );
  }
public MapReduceFlowStep( HadoopFlow flow, JobConf jobConf )
  {
  if( flow == null )
    throw new IllegalArgumentException( "flow may not be null" );

  setName( jobConf.getJobName() );
  setFlow( flow );

  this.jobConf = jobConf;

  configure(); // requires flow and jobConf
  }
MyFakeJobInProgress(JobConf jc, JobTracker jt) throws IOException {
  super((jobid = new JobID(jtIdentifier, jobCounter++)), jc, jt);
  Path jobFile = new Path("Dummy");
  this.profile = new JobProfile(jc.getUser(), jobid, jobFile.toString(), null,
      jc.getJobName(), jc.getQueueName());
  this.jobHistory = new FakeJobHistory();
}
public FakeJobInProgress(JobConf jobConf, FakeTaskTrackerManager taskTrackerManager,
    JobTracker jt) throws IOException {
  super(new JobID("test", ++jobCounter), jobConf, jt);
  this.taskTrackerManager = taskTrackerManager;
  this.startTime = System.currentTimeMillis();
  this.status = new JobStatus(getJobID(), 0f, 0f, JobStatus.PREP,
      jobConf.getUser(), jobConf.getJobName(), "", "");
  this.status.setJobPriority(JobPriority.NORMAL);
  this.status.setStartTime(startTime);
}
public FakeJobInProgress(JobConf jobConf, FakeTaskTrackerManager taskTrackerManager,
    JobTracker jt) throws IOException {
  super(new JobID("test", ++jobCounter), jobConf, jt);
  this.startTime = System.currentTimeMillis();
  this.status = new JobStatus(getJobID(), 0f, 0f, JobStatus.PREP,
      jobConf.getUser(), jobConf.getJobName(), "", "");
  this.status.setJobPriority(JobPriority.NORMAL);
  this.status.setStartTime(startTime);
  this.jobHistory = new FakeJobHistory();
}
ShuffleClientMetrics(TaskAttemptID reduceId, JobConf jobConf) {
  this.numCopiers = jobConf.getInt(MRJobConfig.SHUFFLE_PARALLEL_COPIES, 5);

  MetricsContext metricsContext = MetricsUtil.getContext("mapred");
  this.shuffleMetrics = MetricsUtil.createRecord(metricsContext, "shuffleInput");
  this.shuffleMetrics.setTag("user", jobConf.getUser());
  this.shuffleMetrics.setTag("jobName", jobConf.getJobName());
  this.shuffleMetrics.setTag("jobId", reduceId.getJobID().toString());
  this.shuffleMetrics.setTag("taskId", reduceId.toString());
  this.shuffleMetrics.setTag("sessionId", jobConf.getSessionId());
  metricsContext.registerUpdater(this);
}

public synchronized void inputBytes(long numBytes) {