/**
 * Verifies that the current caller may perform the given ACL operation on
 * the job.
 *
 * @param job the job being accessed
 * @param jobOperation the ACL operation being attempted
 * @throws IOException wrapping an AccessControlException if the caller is
 *         not authorized, or if the caller UGI cannot be resolved
 */
private void checkAccess(Job job, JobACL jobOperation) throws IOException {
  UserGroupInformation callerUGI = UserGroupInformation.getCurrentUser();
  if (job.checkAccess(callerUGI, jobOperation)) {
    return;
  }
  throw new IOException(new AccessControlException("User "
      + callerUGI.getShortUserName() + " cannot perform operation "
      + jobOperation.name() + " on " + job.getID()));
}
/**
 * Verifies that the current caller may perform the given ACL operation on
 * the job.
 *
 * @param job the job being accessed
 * @param jobOperation the ACL operation being attempted
 * @throws IOException wrapping an AccessControlException if the caller is
 *         not authorized, or if the caller UGI cannot be resolved
 */
private void checkAccess(Job job, JobACL jobOperation) throws IOException {
  UserGroupInformation callerUGI = UserGroupInformation.getCurrentUser();
  if (job.checkAccess(callerUGI, jobOperation)) {
    return;
  }
  throw new IOException(new AccessControlException("User "
      + callerUGI.getShortUserName() + " cannot perform operation "
      + jobOperation.name() + " on " + job.getID()));
}
/**
 * Verifies that the current caller may perform the given ACL operation on
 * the job.
 *
 * @param job the job being accessed
 * @param jobOperation the ACL operation being attempted
 * @throws IOException wrapping an AccessControlException if the caller is
 *         not authorized, or if the caller UGI cannot be resolved
 */
private void checkAccess(Job job, JobACL jobOperation) throws IOException {
  UserGroupInformation callerUGI = UserGroupInformation.getCurrentUser();
  if (job.checkAccess(callerUGI, jobOperation)) {
    return;
  }
  throw new IOException(new AccessControlException("User "
      + callerUGI.getShortUserName() + " cannot perform operation "
      + jobOperation.name() + " on " + job.getID()));
}
/**
 * Verifies that the current caller may perform the given ACL operation on
 * the job.
 *
 * @param job the job being accessed
 * @param jobOperation the ACL operation being attempted
 * @throws IOException wrapping an AccessControlException if the caller is
 *         not authorized, or if the caller UGI cannot be resolved
 */
private void checkAccess(Job job, JobACL jobOperation) throws IOException {
  UserGroupInformation callerUGI = UserGroupInformation.getCurrentUser();
  if (job.checkAccess(callerUGI, jobOperation)) {
    return;
  }
  throw new IOException(new AccessControlException("User "
      + callerUGI.getShortUserName() + " cannot perform operation "
      + jobOperation.name() + " on " + job.getID()));
}
/**
 * Builds a map of mock jobs for the given application.
 *
 * @param appID application the jobs belong to
 * @param numJobsPerApp number of jobs to create
 * @param numTasksPerJob tasks per created job
 * @param numAttemptsPerTask attempts per created task
 * @return map from each job's id to the job
 */
public static Map<JobId, Job> newJobs(ApplicationId appID, int numJobsPerApp,
    int numTasksPerJob, int numAttemptsPerTask) {
  Map<JobId, Job> jobMap = Maps.newHashMap();
  for (int i = 0; i < numJobsPerApp; i++) {
    Job mockJob = newJob(appID, i, numTasksPerJob, numAttemptsPerTask);
    jobMap.put(mockJob.getID(), mockJob);
  }
  return jobMap;
}
/**
 * Builds a map of mock jobs for the given application, optionally giving
 * each job failed tasks.
 *
 * @param appID application the jobs belong to
 * @param numJobsPerApp number of jobs to create
 * @param numTasksPerJob tasks per created job
 * @param numAttemptsPerTask attempts per created task
 * @param hasFailedTasks whether each job should include failed tasks
 * @return map from each job's id to the job
 */
public static Map<JobId, Job> newJobs(ApplicationId appID, int numJobsPerApp,
    int numTasksPerJob, int numAttemptsPerTask, boolean hasFailedTasks) {
  Map<JobId, Job> jobMap = Maps.newHashMap();
  for (int i = 0; i < numJobsPerApp; i++) {
    // No conf path is supplied for these mock jobs.
    Job mockJob =
        newJob(appID, i, numTasksPerJob, numAttemptsPerTask, null,
            hasFailedTasks);
    jobMap.put(mockJob.getID(), mockJob);
  }
  return jobMap;
}
@GET @Path("/jobs") @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) public JobsInfo getJobs(@Context HttpServletRequest hsr) { init(); JobsInfo allJobs = new JobsInfo(); for (Job job : appCtx.getAllJobs().values()) { // getAllJobs only gives you a partial we want a full Job fullJob = appCtx.getJob(job.getID()); if (fullJob == null) { continue; } allJobs.add(new JobInfo(fullJob, hasAccess(fullJob, hsr))); } return allJobs; }
@GET @Path("/jobs") @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public JobsInfo getJobs(@Context HttpServletRequest hsr) { init(); JobsInfo allJobs = new JobsInfo(); for (Job job : appCtx.getAllJobs().values()) { // getAllJobs only gives you a partial we want a full Job fullJob = appCtx.getJob(job.getID()); if (fullJob == null) { continue; } allJobs.add(new JobInfo(fullJob, hasAccess(fullJob, hsr))); } return allJobs; }
/**
 * This can be overridden to instantiate multiple jobs and create a
 * workflow.
 *
 * TODO: Rework the design to actually support this. Currently much of the
 * job stuff has been moved to init() above to support uberization (MR-1220).
 * In a typical workflow, one presumably would want to uberize only a subset
 * of the jobs (the "small" ones), which is awkward with the current design.
 */
@SuppressWarnings("unchecked")
protected void startJobs() {
  // Dispatch a job-start event to get this ball rolling; handling the
  // event triggers the job execution.
  dispatcher.getEventHandler().handle(
      new JobStartEvent(job.getID(), recoveredJobStartTime));
}
/**
 * REST endpoint returning info on every AM attempt for a job.
 *
 * @param jid the job id string from the request path
 * @return info for each AM attempt of the job
 */
@GET
@Path("/jobs/{jobid}/jobattempts")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
  init();
  Job job = getJobFromJobIdString(jid, appCtx);
  AMAttemptsInfo amAttempts = new AMAttemptsInfo();
  // The job id string and user are the same for every attempt; compute
  // them once instead of per loop iteration.
  String jobIdStr = MRApps.toString(job.getID());
  String user = job.getUserName();
  for (AMInfo amInfo : job.getAMInfos()) {
    amAttempts.add(new AMAttemptInfo(amInfo, jobIdStr, user));
  }
  return amAttempts;
}
/**
 * REST endpoint returning info on every AM attempt for a job. Responses
 * are produced as JSON or XML with an explicit UTF-8 charset.
 *
 * @param jid the job id string from the request path
 * @return info for each AM attempt of the job
 */
@GET
@Path("/jobs/{jobid}/jobattempts")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
  init();
  Job job = getJobFromJobIdString(jid, appCtx);
  AMAttemptsInfo amAttempts = new AMAttemptsInfo();
  for (AMInfo info : job.getAMInfos()) {
    amAttempts.add(new AMAttemptInfo(info, MRApps.toString(job.getID()),
        job.getUserName()));
  }
  return amAttempts;
}
/**
 * REST endpoint returning info on every AM attempt for a job.
 *
 * @param jid the job id string from the request path
 * @return info for each AM attempt of the job
 */
@GET
@Path("/jobs/{jobid}/jobattempts")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
  init();
  Job job = getJobFromJobIdString(jid, appCtx);
  AMAttemptsInfo amAttempts = new AMAttemptsInfo();
  for (AMInfo info : job.getAMInfos()) {
    amAttempts.add(new AMAttemptInfo(info, MRApps.toString(job.getID()),
        job.getUserName()));
  }
  return amAttempts;
}
/**
 * REST endpoint returning info on every AM attempt for a job, including
 * links built from the request base URI and webapp name.
 *
 * @param jid the job id string from the request path
 * @return info for each AM attempt of the job
 */
@GET
@Path("/mapreduce/jobs/{jobid}/jobattempts")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
  init();
  Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
  AMAttemptsInfo amAttempts = new AMAttemptsInfo();
  // These values are identical for every attempt; compute them once
  // instead of per loop iteration.
  String jobIdStr = MRApps.toString(job.getID());
  String user = job.getUserName();
  String baseUri = uriInfo.getBaseUri().toString();
  String webappName = webapp.name();
  for (AMInfo amInfo : job.getAMInfos()) {
    amAttempts.add(new AMAttemptInfo(amInfo, jobIdStr, user, baseUri,
        webappName));
  }
  return amAttempts;
}
/**
 * REST endpoint returning info on every AM attempt for a job, including
 * links built from the request base URI and webapp name.
 *
 * @param jid the job id string from the request path
 * @return info for each AM attempt of the job
 */
@GET
@Path("/mapreduce/jobs/{jobid}/jobattempts")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
  init();
  Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
  AMAttemptsInfo amAttempts = new AMAttemptsInfo();
  for (AMInfo info : job.getAMInfos()) {
    amAttempts.add(new AMAttemptInfo(info, MRApps.toString(job.getID()),
        job.getUserName(), uriInfo.getBaseUri().toString(), webapp.name()));
  }
  return amAttempts;
}
/**
 * Create numJobs in a map with jobs having appId==jobId
 *
 * @param numJobs number of jobs (and applications) to create
 * @param numTasksPerJob tasks per created job
 * @param numAttemptsPerTask attempts per created task
 * @return map from each job's id to the job
 */
public static Map<JobId, Job> newJobs(int numJobs, int numTasksPerJob,
    int numAttemptsPerTask) {
  Map<JobId, Job> jobMap = Maps.newHashMap();
  for (int i = 0; i < numJobs; i++) {
    // Each job gets its own application id, matching the job index.
    Job mockJob =
        newJob(MockJobs.newAppID(i), i, numTasksPerJob, numAttemptsPerTask);
    jobMap.put(mockJob.getID(), mockJob);
  }
  return jobMap;
}
/**
 * REST endpoint returning info on every AM attempt for a job, including
 * links built from the request base URI and webapp name.
 *
 * @param jid the job id string from the request path
 * @return info for each AM attempt of the job
 */
@GET
@Path("/mapreduce/jobs/{jobid}/jobattempts")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
  init();
  Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
  AMAttemptsInfo amAttempts = new AMAttemptsInfo();
  for (AMInfo info : job.getAMInfos()) {
    amAttempts.add(new AMAttemptInfo(info, MRApps.toString(job.getID()),
        job.getUserName(), uriInfo.getBaseUri().toString(), webapp.name()));
  }
  return amAttempts;
}
/**
 * Creates a mock app context containing a single job with the given task
 * and attempt counts, loading job configuration from confPath.
 *
 * @param appid numeric application id for the mock app
 * @param numTasks tasks in the single mock job
 * @param numAttempts attempts per task
 * @param confPath path to the job configuration
 */
public MockAppContext(int appid, int numTasks, int numAttempts, Path confPath) {
  appID = MockJobs.newAppID(appid);
  appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
  Job mockJob = MockJobs.newJob(appID, 0, numTasks, numAttempts, confPath);
  Map<JobId, Job> singleJob = Maps.newHashMap();
  singleJob.put(mockJob.getID(), mockJob);
  jobs = singleJob;
}
/**
 * Builds the counter view for a job: populates total/map/reduce counters
 * via getCounters, then flattens each non-null total group (paired with
 * its map and reduce counterparts, when present) into counterGroup.
 *
 * @param ctx the app context used to resolve counters
 * @param job the job whose counters are reported
 */
public JobCounterInfo(AppContext ctx, Job job) {
  getCounters(ctx, job);
  counterGroup = new ArrayList<CounterGroupInfo>();
  this.id = MRApps.toString(job.getID());
  if (total == null) {
    return;
  }
  for (CounterGroup group : total) {
    if (group == null) {
      continue;
    }
    // Map/reduce aggregates may be absent; pass null groups through.
    CounterGroup mapSide =
        (map == null) ? null : map.getGroup(group.getName());
    CounterGroup reduceSide =
        (reduce == null) ? null : reduce.getGroup(group.getName());
    counterGroup.add(
        new CounterGroupInfo(group.getName(), group, mapSide, reduceSide));
  }
}
@Test public void testSingleCounterView() { AppContext appContext = new MockAppContext(0, 1, 1, 1); Job job = appContext.getAllJobs().values().iterator().next(); // add a failed task to the job without any counters Task failedTask = MockJobs.newTask(job.getID(), 2, 1, true); Map<TaskId,Task> tasks = job.getTasks(); tasks.put(failedTask.getID(), failedTask); Map<String, String> params = getJobParams(appContext); params.put(AMParams.COUNTER_GROUP, "org.apache.hadoop.mapreduce.FileSystemCounter"); params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS"); WebAppTests.testPage(SingleCounterPage.class, AppContext.class, appContext, params); }
@Test public void testJobRebootNotLastRetryOnUnregistrationFailure() throws Exception { MRApp app = new MRApp(1, 0, false, this.getClass().getName(), true); Job job = app.submit(new Configuration()); app.waitForState(job, JobState.RUNNING); Assert.assertEquals("Num tasks not correct", 1, job.getTasks().size()); Iterator<Task> it = job.getTasks().values().iterator(); Task task = it.next(); app.waitForState(task, TaskState.RUNNING); //send an reboot event app.getContext().getEventHandler().handle(new JobEvent(job.getID(), JobEventType.JOB_AM_REBOOT)); // return exteranl state as RUNNING since otherwise the JobClient will // prematurely exit. app.waitForState(job, JobState.RUNNING); }