@Override
public MapReduceManager getMapReduceManager(String programName) {
  ProgramId programId = application.mr(programName);
  return new DefaultMapReduceManager(Id.Program.fromEntityId(programId), this);
}
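// A hedged usage sketch, not from the source: the program name "NoOpMR", the
// applicationManager variable, and the timeout are assumed for illustration.
MapReduceManager manager = applicationManager.getMapReduceManager("NoOpMR");
manager.start();
manager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);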
private void registerDatasets(ApplicationWithPrograms input) {
  ApplicationSpecification appSpec = input.getSpecification();
  ApplicationId appId = input.getApplicationId();
  NamespaceId namespaceId = appId.getParent();
  for (MapReduceSpecification program : appSpec.getMapReduce().values()) {
    ProgramId programId = appId.mr(program.getName());
    for (String dataset : program.getDataSets()) {
      usageRegistry.register(programId, namespaceId.dataset(dataset));
    }
  }
  for (SparkSpecification sparkSpec : appSpec.getSpark().values()) {
    ProgramId programId = appId.spark(sparkSpec.getName());
    for (String dataset : sparkSpec.getDatasets()) {
      usageRegistry.register(programId, namespaceId.dataset(dataset));
    }
  }
  for (ServiceSpecification serviceSpecification : appSpec.getServices().values()) {
    ProgramId programId = appId.service(serviceSpecification.getName());
    for (HttpServiceHandlerSpecification handlerSpecification
        : serviceSpecification.getHandlers().values()) {
      for (String dataset : handlerSpecification.getDatasets()) {
        usageRegistry.register(programId, namespaceId.dataset(dataset));
      }
    }
  }
}
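// A hedged refactoring sketch: the three loops above share one shape, so a private
// helper could remove the repetition. registerAll is hypothetical, not part of the
// original class.
private void registerAll(ProgramId programId, Iterable<String> datasets, NamespaceId namespaceId) {
  for (String dataset : datasets) {
    usageRegistry.register(programId, namespaceId.dataset(dataset));
  }
}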
// Excerpt: the statement that builds `apps` is truncated above this fragment.
Collection<ProgramId> programs = apps.stream()
    .flatMap(app -> Stream.of(app.mr(program1), app.mr(program2)))
    .collect(Collectors.toList());

// Both MapReduce programs in both apps are expected to report the active states.
expected.put(namespace.app(app1).mr(program1), activeStates);
expected.put(namespace.app(app1).mr(program2), activeStates);
expected.put(namespace.app(app2).mr(program1), activeStates);
expected.put(namespace.app(app2).mr(program2), activeStates);
actual.put(namespace.app(app1).mr(program1), new HashSet<>());
actual.put(namespace.app(app1).mr(program2), new HashSet<>());
actual.put(namespace.app(app2).mr(program1), new HashSet<>());
actual.put(namespace.app(app2).mr(program2), new HashSet<>());
allActual.putAll(actual);

// Single-app variant of the same bookkeeping.
expected.put(app.mr(program1), activeStates);
expected.put(app.mr(program2), activeStates);
actual.put(app.mr(program1), new HashSet<>());
actual.put(app.mr(program2), new HashSet<>());
for (Map.Entry<ProgramRunId, RunRecordMeta> activeRun : activeRuns.entrySet()) {
  ProgramId programId = activeRun.getKey().getParent();
ProgramRunId mrId = NamespaceId.DEFAULT.app("app").mr("mr").run(randomRunId());
store.setProvisioning(mrId, Collections.emptyMap(), SINGLETON_PROFILE_MAP,
    Bytes.toBytes(sourceId.getAndIncrement()), artifactId);
// The run id must be captured: the next call relies on the assignment the original dropped.
ProgramRunId mrInWorkflowId =
    workflowId.getParent().getParent().mr("mrInWorkflow").run(randomRunId());
store.setProvisioning(mrInWorkflowId, Collections.emptyMap(), ImmutableMap.of(
@Test
public void testLoadingProgram() throws Exception {
  ApplicationSpecification appSpec = Specifications.from(new FooApp());
  ApplicationId appId = NamespaceId.DEFAULT.app(appSpec.getName());
  store.addApplication(appId, appSpec);

  ProgramDescriptor descriptor = store.loadProgram(appId.mr("mrJob1"));
  Assert.assertNotNull(descriptor);
  MapReduceSpecification mrSpec = descriptor.getSpecification();
  Assert.assertEquals("mrJob1", mrSpec.getName());
  Assert.assertEquals(FooMapReduceJob.class.getName(), mrSpec.getClassName());
}
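// A plausible shape for FooApp, reconstructed from the assertions above (hedged; the
// real class may configure more than this).
public class FooApp extends AbstractApplication {
  @Override
  public void configure() {
    addMapReduce(new FooMapReduceJob());
  }

  public static class FooMapReduceJob extends AbstractMapReduce {
    @Override
    protected void configure() {
      setName("mrJob1");
    }
  }
}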
ProgramId mapreduceProgramId = appId.mr("NoOpMR"); ProgramId workflowProgramId = appId.workflow("NoOpWorkflow");
@Test
@SuppressWarnings("unchecked")
public void testExists() throws NotFoundException {
  existenceVerifier.ensureExists(new InstanceId(EXISTS));
  existenceVerifier.ensureExists(NAMESPACE);
  existenceVerifier.ensureExists(ARTIFACT);
  ApplicationId app = NAMESPACE.app(AllProgramsApp.NAME);
  existenceVerifier.ensureExists(app);
  existenceVerifier.ensureExists(app.mr(AllProgramsApp.NoOpMR.NAME));
  existenceVerifier.ensureExists(NAMESPACE.dataset(AllProgramsApp.DATASET_NAME));
}
@Test
public void testDoesNotExist() {
  assertDoesNotExist(new InstanceId(DOES_NOT_EXIST));
  assertDoesNotExist(new NamespaceId(DOES_NOT_EXIST));
  assertDoesNotExist(NamespaceId.DEFAULT.artifact(DOES_NOT_EXIST, "1.0"));
  ApplicationId app = NamespaceId.DEFAULT.app(AllProgramsApp.NAME);
  assertDoesNotExist(NamespaceId.DEFAULT.app(DOES_NOT_EXIST));
  assertDoesNotExist(app.mr(DOES_NOT_EXIST));
  assertDoesNotExist(NamespaceId.DEFAULT.dataset(DOES_NOT_EXIST));
}
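// A minimal sketch of the assertDoesNotExist helper the test relies on (assumed shape;
// EntityId as the common parent of the ids passed above, and ensureExists throwing
// NotFoundException as in testExists).
private void assertDoesNotExist(EntityId id) {
  try {
    existenceVerifier.ensureExists(id);
    Assert.fail("Expected NotFoundException for " + id);
  } catch (NotFoundException expected) {
    // the id is absent, as the test requires
  }
}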
@Test
public void testRunsLimit() {
  ApplicationSpecification spec = Specifications.from(new AllProgramsApp());
  ApplicationId appId = new ApplicationId("testRunsLimit", spec.getName());
  store.addApplication(appId, spec);
  ProgramId mapreduceProgramId = appId.mr(AllProgramsApp.NoOpMR.class.getSimpleName());
  ArtifactId artifactId = appId.getNamespaceId().artifact("testArtifact", "1.0").toApiArtifactId();

  Assert.assertNotNull(store.getApplication(appId));

  long now = System.currentTimeMillis();
  ProgramRunId programRunId = mapreduceProgramId.run(RunIds.generate(now - 3000));
  setStartAndRunning(programRunId, artifactId);
  store.setStop(programRunId, now - 100, ProgramController.State.COMPLETED.getRunStatus(),
                AppFabricTestHelper.createSourceId(++sourceId));
  setStartAndRunning(mapreduceProgramId.run(RunIds.generate(now - 2000)), artifactId);

  // Even though there are two separate run records (one complete, one active), only one
  // should be returned by the query, because a limit of 1 is passed in.
  Map<ProgramRunId, RunRecordMeta> historymap =
      store.getRuns(mapreduceProgramId, ProgramRunStatus.ALL, 0, Long.MAX_VALUE, 1);
  Assert.assertEquals(1, historymap.size());
}
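// A hedged addition, not in the original test: appended inside testRunsLimit, the same
// query with a large limit would return both run records.
Map<ProgramRunId, RunRecordMeta> allRuns =
    store.getRuns(mapreduceProgramId, ProgramRunStatus.ALL, 0, Long.MAX_VALUE, Integer.MAX_VALUE);
Assert.assertEquals(2, allRuns.size());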
@Test
public void testGetProgramProfile() {
  ProfileId profileId = NamespaceId.DEFAULT.profile("p");
  Map<String, String> args = Collections.singletonMap(SystemArguments.PROFILE_NAME,
                                                      profileId.getScopedName());
  ApplicationId appId = NamespaceId.DEFAULT.app("a");
  ProgramId mrId = appId.mr("mr");
  ProgramId serviceId = appId.service("serv");
  ProgramId sparkId = appId.spark("spark");
  ProgramId workerId = appId.worker("worker");
  ProgramId workflowId = appId.workflow("wf");

  // Only MapReduce, Spark, and Workflow honor the profile argument; services and
  // workers always resolve to the native profile.
  Assert.assertEquals(profileId, SystemArguments.getProfileIdForProgram(mrId, args));
  Assert.assertEquals(ProfileId.NATIVE, SystemArguments.getProfileIdForProgram(serviceId, args));
  Assert.assertEquals(profileId, SystemArguments.getProfileIdForProgram(sparkId, args));
  Assert.assertEquals(ProfileId.NATIVE, SystemArguments.getProfileIdForProgram(workerId, args));
  Assert.assertEquals(profileId, SystemArguments.getProfileIdForProgram(workflowId, args));
}
ProgramId mapreduce = appId.mr(AppWithProgramsUsingGuava.NoOpMR.NAME);
startProgram(Id.Program.fromEntityId(mapreduce));
waitForRuns(1, mapreduce, ProgramRunStatus.FAILED);
@Test
public void testProgramStatusFromSingleRun() {
  RunRecordMeta record = RunRecordMeta.builder()
      .setProgramRunId(NamespaceId.DEFAULT.app("app").mr("mr").run(RunIds.generate()))
      .setStartTime(System.currentTimeMillis())
      .setArtifactId(new ArtifactId("r", new ArtifactVersion("1.0"), ArtifactScope.USER))
ProgramId mapReduceProgram = appId.mr(mapReduceName);
ProgramId sparkProgram = appId.spark(sparkName);
ScheduleId scheduleId2 = defaultAppId.schedule(AppWithSchedule.SCHEDULE);
ProgramId programId = defaultAppId.workflow(AppWithSchedule.WORKFLOW_NAME);
ProgramId mapReduceProgramId = defaultAppId.mr(AppWithSchedule.MAPREDUCE);
@Test
public void testProgramStatusFromMultipleRuns() {
  ProgramId programId = NamespaceId.DEFAULT.app("app").mr("mr");
  RunRecordMeta pending = RunRecordMeta.builder()
      .setProgramRunId(programId.run(RunIds.generate()))
store.addApplication(appId2, spec);
ProgramId mapreduceProgramId1 = appId1.mr("NoOpMR");
ProgramId workflowProgramId1 = appId1.workflow("NoOpWorkflow");
ArtifactId artifactId = appId1.getNamespaceId().artifact("testArtifact", "1.0").toApiArtifactId();