@Override
public SparkManager getSparkManager(String jobName) {
  // Resolve the Spark program id for this application, then hand back the
  // in-process manager backed by the discovery service client.
  return new DefaultSparkManager(
    application.spark(jobName),
    this,
    discoveryServiceClient);
}
@Override
public SparkManager getSparkManager(String jobName) {
  // Remote variant: the returned manager drives the Spark program through the
  // REST client rather than an in-process discovery lookup.
  return new RemoteSparkManager(
    application.spark(jobName),
    this,
    clientConfig,
    restClient);
}
@Override
public SparkManager getSparkManager(String jobName) {
  // Wraps the named Spark program of this application in the default manager
  // implementation, which uses the discovery service client to reach it.
  return new DefaultSparkManager(
    application.spark(jobName),
    this,
    discoveryServiceClient);
}
@Test public void testCrossNSSpark() throws Exception { createAuthNamespace(); ApplicationId appId = AUTH_NAMESPACE.app(TestSparkCrossNSDatasetApp.APP_NAME); Map<EntityId, Set<Action>> neededPrivileges = ImmutableMap.<EntityId, Set<Action>>builder() .put(appId, EnumSet.of(Action.ADMIN)) .put(AUTH_NAMESPACE.artifact(TestSparkCrossNSDatasetApp.class.getSimpleName(), "1.0-SNAPSHOT"), EnumSet.of(Action.ADMIN)) .put(AUTH_NAMESPACE.dataset(TestSparkCrossNSDatasetApp.DEFAULT_OUTPUT_DATASET), EnumSet.of(Action.ADMIN)) .put(AUTH_NAMESPACE.datasetType(KeyValueTable.class.getName()), EnumSet.of(Action.ADMIN)) .build(); setUpPrivilegeAndRegisterForDeletion(ALICE, neededPrivileges); ProgramId programId = appId.spark(TestSparkCrossNSDatasetApp.SPARK_PROGRAM_NAME); // bob will be executing the program grantAndAssertSuccess(programId, BOB, EnumSet.of(Action.EXECUTE)); cleanUpEntities.add(programId); ApplicationManager appManager = deployApplication(AUTH_NAMESPACE, TestSparkCrossNSDatasetApp.class); SparkManager sparkManager = appManager.getSparkManager(TestSparkCrossNSDatasetApp.SparkCrossNSDatasetProgram .class.getSimpleName()); testCrossNSSystemDatasetAccessWithAuthSpark(sparkManager); testCrossNSDatasetAccessWithAuthSpark(sparkManager); }
private void registerDatasets(ApplicationWithPrograms input) { ApplicationSpecification appSpec = input.getSpecification(); ApplicationId appId = input.getApplicationId(); NamespaceId namespaceId = appId.getParent(); for (MapReduceSpecification program : appSpec.getMapReduce().values()) { ProgramId programId = appId.mr(program.getName()); for (String dataset : program.getDataSets()) { usageRegistry.register(programId, namespaceId.dataset(dataset)); } } for (SparkSpecification sparkSpec : appSpec.getSpark().values()) { ProgramId programId = appId.spark(sparkSpec.getName()); for (String dataset : sparkSpec.getDatasets()) { usageRegistry.register(programId, namespaceId.dataset(dataset)); } } for (ServiceSpecification serviceSpecification : appSpec.getServices().values()) { ProgramId programId = appId.service(serviceSpecification.getName()); for (HttpServiceHandlerSpecification handlerSpecification : serviceSpecification.getHandlers().values()) { for (String dataset : handlerSpecification.getDatasets()) { usageRegistry.register(programId, namespaceId.dataset(dataset)); } } } } }
// Fragment (incomplete in this view — the closing braces lie outside the visible span):
// builds the ProgramId for one Spark program and registers usage of every dataset the
// spec declares for it. NOTE(review): appears to be the Spark loop body of a larger
// registration routine — confirm against the enclosing method.
ProgramId programId = appId.spark(sparkSpec.getName()); for (String dataset : sparkSpec.getDatasets()) { usageRegistry.register(programId, namespaceId.dataset(dataset));
@Test public void testGetTargetTypeChild() { ApplicationId expectedAppId = new ApplicationId("ns1", "app1"); MDSKey mdsValueKey = MetadataKey.createValueRowKey(expectedAppId.toMetadataEntity(), "key1"); ProgramId expectedProgramId = expectedAppId.spark("spark1"); MDSKey mdsValueKey2 = MetadataKey.createValueRowKey(expectedProgramId.toMetadataEntity(), "key2"); // assert that the key for parent child are independent and correct MetadataEntity actualAppId = MetadataKey.extractMetadataEntityFromKey(mdsValueKey.getKey()); Assert.assertEquals(expectedAppId.toMetadataEntity(), actualAppId); MetadataEntity actualProgramId = MetadataKey.extractMetadataEntityFromKey(mdsValueKey2.getKey()); Assert.assertEquals(expectedProgramId.toMetadataEntity(), actualProgramId); }
@Test
public void testGetProgramProfile() {
  ProfileId profileId = NamespaceId.DEFAULT.profile("p");
  Map<String, String> runtimeArgs =
    Collections.singletonMap(SystemArguments.PROFILE_NAME, profileId.getScopedName());

  ApplicationId appId = NamespaceId.DEFAULT.app("a");
  ProgramId mapReduceId = appId.mr("mr");
  ProgramId serviceId = appId.service("serv");
  ProgramId sparkId = appId.spark("spark");
  ProgramId workerId = appId.worker("worker");
  ProgramId workflowId = appId.workflow("wf");

  // Batch-style programs (mapreduce/spark/workflow) honour the profile argument,
  // while always-on programs (service/worker) stay on the native profile.
  Assert.assertEquals(profileId, SystemArguments.getProfileIdForProgram(mapReduceId, runtimeArgs));
  Assert.assertEquals(ProfileId.NATIVE, SystemArguments.getProfileIdForProgram(serviceId, runtimeArgs));
  Assert.assertEquals(profileId, SystemArguments.getProfileIdForProgram(sparkId, runtimeArgs));
  Assert.assertEquals(ProfileId.NATIVE, SystemArguments.getProfileIdForProgram(workerId, runtimeArgs));
  Assert.assertEquals(profileId, SystemArguments.getProfileIdForProgram(workflowId, runtimeArgs));
}
// Fragment: resolves the ProgramId of the Spark program named sparkName within appId.
ProgramId sparkProgram = appId.spark(sparkName);
// Fragment: test fixture ids — one dataset plus three programs of different types
// (spark/worker/service), each in its own app under the default namespace.
final DatasetId datasetInstance2 = NamespaceId.DEFAULT.dataset("dataset2"); final ProgramId program1 = NamespaceId.DEFAULT.app("app1").spark("spark1"); final ProgramId program2 = NamespaceId.DEFAULT.app("app2").worker("worker2"); final ProgramId program3 = NamespaceId.DEFAULT.app("app3").service("service3");