/**
 * Returns the MapReduce program specifications keyed by program name.
 * Pure pass-through: forwards to the wrapped delegate without caching or copying.
 */
@Override public Map<String, MapReduceSpecification> getMapReduce() { return delegate.getMapReduce(); }
/**
 * Returns the MapReduce program specifications keyed by program name.
 * Pure pass-through: forwards to the wrapped delegate without caching or copying.
 */
@Override public Map<String, MapReduceSpecification> getMapReduce() { return delegate.getMapReduce(); }
/**
 * Looks up the deployed application and returns a lazy view over every program
 * specification it declares (MapReduce, services, Spark, workers, workflows).
 * NOTE(review): assumes the application exists in the store — a missing app would
 * NPE on the first accessor; confirm callers guarantee existence.
 */
private Iterable<ProgramSpecification> getProgramSpecs(ApplicationId appId) {
  ApplicationSpecification spec = store.getApplication(appId);
  return Iterables.concat(spec.getMapReduce().values(),
                          spec.getServices().values(),
                          spec.getSpark().values(),
                          spec.getWorkers().values(),
                          spec.getWorkflows().values());
}
/**
 * Looks up the deployed application and returns a lazy view over every program
 * specification it declares (flows, MapReduce, services, Spark, workers, workflows).
 * NOTE(review): assumes the application exists in the store — a missing app would
 * NPE on the first accessor; confirm callers guarantee existence.
 */
private Iterable<ProgramSpecification> getProgramSpecs(ApplicationId appId) {
  ApplicationSpecification spec = store.getApplication(appId);
  return Iterables.concat(spec.getFlows().values(),
                          spec.getMapReduce().values(),
                          spec.getServices().values(),
                          spec.getSpark().values(),
                          spec.getWorkers().values(),
                          spec.getWorkflows().values());
}
@Override protected void validateOptions(Program program, ProgramOptions options) { super.validateOptions(program, options); // Extract and verify parameters ApplicationSpecification appSpec = program.getApplicationSpecification(); Preconditions.checkNotNull(appSpec, "Missing application specification."); ProgramType processorType = program.getType(); Preconditions.checkNotNull(processorType, "Missing processor type."); Preconditions.checkArgument(processorType == ProgramType.MAPREDUCE, "Only MapReduce process type is supported."); MapReduceSpecification spec = appSpec.getMapReduce().get(program.getName()); Preconditions.checkNotNull(spec, "Missing MapReduceSpecification for %s", program.getName()); }
@Override protected void validateOptions(Program program, ProgramOptions options) { super.validateOptions(program, options); // Extract and verify parameters ApplicationSpecification appSpec = program.getApplicationSpecification(); Preconditions.checkNotNull(appSpec, "Missing application specification."); ProgramType processorType = program.getType(); Preconditions.checkNotNull(processorType, "Missing processor type."); Preconditions.checkArgument(processorType == ProgramType.MAPREDUCE, "Only MapReduce process type is supported."); MapReduceSpecification spec = appSpec.getMapReduce().get(program.getName()); Preconditions.checkNotNull(spec, "Missing MapReduceSpecification for %s", program.getName()); }
/**
 * Collects the ids of every program declared by the application, across all
 * program types handled here (MapReduce, workflow, service, Spark, worker).
 * Returns a mutable set; iteration order is unspecified.
 */
private Set<ProgramId> getAllPrograms(ApplicationId appId, ApplicationSpecification appSpec) {
  Set<ProgramId> programIds = new HashSet<>();
  programIds.addAll(getProgramsWithType(appId, ProgramType.MAPREDUCE, appSpec.getMapReduce()));
  programIds.addAll(getProgramsWithType(appId, ProgramType.WORKFLOW, appSpec.getWorkflows()));
  programIds.addAll(getProgramsWithType(appId, ProgramType.SERVICE, appSpec.getServices()));
  programIds.addAll(getProgramsWithType(appId, ProgramType.SPARK, appSpec.getSpark()));
  programIds.addAll(getProgramsWithType(appId, ProgramType.WORKER, appSpec.getWorkers()));
  return programIds;
}
/**
 * Adds every program of the application to the given properties builder,
 * one program type at a time. The per-type call order is preserved because
 * the ImmutableMap builder keeps insertion order.
 */
private void addPrograms(ImmutableMap.Builder<String, String> properties) {
  addPrograms(ProgramType.MAPREDUCE, appSpec.getMapReduce().values(), properties);
  addPrograms(ProgramType.SERVICE, appSpec.getServices().values(), properties);
  addPrograms(ProgramType.SPARK, appSpec.getSpark().values(), properties);
  addPrograms(ProgramType.WORKER, appSpec.getWorkers().values(), properties);
  addPrograms(ProgramType.WORKFLOW, appSpec.getWorkflows().values(), properties);
}
/**
 * Collects the ids of every program declared by the application, across all
 * program types handled here (flow, MapReduce, workflow, service, Spark, worker).
 * Returns a mutable set; iteration order is unspecified.
 */
private Set<ProgramId> getAllPrograms(ApplicationId appId, ApplicationSpecification appSpec) {
  Set<ProgramId> programIds = new HashSet<>();
  programIds.addAll(getProgramsWithType(appId, ProgramType.FLOW, appSpec.getFlows()));
  programIds.addAll(getProgramsWithType(appId, ProgramType.MAPREDUCE, appSpec.getMapReduce()));
  programIds.addAll(getProgramsWithType(appId, ProgramType.WORKFLOW, appSpec.getWorkflows()));
  programIds.addAll(getProgramsWithType(appId, ProgramType.SERVICE, appSpec.getServices()));
  programIds.addAll(getProgramsWithType(appId, ProgramType.SPARK, appSpec.getSpark()));
  programIds.addAll(getProgramsWithType(appId, ProgramType.WORKER, appSpec.getWorkers()));
  return programIds;
}
/**
 * Adds every program of the application to the given properties builder,
 * one program type at a time. The per-type call order is preserved because
 * the ImmutableMap builder keeps insertion order.
 */
private void addPrograms(ImmutableMap.Builder<String, String> properties) {
  addPrograms(ProgramType.FLOW, appSpec.getFlows().values(), properties);
  addPrograms(ProgramType.MAPREDUCE, appSpec.getMapReduce().values(), properties);
  addPrograms(ProgramType.SERVICE, appSpec.getServices().values(), properties);
  addPrograms(ProgramType.SPARK, appSpec.getSpark().values(), properties);
  addPrograms(ProgramType.WORKER, appSpec.getWorkers().values(), properties);
  addPrograms(ProgramType.WORKFLOW, appSpec.getWorkflows().values(), properties);
}
/**
 * Returns whether the application declares a program matching the given id,
 * by checking the program name against the spec map for its type.
 *
 * @throws IllegalArgumentException for a program type with no spec map here
 */
private boolean programExists(ProgramId id, ApplicationSpecification appSpec) {
  String name = id.getProgram();
  switch (id.getType()) {
    case MAPREDUCE:
      return appSpec.getMapReduce().containsKey(name);
    case SERVICE:
      return appSpec.getServices().containsKey(name);
    case SPARK:
      return appSpec.getSpark().containsKey(name);
    case WORKER:
      return appSpec.getWorkers().containsKey(name);
    case WORKFLOW:
      return appSpec.getWorkflows().containsKey(name);
    default:
      throw new IllegalArgumentException("Unexpected ProgramType " + id.getType());
  }
}
/**
 * Returns whether the application declares a program matching the given id,
 * by checking the program name against the spec map for its type.
 *
 * @throws IllegalArgumentException for a program type with no spec map here
 */
private boolean programExists(ProgramId id, ApplicationSpecification appSpec) {
  String name = id.getProgram();
  switch (id.getType()) {
    case FLOW:
      return appSpec.getFlows().containsKey(name);
    case MAPREDUCE:
      return appSpec.getMapReduce().containsKey(name);
    case SERVICE:
      return appSpec.getServices().containsKey(name);
    case SPARK:
      return appSpec.getSpark().containsKey(name);
    case WORKER:
      return appSpec.getWorkers().containsKey(name);
    case WORKFLOW:
      return appSpec.getWorkflows().containsKey(name);
    default:
      throw new IllegalArgumentException("Unexpected ProgramType " + id.getType());
  }
}
@Override public void process(final ApplicationDeployable input) throws Exception { List<ProgramDescriptor> programDescriptors = new ArrayList<>(); final ApplicationSpecification appSpec = input.getSpecification(); // Now, we iterate through all ProgramSpecification and generate programs Iterable<ProgramSpecification> specifications = Iterables.concat( appSpec.getMapReduce().values(), appSpec.getFlows().values(), appSpec.getWorkflows().values(), appSpec.getServices().values(), appSpec.getSpark().values(), appSpec.getWorkers().values() ); for (ProgramSpecification spec: specifications) { ProgramType type = ProgramTypes.fromSpecification(spec); ProgramId programId = input.getApplicationId().program(type, spec.getName()); programDescriptors.add(new ProgramDescriptor(programId, appSpec)); } emit(new ApplicationWithPrograms(input, programDescriptors)); } }
@Override public void process(final ApplicationDeployable input) throws Exception { List<ProgramDescriptor> programDescriptors = new ArrayList<>(); final ApplicationSpecification appSpec = input.getSpecification(); // Now, we iterate through all ProgramSpecification and generate programs Iterable<ProgramSpecification> specifications = Iterables.concat( appSpec.getMapReduce().values(), appSpec.getWorkflows().values(), appSpec.getServices().values(), appSpec.getSpark().values(), appSpec.getWorkers().values() ); for (ProgramSpecification spec: specifications) { ProgramType type = ProgramTypes.fromSpecification(spec); ProgramId programId = input.getApplicationId().program(type, spec.getName()); programDescriptors.add(new ProgramDescriptor(programId, appSpec)); } emit(new ApplicationWithPrograms(input, programDescriptors)); } }
/**
 * Serializes an {@link ApplicationSpecification} to JSON, field by field.
 * The member add order below defines the JSON output order and must not change.
 * "configuration" is optional and omitted entirely when null.
 */
@Override
public JsonElement serialize(ApplicationSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject json = new JsonObject();
  json.add("name", new JsonPrimitive(src.getName()));
  json.add("appVersion", new JsonPrimitive(src.getAppVersion()));
  if (src.getConfiguration() != null) {
    json.add("configuration", new JsonPrimitive(src.getConfiguration()));
  }
  json.add("artifactId", context.serialize(src.getArtifactId()));
  json.add("description", new JsonPrimitive(src.getDescription()));
  json.add("datasetModules", serializeMap(src.getDatasetModules(), context, String.class));
  json.add("datasetInstances", serializeMap(src.getDatasets(), context, DatasetCreationSpec.class));
  json.add("mapReduces", serializeMap(src.getMapReduce(), context, MapReduceSpecification.class));
  json.add("sparks", serializeMap(src.getSpark(), context, SparkSpecification.class));
  json.add("workflows", serializeMap(src.getWorkflows(), context, WorkflowSpecification.class));
  json.add("services", serializeMap(src.getServices(), context, ServiceSpecification.class));
  json.add("programSchedules", serializeMap(src.getProgramSchedules(), context, ScheduleCreationSpec.class));
  json.add("workers", serializeMap(src.getWorkers(), context, WorkerSpecification.class));
  json.add("plugins", serializeMap(src.getPlugins(), context, Plugin.class));
  return json;
}
@Override public void process(ApplicationWithPrograms input) { // use current time as creation time for app and all programs creationTime = String.valueOf(System.currentTimeMillis()); // add system metadata for apps ApplicationId appId = input.getApplicationId(); ApplicationSpecification appSpec = input.getSpecification(); new AppSystemMetadataWriter(metadataPublisher, appId, appSpec, creationTime).write(); // add system metadata for programs writeProgramSystemMetadata(appId, ProgramType.MAPREDUCE, appSpec.getMapReduce().values()); writeProgramSystemMetadata(appId, ProgramType.SERVICE, appSpec.getServices().values()); writeProgramSystemMetadata(appId, ProgramType.SPARK, appSpec.getSpark().values()); writeProgramSystemMetadata(appId, ProgramType.WORKER, appSpec.getWorkers().values()); writeProgramSystemMetadata(appId, ProgramType.WORKFLOW, appSpec.getWorkflows().values()); // Emit input to the next stage emit(input); }
@Override public void process(ApplicationWithPrograms input) throws Exception { // add system metadata for apps ApplicationId appId = input.getApplicationId(); ApplicationSpecification appSpec = input.getSpecification(); // only update creation time if this is a new app Map<String, String> properties = metadataStore.getProperties(MetadataScope.SYSTEM, appId.toMetadataEntity()); SystemMetadataWriter appSystemMetadataWriter = new AppSystemMetadataWriter(metadataStore, appId, appSpec, !properties.isEmpty()); appSystemMetadataWriter.write(); // add system metadata for programs writeProgramSystemMetadata(appId, ProgramType.FLOW, appSpec.getFlows().values()); writeProgramSystemMetadata(appId, ProgramType.MAPREDUCE, appSpec.getMapReduce().values()); writeProgramSystemMetadata(appId, ProgramType.SERVICE, appSpec.getServices().values()); writeProgramSystemMetadata(appId, ProgramType.SPARK, appSpec.getSpark().values()); writeProgramSystemMetadata(appId, ProgramType.WORKER, appSpec.getWorkers().values()); writeProgramSystemMetadata(appId, ProgramType.WORKFLOW, appSpec.getWorkflows().values()); // Emit input to the next stage emit(input); }
/** Verifies that the deployed spec carries the driver memory configured by ClassicWordCount. */
@Test
public void testMapReduceDriverResources() throws Exception {
  final ApplicationWithPrograms app = deployApp(AppWithMapReduce.class);
  MapReduceSpecification spec =
      app.getSpecification().getMapReduce().get(AppWithMapReduce.ClassicWordCount.class.getSimpleName());
  Assert.assertEquals(AppWithMapReduce.ClassicWordCount.MEMORY_MB, spec.getDriverResources().getMemoryMB());
}
/** Verifies that an added application's MapReduce program survives a store round trip. */
@Test
public void testAddApplication() {
  ApplicationId appId = new ApplicationId("account1", "application1");
  ApplicationSpecification spec = Specifications.from(new FooApp());
  store.addApplication(appId, spec);

  // Read the spec back and check the MapReduce program's class name.
  spec = store.getApplication(appId);
  Assert.assertNotNull(spec);
  Assert.assertEquals(FooMapReduceJob.class.getName(), spec.getMapReduce().get("mrJob1").getClassName());
}
@Test public void testUpdateChangedApplication() { ApplicationId id = new ApplicationId("account1", "application1"); store.addApplication(id, Specifications.from(new FooApp())); // update store.addApplication(id, Specifications.from(new ChangedFooApp())); ApplicationSpecification spec = store.getApplication(id); Assert.assertNotNull(spec); Assert.assertEquals(FooMapReduceJob.class.getName(), spec.getMapReduce().get("mrJob3").getClassName()); }