/** * Return the {@link ArtifactType} for a {@link ModuleDefinition} in the context * of a defined stream. * * @param moduleDefinition the module for which to determine the type * @return {@link ArtifactType} for the given module */ private ArtifactType determineModuleType(ModuleDefinition moduleDefinition) { // Parser has already taken care of source/sink named channels, etc boolean hasOutput = moduleDefinition.getParameters().containsKey(BindingProperties.OUTPUT_BINDING_KEY); boolean hasInput = moduleDefinition.getParameters().containsKey(BindingProperties.INPUT_BINDING_KEY); if (hasInput && hasOutput) { return ArtifactType.processor; } else if (hasInput) { return ArtifactType.sink; } else if (hasOutput) { return ArtifactType.source; } else { throw new IllegalStateException(moduleDefinition + " had neither input nor output set"); } }
/**
 * Return {@code true} if the provided module is a consumer of partitioned data.
 * This is determined either by the deployment properties for the module
 * or whether the previous (upstream) module is publishing partitioned data.
 *
 * @param module module for which to determine if it is consuming partitioned data
 * @param moduleDeploymentProperties deployment properties for the module
 * @param upstreamModuleSupportsPartition if true, previous (upstream) module
 * in the stream publishes partitioned data
 * @return true if this module consumes partitioned data
 */
private boolean isPartitionedConsumer(ModuleDefinition module,
		Map<String, String> moduleDeploymentProperties,
		boolean upstreamModuleSupportsPartition) {
	// "true".equalsIgnoreCase(null) is false, so a single get() replaces the
	// original containsKey() + get() double lookup with identical semantics.
	return upstreamModuleSupportsPartition
			|| (module.getParameters().containsKey(BindingProperties.INPUT_BINDING_KEY)
				&& "true".equalsIgnoreCase(moduleDeploymentProperties.get(BindingProperties.PARTITIONED_PROPERTY)));
}
/**
 * Build the environment variables used to launch the given module.
 *
 * <p>The module coordinates are exposed under the {@code modules} key, and both the
 * definition parameters and the deployment properties are qualified for module index
 * {@code 0}; deployment properties are applied last, so on a key collision they win.
 *
 * @param moduleDeploymentRequest the request describing the module to launch
 * @return environment variables derived from the accumulated launch arguments
 */
Map<String, String> toModuleLauncherEnvironment(ModuleDeploymentRequest moduleDeploymentRequest) {
	Map<String, String> launchArgs = new HashMap<>();
	launchArgs.put("modules", moduleDeploymentRequest.getCoordinates().toString());
	launchArgs.putAll(ModuleArgumentQualifier.qualifyArgs(0, moduleDeploymentRequest.getDefinition().getParameters()));
	launchArgs.putAll(ModuleArgumentQualifier.qualifyArgs(0, moduleDeploymentRequest.getDeploymentProperties()));
	return toEnvironmentVariables(launchArgs);
}
/**
 * Looks at parameters of a module that represent maven coordinates and, if a simple name has been used,
 * resolve it from the {@link ArtifactRegistry}.
 *
 * @param module the module whose {@code includes} parameter should be normalized
 * @return the original module when it has no {@code includes} parameter, otherwise a
 * copy with each entry rewritten to full maven coordinates
 */
private ModuleDefinition postProcessLibraryProperties(ModuleDefinition module) {
	String includes = module.getParameters().get("includes");
	if (includes == null) {
		return module;
	}
	String[] libs = StringUtils.delimitedListToStringArray(includes, ",", " \t");
	for (int i = 0; i < libs.length; i++) {
		libs[i] = resolveLibraryCoordinates(libs[i]).toString();
	}
	return ModuleDefinition.Builder.from(module)
			.setParameter("includes", StringUtils.arrayToCommaDelimitedString(libs))
			.build();
}

/**
 * Resolve a single {@code includes} entry: either parse it directly as maven
 * coordinates, or fall back to looking it up as a registered library.
 *
 * @param lib the raw entry from the {@code includes} parameter
 * @return the resolved coordinates for the entry
 * @throws IllegalArgumentException if the entry is neither parseable coordinates
 * nor a registered library
 */
private ArtifactCoordinates resolveLibraryCoordinates(String lib) {
	try {
		return ArtifactCoordinates.parse(lib);
	}
	catch (IllegalArgumentException e) {
		ArtifactRegistration registration = registry.find(lib, ArtifactType.library);
		if (registration == null) {
			throw new IllegalArgumentException("'" + lib + "' could not be parsed as maven coordinates and is not a registered library");
		}
		return registration.getCoordinates();
	}
}
@Override public ModuleDeploymentId deploy(ModuleDeploymentRequest request) { int count = request.getCount(); ModuleCoordinates coordinates = request.getCoordinates(); ModuleDefinition definition = request.getDefinition(); ModuleDeploymentId id = ModuleDeploymentId.fromModuleDefinition(definition); String clusterId = moduleDeploymentIdToClusterId(id); String module = coordinates.toString(); Map<String, String> definitionParameters = definition.getParameters(); Map<String, String> deploymentProperties = request.getDeploymentProperties(); logger.info("deploying request for definition: " + definition); logger.info("deploying module: " + module); logger.info("definitionParameters: " + definitionParameters); logger.info("deploymentProperties: " + deploymentProperties); // TODO: using default app name "app" until we start to customise // via deploymentProperties Message<Events> message = MessageBuilder.withPayload(Events.DEPLOY) .setHeader(YarnCloudAppStateMachine.HEADER_APP_VERSION, "app") .setHeader(YarnCloudAppStateMachine.HEADER_CLUSTER_ID, clusterId) .setHeader(YarnCloudAppStateMachine.HEADER_COUNT, count) .setHeader(YarnCloudAppStateMachine.HEADER_MODULE, module) .setHeader(YarnCloudAppStateMachine.HEADER_DEFINITION_PARAMETERS, definitionParameters) .build(); stateMachine.sendEvent(message); return id; }