/** Returns a detailed identifier of the form {@code <uri>/<version>[<description>]}. */
public String toLongString() {
  StringBuilder sb = new StringBuilder();
  sb.append(getUri().toString()).append("/").append(getVersion());
  sb.append("[").append(getDescription()).append("]");
  return sb.toString();
}
.builder(topologyConfig.getString(ServiceConfigKeys.TOPOLOGYSPEC_URI_KEY)) .withConfig(topologyConfig) .withDescription(description)
/**
 * Registers a newly added {@link TopologySpec} in the local topology map so it can
 * participate in subsequent flow compilation.
 *
 * <p>Logs the spec's configuration properties at INFO for debuggability.
 *
 * @param addedSpec the spec that was added; expected to be a {@link TopologySpec}
 */
@Override
public synchronized void onAddSpec(Spec addedSpec) {
  TopologySpec spec = (TopologySpec) addedSpec;
  log.info("Loading topology {}", spec.toLongString());
  // Typed entry instead of a raw Map.Entry (Properties extends Hashtable<Object, Object>).
  for (Map.Entry<Object, Object> entry : spec.getConfigAsProperties().entrySet()) {
    log.info("topo: {} --> {}", entry.getKey(), entry.getValue());
  }
  // Reuse the already-cast reference rather than casting addedSpec a second time.
  topologySpecMap.put(spec.getUri(), spec);
}
/** Returns a compact identifier of the form {@code <uri>/<version>}. */
public String toShortString() {
  String uri = getUri().toString();
  String version = getVersion();
  return uri + "/" + version;
}
/**
 * Finds a {@link TopologySpec} whose executor advertises a (source, destination)
 * capability matching the requested pair, and returns a single-element list with a
 * {@link JobExecutionPlan} bound to that executor.
 *
 * <p>Intentionally stops at the first match — the identity compiler only ever
 * produces one candidate executor per job.
 *
 * @param source      logical name of the node the job reads from
 * @param destination logical name of the node the job writes to
 * @param jobSpec     the job to be planned
 * @return a list with at most one {@link JobExecutionPlan}; empty when no registered
 *         topology advertises the requested capability
 * @throws ExecutionException   if retrieving an executor's capabilities fails
 * @throws InterruptedException if interrupted while waiting for capabilities
 */
private List<JobExecutionPlan> getJobExecutionPlans(String source, String destination, JobSpec jobSpec)
    throws ExecutionException, InterruptedException {
  List<JobExecutionPlan> jobExecutionPlans = new ArrayList<>();
  for (TopologySpec topologySpec : topologySpecMap.values()) {
    Map<ServiceNode, ServiceNode> capabilities = topologySpec.getSpecExecutor().getCapabilities().get();
    for (Map.Entry<ServiceNode, ServiceNode> capability : capabilities.entrySet()) {
      // Parameterized logging avoids eager String.format work when INFO is disabled.
      log.info("Evaluating current JobSpec: {} against TopologySpec: {} with capability of source: {} and destination: {} ",
          jobSpec.getUri(), topologySpec.getUri(), capability.getKey(), capability.getValue());
      if (source.equals(capability.getKey().getNodeName())
          && destination.equals(capability.getValue().getNodeName())) {
        JobExecutionPlan jobExecutionPlan = new JobExecutionPlan(jobSpec, topologySpec.getSpecExecutor());
        log.info("Current JobSpec: {} is executable on TopologySpec: {}. Added TopologySpec as candidate.",
            jobSpec.getUri(), topologySpec.getUri());
        log.info("Since we found a candidate executor, we will not try to compute more. "
            + "(Intended limitation for IdentityFlowToJobSpecCompiler)");
        jobExecutionPlans.add(jobExecutionPlan);
        return jobExecutionPlans;
      }
    }
  }
  return jobExecutionPlans;
}
@Test (dependsOnMethods = "createTopologySpec") public void deleteTopologySpec() { // List Current Specs Collection<Spec> specs = topologyCatalog.getSpecs(); logger.info("[Before Delete] Number of specs: " + specs.size()); int i=0; for (Spec spec : specs) { TopologySpec topologySpec = (TopologySpec) spec; logger.info("[Before Delete] Spec " + i++ + ": " + gson.toJson(topologySpec)); } Assert.assertTrue(specs.size() == 1, "Spec store should initially have 1 Spec before deletion"); this.topologyCatalog.remove(topologySpec.getUri()); // List Specs after adding specs = topologyCatalog.getSpecs(); logger.info("[After Create] Number of specs: " + specs.size()); i = 0; for (Spec spec : specs) { topologySpec = (TopologySpec) spec; logger.info("[After Create] Spec " + i++ + ": " + gson.toJson(topologySpec)); } Assert.assertTrue(specs.size() == 0, "Spec store should be empty after deletion"); }
/**
 * Verifies the factory builds both configured topologies with the expected
 * descriptions and versions, in declaration order.
 */
@Test
public void testGetTopologies() {
  Collection<TopologySpec> topologySpecs = _configBasedTopologySpecFactory.getTopologies();
  // assertEquals reports actual vs. expected automatically on failure, unlike assertTrue.
  Assert.assertEquals(topologySpecs.size(), 2, "Expected 2 topologies but received: " + topologySpecs.size());
  Iterator<TopologySpec> topologySpecIterator = topologySpecs.iterator();

  TopologySpec topologySpec1 = topologySpecIterator.next();
  Assert.assertEquals(topologySpec1.getDescription(), "Topology for cluster",
      "Description did not match with construction");
  Assert.assertEquals(topologySpec1.getVersion(), "1", "Version did not match with construction");

  TopologySpec topologySpec2 = topologySpecIterator.next();
  Assert.assertEquals(topologySpec2.getDescription(), "Topology for Azkaban",
      "Description did not match with construction");
  Assert.assertEquals(topologySpec2.getVersion(), "2", "Version did not match with construction");
}
/** * This method first retrieves the logical names of all the {@link org.apache.gobblin.runtime.api.SpecExecutor}s * for this edge and returns the SpecExecutors from the {@link TopologySpec} map. * @param edgeConfig containing the logical names of SpecExecutors for this edge. * @return a {@link List<SpecExecutor>}s for this edge. */ private List<SpecExecutor> getSpecExecutors(Config edgeConfig) throws URISyntaxException { //Get the logical names of SpecExecutors where the FlowEdge can be executed. List<String> specExecutorNames = ConfigUtils.getStringList(edgeConfig, FlowGraphConfigurationKeys.FLOW_EDGE_SPEC_EXECUTORS_KEY); //Load all the SpecExecutor configurations for this FlowEdge from the SpecExecutor Catalog. List<SpecExecutor> specExecutors = new ArrayList<>(); for (String specExecutorName: specExecutorNames) { URI specExecutorUri = new URI(specExecutorName); specExecutors.add(this.topologySpecMap.get(specExecutorUri).getSpecExecutor()); } return specExecutors; }
/**
 * Persists the given {@link TopologySpec} to the backing spec store and notifies
 * registered listeners of the addition.
 *
 * @param spec the spec to add; must be a {@link TopologySpec} and non-null
 * @throws IllegalStateException if this catalog service is not RUNNING
 * @throws RuntimeException      wrapping any {@link IOException} from the spec store
 */
@Override
public void put(Spec spec) {
  try {
    Preconditions.checkState(state() == Service.State.RUNNING,
        String.format("%s is not running.", this.getClass().getName()));
    Preconditions.checkNotNull(spec);
    // Parameterized logging defers string construction until INFO is known to be enabled.
    log.info("Adding TopologySpec with URI: {} and Config: {}", spec.getUri(),
        ((TopologySpec) spec).getConfigAsProperties());
    specStore.addSpec(spec);
    this.listeners.onAddSpec(spec);
  } catch (IOException e) {
    throw new RuntimeException("Cannot add Spec to Spec store: " + spec, e);
  }
}
/**
 * Builds the {@link TopologySpec}, validating that the mandatory URI and version
 * were supplied to the builder.
 *
 * @return a fully constructed {@link TopologySpec}
 * @throws NullPointerException if the URI or version was never set
 */
public TopologySpec build() {
  // Fail fast with descriptive messages when mandatory fields are missing.
  Preconditions.checkNotNull(this.uri, "TopologySpec URI must be set before build()");
  Preconditions.checkNotNull(this.version, "TopologySpec version must be set before build()");
  // NOTE(review): getSpecExceutorInstance() spelling is the existing builder API name; kept as-is.
  return new TopologySpec(getURI(), getVersion(), getDescription(), getConfig(),
      getConfigAsProperties(), getSpecExceutorInstance());
}
/** Delegates to {@link #toShortString()} for a concise default representation. */
@Override
public String toString() {
  return this.toShortString();
}
/**
 * Verifies that adding a topology and compiling a flow materializes the topology's
 * capabilities into the compiler's weighted multigraph: all four vertices and the
 * three expected edges must be present, and each stored edge must match a freshly
 * built LoadBasedFlowEdgeImpl with the same endpoints and executor.
 */
@Test
public void testWeightedGraphConstruction(){
  FlowSpec flowSpec = initFlowSpec();
  TopologySpec topologySpec = initTopologySpec(TOPOLOGY_SPEC_STORE_DIR, TEST_SOURCE_NAME, TEST_HOP_NAME_A, TEST_HOP_NAME_B, TEST_SINK_NAME);
  this.compilerWithTemplateCalague.onAddSpec(topologySpec);

  // invocation of compileFlow trigger the weighedGraph construction
  this.compilerWithTemplateCalague.compileFlow(flowSpec);

  DirectedWeightedMultigraph<ServiceNode, FlowEdge> weightedGraph = compilerWithTemplateCalague.getWeightedGraph();
  // Every node declared by the topology should appear as a vertex.
  Assert.assertTrue(weightedGraph.containsVertex(vertexSource));
  Assert.assertTrue(weightedGraph.containsVertex(vertexHopA));
  Assert.assertTrue(weightedGraph.containsVertex(vertexHopB));
  Assert.assertTrue(weightedGraph.containsVertex(vertexSink));

  // Build reference edges with the same endpoints and executor for comparison.
  FlowEdge edgeSrc2A = new LoadBasedFlowEdgeImpl(vertexSource, vertexHopA, topologySpec.getSpecExecutor());
  FlowEdge edgeA2B = new LoadBasedFlowEdgeImpl(vertexHopA, vertexHopB, topologySpec.getSpecExecutor());
  FlowEdge edgeB2Sink = new LoadBasedFlowEdgeImpl(vertexHopB, vertexSink, topologySpec.getSpecExecutor());

  Assert.assertTrue(weightedGraph.containsEdge(edgeSrc2A));
  Assert.assertTrue(weightedGraph.containsEdge(edgeA2B));
  Assert.assertTrue(weightedGraph.containsEdge(edgeB2Sink));

  // The edge stored in the graph must be equivalent to the reference edge.
  Assert.assertTrue(edgeEqual(weightedGraph.getEdge(vertexSource, vertexHopA), edgeSrc2A));
  Assert.assertTrue(edgeEqual(weightedGraph.getEdge(vertexHopA, vertexHopB), edgeA2B));
  Assert.assertTrue(edgeEqual(weightedGraph.getEdge(vertexHopB, vertexSink), edgeB2Sink));

  // Remove the topology so dependent tests start from a clean compiler state.
  this.compilerWithTemplateCalague.onDeleteSpec(topologySpec.getUri(), "");
}
/** Short form: the spec URI followed by "/" and its version. */
public String toShortString() {
  return String.valueOf(getUri()) + "/" + getVersion();
}
/**
 * Verifies Dijkstra-based path finding over the weighted graph: after registering two
 * overlapping topologies and lowering the weights along Src -> B -> C -> Sink, the
 * shortest path returned by the helper must follow exactly that route.
 */
@Test (dependsOnMethods = "testWeightedGraphConstruction")
public void testDijkstraPathFinding(){
  FlowSpec flowSpec = initFlowSpec();
  // Two topologies: Src-A-B-Sink and Src-B-C-Sink, sharing source/sink/hop B.
  TopologySpec topologySpec_1 = initTopologySpec(TOPOLOGY_SPEC_STORE_DIR, TEST_SOURCE_NAME, TEST_HOP_NAME_A, TEST_HOP_NAME_B, TEST_SINK_NAME);
  TopologySpec topologySpec_2 = initTopologySpec(TOPOLOGY_SPEC_STORE_DIR_SECOND, TEST_SOURCE_NAME, TEST_HOP_NAME_B, TEST_HOP_NAME_C, TEST_SINK_NAME);
  this.compilerWithTemplateCalague.onAddSpec(topologySpec_1);
  this.compilerWithTemplateCalague.onAddSpec(topologySpec_2);

  // Get the edge -> Change the weight -> Materialized the edge change back to graph -> compile again -> Assertion
  this.compilerWithTemplateCalague.compileFlow(flowSpec);
  DirectedWeightedMultigraph<ServiceNode, FlowEdge> weightedGraph = compilerWithTemplateCalague.getWeightedGraph();
  FlowEdge a2b= weightedGraph.getEdge(vertexHopA, vertexHopB);
  FlowEdge b2c = weightedGraph.getEdge(vertexHopB, vertexHopC);
  FlowEdge c2s = weightedGraph.getEdge(vertexHopC, vertexSink);
  // Make the A-hop expensive and the B->C->Sink legs cheap.
  weightedGraph.setEdgeWeight(a2b, 1.99);
  weightedGraph.setEdgeWeight(b2c, 0.1);
  weightedGraph.setEdgeWeight(c2s, 0.2);

  // Best route: Src - B(1) - C(0.1) - sink (0.2)
  this.compilerWithTemplateCalague.compileFlow(flowSpec);
  List<FlowEdge> edgeList = dijkstraBasedPathFindingHelper(vertexSource, vertexSink, weightedGraph);

  FlowEdge src2b = weightedGraph.getEdge(vertexSource, vertexHopB);
  FlowEdge b2C = weightedGraph.getEdge(vertexHopB, vertexHopC);
  FlowEdge c2sink = weightedGraph.getEdge(vertexHopC, vertexSink);
  // Compare by edge identity rather than object reference.
  Assert.assertEquals(edgeList.get(0).getEdgeIdentity(), src2b.getEdgeIdentity());
  Assert.assertEquals(edgeList.get(1).getEdgeIdentity(), b2C.getEdgeIdentity());
  Assert.assertEquals(edgeList.get(2).getEdgeIdentity(), c2sink.getEdgeIdentity());

  // Remove both topologies so dependent tests start from a clean compiler state.
  this.compilerWithTemplateCalague.onDeleteSpec(topologySpec_1.getUri(), "");
  this.compilerWithTemplateCalague.onDeleteSpec(topologySpec_2.getUri(), "");
}
private void weightGraphGenerateHelper(TopologySpec topologySpec) { try { Map<ServiceNode, ServiceNode> capabilities = topologySpec.getSpecExecutor().getCapabilities().get(); for (Map.Entry<ServiceNode, ServiceNode> capability : capabilities.entrySet()) { BaseServiceNodeImpl sourceNode = new BaseServiceNodeImpl(capability.getKey().getNodeName()); BaseServiceNodeImpl targetNode = new BaseServiceNodeImpl(capability.getValue().getNodeName()); if (!weightedGraph.containsVertex(sourceNode)) { weightedGraph.addVertex(sourceNode); } if (!weightedGraph.containsVertex(targetNode)) { weightedGraph.addVertex(targetNode); } FlowEdge flowEdge = new LoadBasedFlowEdgeImpl(sourceNode, targetNode, defaultFlowEdgeProps, topologySpec.getSpecExecutor()); // In Multi-Graph if flowEdge existed, just skip it. if (!weightedGraph.containsEdge(flowEdge)) { weightedGraph.addEdge(sourceNode, targetNode, flowEdge); } } } catch (InterruptedException | ExecutionException e) { Instrumented.markMeter(this.flowCompilationFailedMeter); throw new RuntimeException("Cannot determine topology capabilities", e); } }
/**
 * Persists the given {@link TopologySpec} to the backing spec store and notifies
 * registered listeners of the addition.
 *
 * @param spec the spec to add; must be a {@link TopologySpec} and non-null
 * @throws IllegalStateException if this catalog service is not RUNNING
 * @throws RuntimeException      wrapping any {@link IOException} from the spec store
 */
@Override
public void put(Spec spec) {
  try {
    Preconditions.checkState(state() == Service.State.RUNNING,
        String.format("%s is not running.", this.getClass().getName()));
    Preconditions.checkNotNull(spec);
    // Parameterized logging defers string construction until INFO is known to be enabled.
    log.info("Adding TopologySpec with URI: {} and Config: {}", spec.getUri(),
        ((TopologySpec) spec).getConfigAsProperties());
    specStore.addSpec(spec);
    this.listeners.onAddSpec(spec);
  } catch (IOException e) {
    throw new RuntimeException("Cannot add Spec to Spec store: " + spec, e);
  }
}
/**
 * Builds the {@link TopologySpec}, validating that the mandatory URI and version
 * were supplied to the builder.
 *
 * @return a fully constructed {@link TopologySpec}
 * @throws NullPointerException if the URI or version was never set
 */
public TopologySpec build() {
  // Fail fast with descriptive messages when mandatory fields are missing.
  Preconditions.checkNotNull(this.uri, "TopologySpec URI must be set before build()");
  Preconditions.checkNotNull(this.version, "TopologySpec version must be set before build()");
  // NOTE(review): getSpecExceutorInstance() spelling is the existing builder API name; kept as-is.
  return new TopologySpec(getURI(), getVersion(), getDescription(), getConfig(),
      getConfigAsProperties(), getSpecExceutorInstance());
}
/** Uses the short string form as the canonical textual representation. */
@Override
public String toString() {
  String shortForm = toShortString();
  return shortForm;
}
/** Long form: {@code <uri>/<version>[<description>]}. */
public String toLongString() {
  String base = getUri().toString() + "/" + getVersion();
  return base + "[" + getDescription() + "]";
}
/**
 * Finds a {@link TopologySpec} whose executor advertises a (source, destination)
 * capability matching the requested pair, and returns a single-element list with a
 * {@link JobExecutionPlan} bound to that executor.
 *
 * <p>Intentionally stops at the first match — the identity compiler only ever
 * produces one candidate executor per job.
 *
 * @param source      logical name of the node the job reads from
 * @param destination logical name of the node the job writes to
 * @param jobSpec     the job to be planned
 * @return a list with at most one {@link JobExecutionPlan}; empty when no registered
 *         topology advertises the requested capability
 * @throws ExecutionException   if retrieving an executor's capabilities fails
 * @throws InterruptedException if interrupted while waiting for capabilities
 */
private List<JobExecutionPlan> getJobExecutionPlans(String source, String destination, JobSpec jobSpec)
    throws ExecutionException, InterruptedException {
  List<JobExecutionPlan> jobExecutionPlans = new ArrayList<>();
  for (TopologySpec topologySpec : topologySpecMap.values()) {
    Map<ServiceNode, ServiceNode> capabilities = topologySpec.getSpecExecutor().getCapabilities().get();
    for (Map.Entry<ServiceNode, ServiceNode> capability : capabilities.entrySet()) {
      // Parameterized logging avoids eager String.format work when INFO is disabled.
      log.info("Evaluating current JobSpec: {} against TopologySpec: {} with capability of source: {} and destination: {} ",
          jobSpec.getUri(), topologySpec.getUri(), capability.getKey(), capability.getValue());
      if (source.equals(capability.getKey().getNodeName())
          && destination.equals(capability.getValue().getNodeName())) {
        JobExecutionPlan jobExecutionPlan = new JobExecutionPlan(jobSpec, topologySpec.getSpecExecutor());
        log.info("Current JobSpec: {} is executable on TopologySpec: {}. Added TopologySpec as candidate.",
            jobSpec.getUri(), topologySpec.getUri());
        log.info("Since we found a candidate executor, we will not try to compute more. "
            + "(Intended limitation for IdentityFlowToJobSpecCompiler)");
        jobExecutionPlans.add(jobExecutionPlan);
        return jobExecutionPlans;
      }
    }
  }
  return jobExecutionPlans;
}