/**
 * Returns a compact identifier of the form {@code <uri>/<version>}.
 */
public String toShortString() {
  String uri = getUri().toString();
  String version = getVersion();
  return uri + "/" + version;
}
/**
 * Returns a verbose identifier of the form {@code <uri>/<version>[<description>]}.
 */
public String toLongString() {
  StringBuilder sb = new StringBuilder();
  sb.append(getUri().toString());
  sb.append("/").append(getVersion());
  sb.append("[").append(getDescription()).append("]");
  return sb.toString();
}
/**
 * Finds a {@link JobExecutionPlan} for the given job by scanning the known topologies for a
 * capability whose source and destination node names match the requested pair.
 *
 * <p>At most one plan is returned: the first matching topology wins and the search stops
 * immediately (intended limitation for IdentityFlowToJobSpecCompiler).</p>
 *
 * @param source      node name the flow must originate from
 * @param destination node name the flow must land on
 * @param jobSpec     job to attach to the selected topology's executor
 * @return a singleton list containing the matching plan, or an empty list if no topology matches
 * @throws ExecutionException   if resolving a topology's capabilities future fails
 * @throws InterruptedException if interrupted while waiting on the capabilities future
 */
private List<JobExecutionPlan> getJobExecutionPlans(String source, String destination, JobSpec jobSpec)
    throws ExecutionException, InterruptedException {
  List<JobExecutionPlan> jobExecutionPlans = new ArrayList<>();
  for (TopologySpec topologySpec : topologySpecMap.values()) {
    // Capabilities are exposed as a future; get() may block and can throw.
    Map<ServiceNode, ServiceNode> capabilities = topologySpec.getSpecExecutor().getCapabilities().get();
    for (Map.Entry<ServiceNode, ServiceNode> capability : capabilities.entrySet()) {
      // Parameterized logging: avoids the eager String.format cost when INFO is disabled.
      log.info("Evaluating current JobSpec: {} against TopologySpec: {} with capability of source: {} and destination: {} ",
          jobSpec.getUri(), topologySpec.getUri(), capability.getKey(), capability.getValue());
      if (source.equals(capability.getKey().getNodeName())
          && destination.equals(capability.getValue().getNodeName())) {
        JobExecutionPlan jobExecutionPlan = new JobExecutionPlan(jobSpec, topologySpec.getSpecExecutor());
        log.info("Current JobSpec: {} is executable on TopologySpec: {}. Added TopologySpec as candidate.",
            jobSpec.getUri(), topologySpec.getUri());
        log.info("Since we found a candidate executor, we will not try to compute more. "
            + "(Intended limitation for IdentityFlowToJobSpecCompiler)");
        jobExecutionPlans.add(jobExecutionPlan);
        // Early return: identity compiler deliberately stops at the first candidate.
        return jobExecutionPlans;
      }
    }
  }
  return jobExecutionPlans;
}
@Test (dependsOnMethods = "createTopologySpec") public void deleteTopologySpec() { // List Current Specs Collection<Spec> specs = topologyCatalog.getSpecs(); logger.info("[Before Delete] Number of specs: " + specs.size()); int i=0; for (Spec spec : specs) { TopologySpec topologySpec = (TopologySpec) spec; logger.info("[Before Delete] Spec " + i++ + ": " + gson.toJson(topologySpec)); } Assert.assertTrue(specs.size() == 1, "Spec store should initially have 1 Spec before deletion"); this.topologyCatalog.remove(topologySpec.getUri()); // List Specs after adding specs = topologyCatalog.getSpecs(); logger.info("[After Create] Number of specs: " + specs.size()); i = 0; for (Spec spec : specs) { topologySpec = (TopologySpec) spec; logger.info("[After Create] Spec " + i++ + ": " + gson.toJson(topologySpec)); } Assert.assertTrue(specs.size() == 0, "Spec store should be empty after deletion"); }
@Test public void testWeightedGraphConstruction(){ FlowSpec flowSpec = initFlowSpec(); TopologySpec topologySpec = initTopologySpec(TOPOLOGY_SPEC_STORE_DIR, TEST_SOURCE_NAME, TEST_HOP_NAME_A, TEST_HOP_NAME_B, TEST_SINK_NAME); this.compilerWithTemplateCalague.onAddSpec(topologySpec); // invocation of compileFlow trigger the weighedGraph construction this.compilerWithTemplateCalague.compileFlow(flowSpec); DirectedWeightedMultigraph<ServiceNode, FlowEdge> weightedGraph = compilerWithTemplateCalague.getWeightedGraph(); Assert.assertTrue(weightedGraph.containsVertex(vertexSource)); Assert.assertTrue(weightedGraph.containsVertex(vertexHopA)); Assert.assertTrue(weightedGraph.containsVertex(vertexHopB)); Assert.assertTrue(weightedGraph.containsVertex(vertexSink)); FlowEdge edgeSrc2A = new LoadBasedFlowEdgeImpl(vertexSource, vertexHopA, topologySpec.getSpecExecutor()); FlowEdge edgeA2B = new LoadBasedFlowEdgeImpl(vertexHopA, vertexHopB, topologySpec.getSpecExecutor()); FlowEdge edgeB2Sink = new LoadBasedFlowEdgeImpl(vertexHopB, vertexSink, topologySpec.getSpecExecutor()); Assert.assertTrue(weightedGraph.containsEdge(edgeSrc2A)); Assert.assertTrue(weightedGraph.containsEdge(edgeA2B)); Assert.assertTrue(weightedGraph.containsEdge(edgeB2Sink)); Assert.assertTrue(edgeEqual(weightedGraph.getEdge(vertexSource, vertexHopA), edgeSrc2A)); Assert.assertTrue(edgeEqual(weightedGraph.getEdge(vertexHopA, vertexHopB), edgeA2B)); Assert.assertTrue(edgeEqual(weightedGraph.getEdge(vertexHopB, vertexSink), edgeB2Sink)); this.compilerWithTemplateCalague.onDeleteSpec(topologySpec.getUri(), ""); }
/**
 * Exercises Dijkstra-based path finding over two overlapping topologies
 * (source-A-B-sink and source-B-C-sink). After an initial compile builds the
 * graph, edge weights are mutated in place so that the cheapest route becomes
 * source -> B -> C -> sink, then the path finder's edge sequence is asserted.
 * Order of statements matters: weights must be set between the two compileFlow calls.
 */
@Test (dependsOnMethods = "testWeightedGraphConstruction")
public void testDijkstraPathFinding() {
  FlowSpec flowSpec = initFlowSpec();
  TopologySpec topologySpec_1 = initTopologySpec(TOPOLOGY_SPEC_STORE_DIR,
      TEST_SOURCE_NAME, TEST_HOP_NAME_A, TEST_HOP_NAME_B, TEST_SINK_NAME);
  TopologySpec topologySpec_2 = initTopologySpec(TOPOLOGY_SPEC_STORE_DIR_SECOND,
      TEST_SOURCE_NAME, TEST_HOP_NAME_B, TEST_HOP_NAME_C, TEST_SINK_NAME);
  this.compilerWithTemplateCalague.onAddSpec(topologySpec_1);
  this.compilerWithTemplateCalague.onAddSpec(topologySpec_2);

  // Get the edge -> Change the weight -> Materialized the edge change back to graph -> compile again -> Assertion
  this.compilerWithTemplateCalague.compileFlow(flowSpec);
  DirectedWeightedMultigraph<ServiceNode, FlowEdge> weightedGraph = compilerWithTemplateCalague.getWeightedGraph();
  FlowEdge a2b = weightedGraph.getEdge(vertexHopA, vertexHopB);
  FlowEdge b2c = weightedGraph.getEdge(vertexHopB, vertexHopC);
  FlowEdge c2s = weightedGraph.getEdge(vertexHopC, vertexSink);
  // Make the A-hop expensive and the B->C->sink hops cheap.
  weightedGraph.setEdgeWeight(a2b, 1.99);
  weightedGraph.setEdgeWeight(b2c, 0.1);
  weightedGraph.setEdgeWeight(c2s, 0.2);

  // Best route: Src - B(1) - C(0.1) - sink (0.2)
  this.compilerWithTemplateCalague.compileFlow(flowSpec);
  List<FlowEdge> edgeList = dijkstraBasedPathFindingHelper(vertexSource, vertexSink, weightedGraph);

  FlowEdge src2b = weightedGraph.getEdge(vertexSource, vertexHopB);
  FlowEdge b2C = weightedGraph.getEdge(vertexHopB, vertexHopC);
  FlowEdge c2sink = weightedGraph.getEdge(vertexHopC, vertexSink);
  // The computed path must traverse exactly source -> B -> C -> sink, compared by edge identity.
  Assert.assertEquals(edgeList.get(0).getEdgeIdentity(), src2b.getEdgeIdentity());
  Assert.assertEquals(edgeList.get(1).getEdgeIdentity(), b2C.getEdgeIdentity());
  Assert.assertEquals(edgeList.get(2).getEdgeIdentity(), c2sink.getEdgeIdentity());

  // Clean up both topologies so later tests start from a fresh compiler state.
  this.compilerWithTemplateCalague.onDeleteSpec(topologySpec_1.getUri(), "");
  this.compilerWithTemplateCalague.onDeleteSpec(topologySpec_2.getUri(), "");
}
/** Short identifier: {@code <uri>/<version>}. */
public String toShortString() {
  return String.format("%s/%s", getUri(), getVersion());
}
/** Long identifier: {@code <uri>/<version>[<description>]}. */
public String toLongString() {
  return String.format("%s/%s[%s]", getUri(), getVersion(), getDescription());
}
/**
 * Scans the known topologies for one whose advertised capability matches the requested
 * source and destination node names, returning at most one execution plan for the job.
 * The first match wins and the scan stops there.
 */
private List<JobExecutionPlan> getJobExecutionPlans(String source, String destination, JobSpec jobSpec)
    throws ExecutionException, InterruptedException {
  List<JobExecutionPlan> plans = new ArrayList<>();
  for (TopologySpec candidate : topologySpecMap.values()) {
    Map<ServiceNode, ServiceNode> capabilities = candidate.getSpecExecutor().getCapabilities().get();
    for (Map.Entry<ServiceNode, ServiceNode> capability : capabilities.entrySet()) {
      log.info(String.format("Evaluating current JobSpec: %s against TopologySpec: %s with "
              + "capability of source: %s and destination: %s ",
          jobSpec.getUri(), candidate.getUri(), capability.getKey(), capability.getValue()));
      boolean sourceMatches = source.equals(capability.getKey().getNodeName());
      boolean destinationMatches = destination.equals(capability.getValue().getNodeName());
      if (!(sourceMatches && destinationMatches)) {
        continue;
      }
      JobExecutionPlan plan = new JobExecutionPlan(jobSpec, candidate.getSpecExecutor());
      log.info(String.format(
          "Current JobSpec: %s is executable on TopologySpec: %s. Added TopologySpec as candidate.",
          jobSpec.getUri(), candidate.getUri()));
      log.info("Since we found a candidate executor, we will not try to compute more. "
          + "(Intended limitation for IdentityFlowToJobSpecCompiler)");
      plans.add(plan);
      return plans;
    }
  }
  return plans;
}