/**
 * Copy constructor that overrides only the class name.
 * Delegates to the main constructor, reusing {@code other}'s name, description,
 * flowlets and connections unchanged.
 *
 * @param className fully-qualified class name to record on the new specification
 * @param other existing specification whose remaining fields are copied
 */
public DefaultFlowSpecification(String className, FlowSpecification other) {
  this(className, other.getName(), other.getDescription(), other.getFlowlets(), other.getConnections());
}
// Builds a (node, queue-name) -> queue-specification lookup table for the flow
// by walking every flowlet connection in the input specification.
// NOTE(review): this snippet is truncated — the loop body that populates the
// table from (source, target) pairs is not visible here.
Table<Node, String, Set<QueueSpecification>> table = HashBasedTable.create();
String flow = input.getName();
Map<String, FlowletDefinition> flowlets = input.getFlowlets();
for (FlowletConnection connection : input.getConnections()) {
  final String source = connection.getSourceName();
  final String target = connection.getTargetName();
@Override protected ProgramController launch(Program program, ProgramOptions options, File hConfFile, File cConfFile, ApplicationLauncher launcher) { // Extract and verify parameters FlowSpecification flowSpec = program.getSpecification(); ProgramType processorType = program.getType(); Preconditions.checkNotNull(processorType, "Missing processor type."); Preconditions.checkArgument(processorType == ProgramType.FLOW, "Only FLOW process type is supported."); try { Preconditions.checkNotNull(flowSpec, "Missing FlowSpecification for %s", program.getName()); for (FlowletDefinition flowletDefinition : flowSpec.getFlowlets().values()) { int maxInstances = flowletDefinition.getFlowletSpec().getMaxInstances(); Preconditions.checkArgument(flowletDefinition.getInstances() <= maxInstances, "Flowlet %s can have a maximum of %s instances", flowletDefinition.getFlowletSpec().getName(), maxInstances); } LOG.info("Configuring flowlets queues"); Multimap<String, QueueName> flowletQueues = FlowUtils.configureQueue(program, flowSpec, queueAdmin); // Launch flowlet program runners LOG.info("Launching distributed flow: " + program.getName() + ":" + flowSpec.getName()); TwillController controller = launcher.launch(new FlowTwillApplication(program, flowSpec, hConfFile, cConfFile, eventHandler)); DistributedFlowletInstanceUpdater instanceUpdater = new DistributedFlowletInstanceUpdater(program, controller, queueAdmin, flowletQueues); return new FlowTwillProgramController(program.getName(), controller, instanceUpdater).startListen(); } catch (Exception e) { throw Throwables.propagate(e); } }
/**
 * Resumes every flowlet controller in this flow, under the flow lock.
 * Uses {@code successfulAsList} so one flowlet's failure does not abort the
 * resume of the others; waits for all resume attempts to settle before returning.
 *
 * @throws Exception if waiting on the combined future fails
 */
@Override
protected void doResume() throws Exception {
  // Parameterized logging avoids eager string concatenation.
  LOG.info("Resuming flow: {}", flowSpec.getName());
  lock.lock();
  try {
    Futures.successfulAsList(
      Iterables.transform(flowlets.values(),
                          new Function<ProgramController, ListenableFuture<ProgramController>>() {
        @Override
        public ListenableFuture<ProgramController> apply(ProgramController input) {
          return input.resume();
        }
      })).get();
  } finally {
    lock.unlock();
  }
  LOG.info("Flow resumed: {}", flowSpec.getName());
}
/**
 * Serializes the given flow specification as JSON onto {@code appendable}.
 * Requires a schema generator to be configured; each flowlet's schemas are
 * generated first so the serialized form is complete.
 *
 * @param flowSpec specification to serialize
 * @param appendable sink receiving the JSON text
 * @throws IOException if schema generation fails for any flowlet type
 */
public void toJson(FlowSpecification flowSpec, Appendable appendable) throws IOException {
  Preconditions.checkState(schemaGenerator != null,
                           "No schema generator is configured. Fail to serialize to json");
  try {
    for (FlowletDefinition definition : flowSpec.getFlowlets().values()) {
      definition.generateSchema(schemaGenerator);
    }
    gson.toJson(flowSpec, FlowSpecification.class, appendable);
  } catch (UnsupportedTypeException cause) {
    // Surface schema-generation problems through the declared IOException.
    throw new IOException(cause);
  }
}
// Builds the TwillSpecification for this flow: names the Twill application
// "flow.<flowName>" and registers one runnable per flowlet.
// NOTE(review): this snippet is truncated — the loop body that adds each
// flowlet runnable and the final build step are not visible here.
@Override
public TwillSpecification configure() {
  TwillSpecification.Builder.MoreRunnable moreRunnable = TwillSpecification.Builder.with()
    .setName(String.format("%s.%s", ProgramType.FLOW.name().toLowerCase(), spec.getName()))
    .withRunnable();
  List<String> flowletNames = Lists.newArrayList();
  TwillSpecification.Builder.RunnableSetter runnableSetter = null;
  for (Map.Entry<String, FlowletDefinition> entry : spec.getFlowlets().entrySet()) {
    FlowletDefinition flowletDefinition = entry.getValue();
    FlowletSpecification flowletSpec = flowletDefinition.getFlowletSpec();
/**
 * Suspends every flowlet controller in this flow, under the flow lock.
 * Uses {@code successfulAsList} so one flowlet's failure does not abort the
 * suspend of the others; waits for all suspend attempts to settle before returning.
 *
 * @throws Exception if waiting on the combined future fails
 */
@Override
protected void doSuspend() throws Exception {
  // Parameterized logging avoids eager string concatenation.
  LOG.info("Suspending flow: {}", flowSpec.getName());
  lock.lock();
  try {
    Futures.successfulAsList(
      Iterables.transform(flowlets.values(),
                          new Function<ProgramController, ListenableFuture<ProgramController>>() {
        @Override
        public ListenableFuture<ProgramController> apply(ProgramController input) {
          return input.suspend();
        }
      })).get();
  } finally {
    lock.unlock();
  }
  LOG.info("Flow suspended: {}", flowSpec.getName());
}
/**
 * Builds a {@link SchemaCache} covering every input and output schema of every
 * flowlet in the program, resolved against the program's class loader.
 *
 * @param program program whose flowlet schemas are collected
 * @return a cache seeded with all flowlet input/output schemas
 * @throws Exception propagated from schema collection or cache construction
 */
private SchemaCache createSchemaCache(Program program) throws Exception {
  ImmutableSet.Builder<Schema> allSchemas = ImmutableSet.builder();
  for (FlowletDefinition definition : program.getSpecification().getFlowlets().values()) {
    // Flatten the per-port schema collections into one set.
    allSchemas.addAll(Iterables.concat(definition.getInputs().values()));
    allSchemas.addAll(Iterables.concat(definition.getOutputs().values()));
  }
  return new SchemaCache(allSchemas.build(), program.getClassLoader());
}
/**
 * Gson serializer for {@link FlowSpecification}: emits className, name,
 * description, the flowlet map and the connection list as a JSON object.
 * Key insertion order is kept identical to the original serializer.
 */
@Override
public JsonElement serialize(FlowSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject json = new JsonObject();
  json.add("className", new JsonPrimitive(src.getClassName()));
  json.add("name", new JsonPrimitive(src.getName()));
  json.add("description", new JsonPrimitive(src.getDescription()));
  json.add("flowlets", serializeMap(src.getFlowlets(), context, FlowletDefinition.class));
  json.add("connections", serializeList(src.getConnections(), context, FlowletConnection.class));
  return json;
}
/**
 * Stops every flowlet controller in this flow, under the flow lock.
 * Uses {@code successfulAsList} so one flowlet's failure does not abort the
 * stop of the others; waits for all stop attempts to settle before returning.
 *
 * @throws Exception if waiting on the combined future fails
 */
@Override
protected void doStop() throws Exception {
  // Parameterized logging avoids eager string concatenation.
  LOG.info("Stopping flow: {}", flowSpec.getName());
  lock.lock();
  try {
    Futures.successfulAsList(
      Iterables.transform(flowlets.values(),
                          new Function<ProgramController, ListenableFuture<ProgramController>>() {
        @Override
        public ListenableFuture<ProgramController> apply(ProgramController input) {
          return input.stop();
        }
      })).get();
  } finally {
    lock.unlock();
  }
  LOG.info("Flow stopped: {}", flowSpec.getName());
}
/**
 * Changes the live instance count of one flowlet in a running distributed flow.
 * The sequence below is strictly ordered: the flowlet is suspended, its consumer
 * queues are reconfigured for the new group size, the Twill instance count is
 * changed, and only then is the flowlet resumed. Do not reorder these steps.
 *
 * @param flowletId name of the flowlet to resize
 * @param newInstanceCount desired instance count; must not exceed the flowlet's
 *        configured maximum
 * @param oldInstanceCount current instance count, used to wait until the
 *        expected number of instances is reported before resizing
 * @throws IllegalArgumentException if newInstanceCount exceeds the maximum
 * @throws Exception if any remote command or the queue reconfiguration fails
 */
void update(String flowletId, int newInstanceCount, int oldInstanceCount) throws Exception {
  FlowletDefinition flowletDefinition = program.getSpecification().getFlowlets().get(flowletId);
  int maxInstances = flowletDefinition.getFlowletSpec().getMaxInstances();
  Preconditions.checkArgument(newInstanceCount <= maxInstances,
                              "Flowlet %s can have a maximum of %s instances", flowletId, maxInstances);
  // Make sure all currently-expected instances are up before touching them.
  waitForInstances(flowletId, oldInstanceCount);
  // Quiesce the flowlet so queue consumer groups can be reconfigured safely.
  twillController.sendCommand(flowletId, ProgramCommands.SUSPEND).get();
  FlowUtils.reconfigure(consumerQueues.get(flowletId),
                        FlowUtils.generateConsumerGroupId(program, flowletId), newInstanceCount, queueAdmin);
  twillController.changeInstances(flowletId, newInstanceCount).get();
  twillController.sendCommand(flowletId, ProgramCommands.RESUME).get();
}
@Override public void startFlow(File jarPath, String className, Map<String, String> userArgs) { try { Program program = deployClient.createProgram(jarPath, className, jarUnpackDir); String flowName = program.getSpecification().getName(); if (listAllFlows().contains(flowName)) { throw new Exception("Flow with the same name is running! Stop or Delete the Flow before starting again"); } Location jarInHDFS = location.append(flowName); //Delete any existing JAR with the same flowName. jarInHDFS.delete(); jarInHDFS.createNew(); //Copy the JAR to HDFS. ByteStreams.copy(Locations.newInputSupplier(program.getJarLocation()), Locations.newOutputSupplier(jarInHDFS)); //Start the Flow. deployClient.startFlow(program, userArgs); } catch (Exception e) { LOG.error(e.getMessage(), e); } }
// Iterates every (flowletName, definition) pair and then every instance id
// from 0 to instanceCount-1 for that flowlet.
// NOTE(review): this snippet is truncated — the per-instance loop body is not
// visible here.
for (Map.Entry<String, FlowletDefinition> entry : flowSpec.getFlowlets().entrySet()) {
  int instanceCount = entry.getValue().getInstances();
  for (int instanceId = 0; instanceId < instanceCount; instanceId++) {
// Derives a queue consumer group id for each flowlet in the specification.
// NOTE(review): this snippet is truncated — how groupId is used is not
// visible here.
for (Map.Entry<String, FlowletDefinition> entry : flowSpec.getFlowlets().entrySet()) {
  String flowletId = entry.getKey();
  long groupId = FlowUtils.generateConsumerGroupId(program, flowletId);
/**
 * Runs a FLOW program in-process.
 * Validates the program type and per-flowlet instance limits, then generates a
 * run id, configures the flowlet queues, creates one controller per flowlet
 * instance, and returns a composite controller over all of them.
 *
 * @param program the FLOW program to run
 * @param options runtime options, recorded against the generated run id
 * @return a controller for the running flow
 * @throws IllegalArgumentException if the program is not a FLOW or a flowlet
 *         exceeds its configured maximum instance count
 */
@Override
public ProgramController run(Program program, ProgramOptions options) {
  // Extract and verify parameters
  FlowSpecification flowSpec = program.getSpecification();
  ProgramType processorType = program.getType();
  Preconditions.checkNotNull(processorType, "Missing processor type.");
  Preconditions.checkArgument(processorType == ProgramType.FLOW, "Only FLOW process type is supported.");
  Preconditions.checkNotNull(flowSpec, "Missing FlowSpecification for %s", program.getName());
  // Reject the run up front if any flowlet is configured beyond its allowed maximum.
  for (FlowletDefinition flowletDefinition : flowSpec.getFlowlets().values()) {
    int maxInstances = flowletDefinition.getFlowletSpec().getMaxInstances();
    Preconditions.checkArgument(flowletDefinition.getInstances() <= maxInstances,
                                "Flowlet %s can have a maximum of %s instances",
                                flowletDefinition.getFlowletSpec().getName(), maxInstances);
  }
  try {
    // Launch flowlet program runners
    RunId runId = RunIds.generate();
    // NOTE(review): options are registered before createFlowlets; if that call
    // throws, this entry may be left behind in programOptions — confirm whether
    // a cleanup path exists elsewhere.
    programOptions.put(runId, options);
    Multimap<String, QueueName> consumerQueues = FlowUtils.configureQueue(program, flowSpec, queueAdmin);
    final Table<String, Integer, ProgramController> flowlets = createFlowlets(program, runId, flowSpec);
    return new FlowProgramController(flowlets, runId, program, flowSpec, consumerQueues,
                                     discoveryServiceClient);
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
}
// Looks up the named flowlet's definition and fails fast if it is missing.
// NOTE(review): the null check guards processorName while the lookup uses
// flowletName — verify this mismatch is intentional; flowletName itself is
// never null-checked here.
Preconditions.checkNotNull(processorName, "Missing processor name.");
FlowletDefinition flowletDef = flowSpec.getFlowlets().get(flowletName);
Preconditions.checkNotNull(flowletDef, "Definition missing for flowlet \"%s\"", flowletName);