/**
 * Submits the given topology to the remote cluster under the supplied name.
 * Thin delegate to {@link StormSubmitter#submitTopology}; note the argument order
 * changes here (config before topology).
 *
 * @param arg      the name to submit the topology under
 * @param topology the topology definition to run
 * @param config   the topology configuration
 * @throws Exception if submission fails (already-alive, invalid topology, authorization, ...)
 */
protected void submitTopologyRemoteCluster(String arg, StormTopology topology, Config config) throws Exception { StormSubmitter.submitTopology(arg, config, topology); }
/**
 * Submits the given topology to the remote cluster using the instance-level topology name.
 * NOTE(review): {@code tplgyName} is a field declared outside this view — presumably set
 * during construction; confirm it is non-null before this is called.
 *
 * @param topology the topology definition to run
 * @param config   the topology configuration
 * @throws Exception if submission fails (already-alive, invalid topology, authorization, ...)
 */
private void submitTopologyRemoteCluster(StormTopology topology, Config config) throws Exception { StormSubmitter.submitTopology(tplgyName, config, topology); }
/**
 * Submits a topology to run on the cluster. A topology runs forever or until explicitly killed.
 *
 * @param name the name of the storm.
 * @param topoConf the topology-specific configuration. See {@link Config}.
 * @param topology the processing to execute.
 * @param opts to manipulate the starting of the topology.
 * @throws AlreadyAliveException if a topology with this name is already running
 * @throws InvalidTopologyException if an invalid topology was submitted
 * @throws AuthorizationException if authorization is failed
 * @throws SubmitterHookException if any Exception occurs during initialization or invocation of registered {@link ISubmitterHook}
 */
public static void submitTopology(String name, Map<String, Object> topoConf, StormTopology topology, SubmitOptions opts) throws AlreadyAliveException, InvalidTopologyException, AuthorizationException { submitTopology(name, topoConf, topology, opts, null); }
/**
 * Submits a topology to run on the cluster. A topology runs forever or until explicitly killed.
 *
 * @param name the name of the storm.
 * @param topoConf the topology-specific configuration. See {@link Config}.
 * @param topology the processing to execute.
 * @throws AlreadyAliveException if a topology with this name is already running
 * @throws InvalidTopologyException if an invalid topology was submitted
 * @throws AuthorizationException if authorization is failed
 * @throws SubmitterHookException if any Exception occurs during initialization or invocation of registered {@link ISubmitterHook}
 */
public static void submitTopology(String name, Map<String, Object> topoConf, StormTopology topology) throws AlreadyAliveException, InvalidTopologyException, AuthorizationException { submitTopology(name, topoConf, topology, null, null); }
/**
 * Submits the topology under a specific name.
 *
 * @param name    the name to submit the topology under
 * @param conf    the topology configuration
 * @param builder builder whose {@code createTopology()} result is submitted
 * @return 0 on success, -1 if submission threw (the stack trace is printed)
 */
protected int submit(String name, Config conf, TopologyBuilder builder) {
    try {
        StormSubmitter.submitTopology(name, conf, builder.createTopology());
        return 0;
    } catch (Exception e) {
        e.printStackTrace();
        return -1;
    }
}
protected void runMain(String[] args) throws Exception { final String brokerUrl = args.length > 0 ? args[0] : KAFKA_LOCAL_BROKER; System.out.println("Running with broker url: " + brokerUrl); Config tpConf = getConfig(); // Producers. This is just to get some data in Kafka, normally you would be getting this data from elsewhere StormSubmitter.submitTopology(TOPIC_0 + "-producer", tpConf, KafkaProducerTopology.newTopology(brokerUrl, TOPIC_0)); StormSubmitter.submitTopology(TOPIC_1 + "-producer", tpConf, KafkaProducerTopology.newTopology(brokerUrl, TOPIC_1)); StormSubmitter.submitTopology(TOPIC_2 + "-producer", tpConf, KafkaProducerTopology.newTopology(brokerUrl, TOPIC_2)); //Consumer. Sets up a topology that reads the given Kafka spouts and logs the received messages StormSubmitter.submitTopology("storm-kafka-client-spout-test", tpConf, getTopologyKafkaSpout(getKafkaSpoutConfig(brokerUrl))); }
/**
 * Entry point: submits the Redis word-count Trident topology.
 * Requires exactly two arguments: redis host and redis port.
 */
public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        System.out.println("Usage: WordCountTrident redis-host redis-port");
        System.exit(1);
    }
    final String host = args[0];
    final Integer port = Integer.valueOf(args[1]);
    final Config topologyConf = new Config();
    topologyConf.setMaxSpoutPending(5);
    topologyConf.setNumWorkers(3);
    StormSubmitter.submitTopology("test_wordCounter_for_redis", topologyConf, buildTopology(host, port));
}
/**
 * Entry point: submits the Redis word-count Trident topology.
 * Requires exactly two arguments: redis host and redis port.
 */
public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        System.out.println("Usage: WordCountTrident redis-host redis-port");
        System.exit(1);
    }
    final String host = args[0];
    final Integer port = Integer.valueOf(args[1]);
    final Config topologyConf = new Config();
    topologyConf.setMaxSpoutPending(5);
    topologyConf.setNumWorkers(3);
    StormSubmitter.submitTopology("test_wordCounter_for_redis", topologyConf, buildTopology(host, port));
}
}
/**
 * Entry point: submits the Redis-cluster word-count Trident topology.
 * Requires a single argument: a comma-separated list of host:port pairs.
 */
public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println("Usage: WordCountTrident 127.0.0.1:6379,127.0.0.1:6380");
        System.exit(1);
    }
    final String hostPortList = args[0];
    final Config topologyConf = new Config();
    topologyConf.setMaxSpoutPending(5);
    topologyConf.setNumWorkers(3);
    StormSubmitter.submitTopology("test_wordCounter_for_redis", topologyConf, buildTopology(hostPortList));
}
/**
 * Entry point: submits the Redis-cluster word-count Trident topology.
 * Requires a single argument: a comma-separated list of host:port pairs.
 */
public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println("Usage: WordCountTrident 127.0.0.1:6379,127.0.0.1:6380");
        System.exit(1);
    }
    final Config topologyConf = new Config();
    topologyConf.setMaxSpoutPending(5);
    topologyConf.setNumWorkers(3);
    StormSubmitter.submitTopology("test_wordCounter_for_redis", topologyConf, buildTopology(args[0]));
}
/**
 * Entry point: submits the "reach" DRPC topology, then issues a few sample reach
 * queries through a DRPC client and prints the results.
 */
public static void main(String[] args) throws Exception {
    Config conf = new Config();
    StormSubmitter.submitTopology("reach", conf, buildTopology());
    // Sample urls to query once the topology is up.
    String[] urls = { "aaa", "foo.com/blog/1", "engineering.twitter.com/blog/5" };
    try (DRPCClient drpc = DRPCClient.getConfiguredClient(conf)) {
        Thread.sleep(2000); // give the topology a moment to come up before querying
        for (String url : urls) {
            System.out.println("REACH: " + drpc.execute("reach", url));
        }
    }
}
/**
 * Entry point: loads an HDFS yaml config, stores it under "hdfs.config", and submits
 * the Trident file topology.
 *
 * <p>Fixes: the {@code FileInputStream} was closed only on the success path — if
 * {@code yaml.load} threw, the stream leaked; now closed via try-with-resources.
 * Argument validation also runs before the file is opened.
 *
 * @param args args[0] = hdfs url, args[1] = hdfs yaml config file, args[2] (optional) = topology name
 * @throws Exception if the config file cannot be read or submission fails
 */
public static void main(String[] args) throws Exception {
    String topoName = "wordCounter";
    if (args.length == 3) {
        topoName = args[2];
    } else if (args.length > 3) {
        System.out.println("Usage: TridentFileTopology [hdfs url] [hdfs yaml config file] <topology name>");
        return;
    }
    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    Yaml yaml = new Yaml();
    Map<String, Object> yamlConf;
    // try-with-resources guarantees the stream is closed even if parsing throws
    try (InputStream in = new FileInputStream(args[1])) {
        yamlConf = (Map<String, Object>) yaml.load(in);
    }
    conf.put("hdfs.config", yamlConf);
    conf.setNumWorkers(3);
    StormSubmitter.submitTopology(topoName, conf, buildTopology(args[0]));
}
}
/**
 * Entry point: loads an HDFS yaml config, stores it under "hdfs.config", and submits
 * the Trident sequence topology.
 *
 * <p>Fixes: the {@code FileInputStream} was closed only on the success path — if
 * {@code yaml.load} threw, the stream leaked; now closed via try-with-resources.
 * Argument validation also runs before the file is opened.
 *
 * @param args args[0] = hdfs url, args[1] = hdfs yaml config file, args[2] (optional) = topology name
 * @throws Exception if the config file cannot be read or submission fails
 */
public static void main(String[] args) throws Exception {
    String topoName = "wordCounter";
    if (args.length == 3) {
        topoName = args[2];
    } else if (args.length > 3) {
        System.out.println("Usage: TridentSequenceTopology <hdfs_config_yaml> [<topology name>]");
        return;
    }
    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    Yaml yaml = new Yaml();
    Map<String, Object> yamlConf;
    // try-with-resources guarantees the stream is closed even if parsing throws
    try (InputStream in = new FileInputStream(args[1])) {
        yamlConf = (Map<String, Object>) yaml.load(in);
    }
    conf.put("hdfs.config", yamlConf);
    conf.setNumWorkers(3);
    StormSubmitter.submitTopology(topoName, conf, buildTopology(args[0]));
}
}
/**
 * Entry point: submits the MongoDB word-count Trident topology.
 *
 * @param args args[0] = mongodb url, args[1] = mongodb collection,
 *             args[2] (optional) = topology name (default "wordCounter")
 */
public static void main(String[] args) throws Exception {
    // Accept exactly 2 required args plus an optional topology name.
    if (args.length < 2 || args.length > 3) {
        System.out.println("Usage: WordCountTrident <mongodb url> <mongodb collection> [topology name]");
        return;
    }
    String topoName = (args.length == 3) ? args[2] : "wordCounter";
    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    conf.setNumWorkers(3);
    StormSubmitter.submitTopology(topoName, conf, buildTopology(args[0], args[1]));
}
/**
 * Entry point: submits the HBase word-count Trident topology.
 *
 * @param args args[0] = hbase.rootdir, args[1] (optional) = topology name (default "wordCounter")
 */
public static void main(String[] args) throws Exception {
    if (args.length > 2) {
        System.out.println("Usage: WordCountTrident <hbase.rootdir> [topology name]");
        return;
    }
    String topoName = (args.length == 2) ? args[1] : "wordCounter";
    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    conf.setNumWorkers(3);
    StormSubmitter.submitTopology(topoName, conf, buildTopology(args[0]));
}
/**
 * Entry point: submits the MongoDB word-count Trident topology.
 *
 * @param args args[0] = mongodb url, args[1] = mongodb collection,
 *             args[2] (optional) = topology name (default "wordCounter")
 */
public static void main(String[] args) throws Exception {
    final boolean badArgCount = args.length < 2 || args.length > 3;
    if (badArgCount) {
        System.out.println("Usage: WordCountTrident <mongodb url> <mongodb collection> [topology name]");
        return;
    }
    String name = "wordCounter";
    if (args.length == 3) {
        name = args[2];
    }
    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    conf.setNumWorkers(3);
    StormSubmitter.submitTopology(name, conf, buildTopology(args[0], args[1]));
}
protected void run(String[] args) throws AlreadyAliveException, InvalidTopologyException, AuthorizationException, InterruptedException { final String brokerUrl = args.length > 0 ? args[0] : KAFKA_LOCAL_BROKER; final boolean isOpaque = args.length > 1 ? Boolean.parseBoolean(args[1]) : true; System.out.println("Running with broker url " + brokerUrl + " and isOpaque=" + isOpaque); Config tpConf = new Config(); tpConf.setDebug(true); tpConf.setMaxSpoutPending(5); // Producers StormSubmitter.submitTopology(TOPIC_1 + "-producer", tpConf, KafkaProducerTopology.newTopology(brokerUrl, TOPIC_1)); StormSubmitter.submitTopology(TOPIC_2 + "-producer", tpConf, KafkaProducerTopology.newTopology(brokerUrl, TOPIC_2)); // Consumer KafkaTridentSpoutConfig<String, String> spoutConfig = newKafkaSpoutConfig(brokerUrl); ITridentDataSource spout = isOpaque ? newKafkaTridentSpoutOpaque(spoutConfig) : newKafkaTridentSpoutTransactional(spoutConfig); StormSubmitter.submitTopology("topics-consumer", tpConf, TridentKafkaConsumerTopology.newTopology(spout)); } }
/**
 * Entry point: submits the RocketMQ word-count Trident topology.
 *
 * <p>Fixes an off-by-one: the usage line advertises an optional third argument
 * {@code [topology name]}, but the old check {@code args.length > 3} only honored it
 * with four or more arguments — with exactly three, the custom name was silently
 * ignored. Now {@code args.length >= 3} applies {@code args[2]} as intended.
 *
 * @param args args[0] = nameserver addr, args[1] = topic, args[2] (optional) = topology name
 */
public static void main(String[] args) throws Exception {
    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    conf.setNumWorkers(3);
    String topologyName = "wordCounter";
    if (args.length < 2) {
        System.out.println("Usage: WordCountTrident <nameserver addr> <topic> [topology name]");
    } else {
        if (args.length >= 3) {
            topologyName = args[2];
        }
        StormSubmitter.submitTopology(topologyName, conf, buildTopology(args[0], args[1]));
    }
}
}
/**
 * Entry point: submits the RocketMQ word-count topology.
 *
 * <p>Fixes an off-by-one: the usage line advertises an optional third argument
 * {@code [topology name]}, but the old check {@code args.length > 3} only honored it
 * with four or more arguments — with exactly three, the custom name was silently
 * ignored. Now {@code args.length >= 3} applies {@code args[2]} as intended.
 *
 * @param args args[0] = nameserver addr, args[1] = topic, args[2] (optional) = topology name
 */
public static void main(String[] args) throws Exception {
    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    conf.setNumWorkers(3);
    String topologyName = "wordCounter";
    if (args.length < 2) {
        System.out.println("Usage: WordCountTopology <nameserver addr> <topic> [topology name]");
    } else {
        if (args.length >= 3) {
            topologyName = args[2];
        }
        StormSubmitter.submitTopology(topologyName, conf, buildTopology(args[0], args[1]));
    }
}
}
/**
 * Entry point: wires up a DRPC "exclamation" topology (drpc spout -> exclaim bolt ->
 * return-results bolt), submits it, then runs two sample queries and prints the results.
 */
public static void main(String[] args) throws Exception {
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("drpc", new DRPCSpout("exclamation"));
    builder.setBolt("exclaim", new ExclamationBolt(), 3).shuffleGrouping("drpc");
    builder.setBolt("return", new ReturnResults(), 3).shuffleGrouping("exclaim");
    Config conf = new Config();
    StormSubmitter.submitTopology("exclaim", conf, builder.createTopology());
    try (DRPCClient drpc = DRPCClient.getConfiguredClient(conf)) {
        for (String word : new String[] { "aaa", "bbb" }) {
            System.out.println(drpc.execute("exclamation", word));
        }
    }
}