/**
 * Registers the file-backed test spout used for local/integration testing.
 *
 * @param name topology component name to register the spout under
 * @return true when the spout was registered, false if initialization failed
 */
@Override
public boolean initializeTestingSpout(String name) {
    try {
        System.out.println("[OpenSOC] Initializing Test Spout");
        GenericInternalTestSpout testSpout = new GenericInternalTestSpout()
                .withFilename(test_file_path)
                .withRepeating(config.getBoolean("spout.test.parallelism.repeat"));
        builder.setSpout(name, testSpout,
                config.getInt("spout.test.parallelism.hint"))
                .setNumTasks(config.getInt("spout.test.num.tasks"));
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        // Was System.exit(0): killed the JVM with a *success* status on failure
        // and made the boolean return value meaningless. Report failure instead.
        return false;
    }
}
/**
 * Registers the file-backed test spout used for local/integration testing.
 *
 * @param name topology component name to register the spout under
 * @return true when the spout was registered, false if initialization failed
 */
@Override
public boolean initializeTestingSpout(String name) {
    try {
        System.out.println("[OpenSOC] Initializing Test Spout");
        GenericInternalTestSpout testSpout = new GenericInternalTestSpout()
                .withFilename(test_file_path)
                .withRepeating(config.getBoolean("spout.test.parallelism.repeat"));
        builder.setSpout(name, testSpout,
                config.getInt("spout.test.parallelism.hint"))
                .setNumTasks(config.getInt("spout.test.num.tasks"));
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        // Was System.exit(0): killed the JVM with a *success* status on failure
        // and made the boolean return value meaningless. Report failure instead.
        return false;
    }
}
/**
 * Registers the file-backed test spout used for local/integration testing.
 *
 * @param name topology component name to register the spout under
 * @return true when the spout was registered, false if initialization failed
 */
@Override
public boolean initializeTestingSpout(String name) {
    try {
        System.out.println("[OpenSOC] Initializing Test Spout");
        GenericInternalTestSpout testSpout = new GenericInternalTestSpout()
                .withFilename(test_file_path)
                .withRepeating(config.getBoolean("spout.test.parallelism.repeat"));
        builder.setSpout(name, testSpout,
                config.getInt("spout.test.parallelism.hint"))
                .setNumTasks(config.getInt("spout.test.num.tasks"));
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        // Was System.exit(0): killed the JVM with a *success* status on failure
        // and made the boolean return value meaningless. Report failure instead.
        return false;
    }
}
/**
 * Registers the file-backed test spout used for local/integration testing.
 *
 * @param name topology component name to register the spout under
 * @return true when the spout was registered, false if initialization failed
 */
@Override
public boolean initializeTestingSpout(String name) {
    try {
        System.out.println("[OpenSOC] Initializing Test Spout");
        GenericInternalTestSpout testSpout = new GenericInternalTestSpout()
                .withFilename(test_file_path)
                .withRepeating(config.getBoolean("spout.test.parallelism.repeat"));
        builder.setSpout(name, testSpout,
                config.getInt("spout.test.parallelism.hint"))
                .setNumTasks(config.getInt("spout.test.num.tasks"));
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        // Was System.exit(0): killed the JVM with a *success* status on failure
        // and made the boolean return value meaningless. Report failure instead.
        return false;
    }
}
}
/**
 * Registers the file-backed test spout used for local/integration testing.
 *
 * @param name topology component name to register the spout under
 * @return true when the spout was registered, false if initialization failed
 */
@Override
public boolean initializeTestingSpout(String name) {
    try {
        System.out.println("[OpenSOC] Initializing Test Spout");
        GenericInternalTestSpout testSpout = new GenericInternalTestSpout()
                .withFilename(test_file_path)
                .withRepeating(config.getBoolean("spout.test.parallelism.repeat"));
        builder.setSpout(name, testSpout,
                config.getInt("spout.test.parallelism.hint"))
                .setNumTasks(config.getInt("spout.test.num.tasks"));
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        // Was System.exit(0): killed the JVM with a *success* status on failure
        // and made the boolean return value meaningless. Report failure instead.
        return false;
    }
}
/**
 * Registers the file-backed test spout used for local/integration testing.
 *
 * @param name topology component name to register the spout under
 * @return true when the spout was registered, false if initialization failed
 */
@Override
public boolean initializeTestingSpout(String name) {
    try {
        System.out.println("[OpenSOC] Initializing Test Spout");
        GenericInternalTestSpout testSpout = new GenericInternalTestSpout()
                .withFilename(test_file_path)
                .withRepeating(config.getBoolean("spout.test.parallelism.repeat"));
        builder.setSpout(name, testSpout,
                config.getInt("spout.test.parallelism.hint"))
                .setNumTasks(config.getInt("spout.test.num.tasks"));
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        // Was System.exit(0): killed the JVM with a *success* status on failure
        // and made the boolean return value meaningless. Report failure instead.
        return false;
    }
}
/**
 * Registers the file-backed test spout used for local/integration testing.
 *
 * @param name topology component name to register the spout under
 * @return true when the spout was registered, false if initialization failed
 */
@Override
public boolean initializeTestingSpout(String name) {
    try {
        System.out.println("[OpenSOC] Initializing Test Spout");
        GenericInternalTestSpout testSpout = new GenericInternalTestSpout()
                .withFilename(test_file_path)
                .withRepeating(config.getBoolean("spout.test.parallelism.repeat"));
        builder.setSpout(name, testSpout,
                config.getInt("spout.test.parallelism.hint"))
                .setNumTasks(config.getInt("spout.test.num.tasks"));
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        // Was System.exit(0): killed the JVM with a *success* status on failure
        // and made the boolean return value meaningless. Report failure instead.
        return false;
    }
}
/**
 * Registers the file-backed test spout used for local/integration testing.
 *
 * @param name topology component name to register the spout under
 * @return true when the spout was registered, false if initialization failed
 */
@Override
public boolean initializeTestingSpout(String name) {
    try {
        System.out.println("[OpenSOC] Initializing Test Spout");
        GenericInternalTestSpout testSpout = new GenericInternalTestSpout()
                .withFilename(test_file_path)
                .withRepeating(config.getBoolean("spout.test.parallelism.repeat"));
        builder.setSpout(name, testSpout,
                config.getInt("spout.test.parallelism.hint"))
                .setNumTasks(config.getInt("spout.test.num.tasks"));
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        // Was System.exit(0): killed the JVM with a *success* status on failure
        // and made the boolean return value meaningless. Report failure instead.
        return false;
    }
}
}
/**
 * Wires a Kafka spout into the topology using ZooKeeper connection, topic,
 * and parallelism settings taken from the component configuration.
 *
 * @param name topology component name to register the spout under
 * @return true when the spout was registered, false if initialization failed
 */
private boolean initializeKafkaSpout(String name) {
    try {
        BrokerHosts zk = new ZkHosts(config.getString("kafka.zk"));
        String inputTopic = config.getString("spout.kafka.topic");
        // NOTE(review): zkRoot is "" and the topic doubles as the consumer id —
        // confirm this matches the deployment's ZooKeeper offset layout.
        SpoutConfig kafkaConfig = new SpoutConfig(zk, inputTopic, "", inputTopic);
        kafkaConfig.scheme = new SchemeAsMultiScheme(new RawScheme());
        // Was Boolean.valueOf("True") — same value, stated directly.
        kafkaConfig.forceFromStart = true;
        kafkaConfig.startOffsetTime = -1; // presumably "latest offset" in storm-kafka — verify
        builder.setSpout(name, new KafkaSpout(kafkaConfig),
                config.getInt("spout.kafka.parallelism.hint"))
                .setNumTasks(config.getInt("spout.kafka.num.tasks"));
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        // Was System.exit(0): killed the JVM with a *success* status on failure
        // and made the boolean return value meaningless. Report failure instead.
        return false;
    }
}
new AggregationSpout(aggregationConfig, new MRMetricsAggregateContainer(metrics, aggregationConfig)), tasks ).setNumTasks(tasks);
jobHistorySpout, tasks ).setNumTasks(tasks);
/**
 * Builds the spout described by the given configuration section and registers
 * it with the topology builder under its type name.
 *
 * @param builder topology builder the spout is added to
 * @param spout   configuration subtree describing the spout (type, conf, parallelism)
 * @return the spout's type string, used as its component id in the topology
 * @throws ConfigurationException if the spout cannot be built from the configuration
 */
String configureSpout(TopologyBuilder builder, Configuration spout) throws ConfigurationException {
    final String type = spout.getString(TYPE);
    final Configuration componentConf = spout.subset(CONF);
    final StormParallelismConfig parallelism = getStormParallelismConfig(componentConf);
    final IRichSpout component = (IRichSpout) buildComponent(type, componentConf);
    builder.setSpout(type, component, parallelism.getParallelismHint())
            .setNumTasks(parallelism.getNumTasks());
    return type;
}
mrRunningJobConfig.getZkStateConfig()), tasks ).setNumTasks(tasks);
// Optional override: use TRAFFIC_MONITOR_TASK_NUM when present in the config,
// otherwise fall back to the parser task count.
int numOfTrafficMonitorTasks = config.hasPath(TRAFFIC_MONITOR_TASK_NUM) ? config.getInt(TRAFFIC_MONITOR_TASK_NUM) : numOfParserTasks;
// Register the ingest spout with executors == tasks (one task per executor).
builder.setSpout("ingest", spout, numOfSpoutTasks).setNumTasks(numOfSpoutTasks);
int numOfSinkTasks = config.getInt(SINK_TASK_NUM); builder.setSpout("ingest", spout, numOfSpoutTasks).setNumTasks(numOfSpoutTasks); .setNumTasks(1);
@Override public StormTopology execute(Config config, StormEnvironment environment) { // 1. Init conf SparkHistoryJobAppConfig sparkHistoryJobAppConfig = SparkHistoryJobAppConfig.newInstance(config); final String jobFetchSpoutName = SparkHistoryJobAppConfig.SPARK_HISTORY_JOB_FETCH_SPOUT_NAME; final String jobParseBoltName = SparkHistoryJobAppConfig.SPARK_HISTORY_JOB_PARSE_BOLT_NAME; // 2. Config topology. TopologyBuilder topologyBuilder = new TopologyBuilder(); topologyBuilder.setSpout( jobFetchSpoutName, new SparkHistoryJobSpout(sparkHistoryJobAppConfig), sparkHistoryJobAppConfig.stormConfig.numOfSpoutExecutors ).setNumTasks(sparkHistoryJobAppConfig.stormConfig.numOfSpoutTasks); topologyBuilder.setBolt( jobParseBoltName, new SparkHistoryJobParseBolt(sparkHistoryJobAppConfig), sparkHistoryJobAppConfig.stormConfig.numOfParserBoltExecutors ).setNumTasks(sparkHistoryJobAppConfig.stormConfig.numOfParserBoltTasks).shuffleGrouping(jobFetchSpoutName); return topologyBuilder.createTopology(); } }
new MockMetadataChangeNotifyService(topoId, spoutId), numBolts); SpoutDeclarer declarer = topoBuilder.setSpout(spoutId, spout); declarer.setNumTasks(2); for (int i = 0; i < numBolts; i++) { TestBolt bolt = new TestBolt();
// Wire the topology: the spout feeds the persist bolt via shuffle grouping.
// Executors are set equal to tasks for both components.
TopologyBuilder builder = new TopologyBuilder();
builder.setSpout(spoutName, spout, numOfSpoutTasks).setNumTasks(numOfSpoutTasks);
builder.setBolt(persistBoltName, bolt, numOfPersistTasks).setNumTasks(numOfPersistTasks).shuffleGrouping(spoutName);
/**
 * Builds the xlog processing topology for one Kafka topic:
 * KafkaSpout -> SplitBolt (shuffle) -> XlogBolt (fields-grouped by "ip").
 *
 * @param topic Kafka topic to consume; the consumer id is "xlog_" + topic
 * @return the assembled topology
 */
public StormTopology buildTopology(String topic) {
    SpoutConfig spoutConfig = new SpoutConfig(brokerHosts, topic, "", "xlog_" + topic);
    spoutConfig.scheme = new SchemeAsMultiScheme(new StringScheme());

    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("KafkaSpout", new KafkaSpout(spoutConfig), 2).setNumTasks(8);
    builder.setBolt("SplitBolt", new SplitSentence(), 1)
            .setNumTasks(2)
            .shuffleGrouping("KafkaSpout");
    builder.setBolt("XlogBolt", new XlogBolt(), 4)
            .setNumTasks(8)
            .fieldsGrouping("SplitBolt", new Fields("ip"));
    return builder.createTopology();
}
sparkRunningJobAppConfig.getZkStateConfig()), parallelism ).setNumTasks(tasks);