public Stream newDRPCStream(String function) {
    return newDRPCStream(new DRPCSpout(function));
}
public Stream newDRPCStream(String function, ILocalDRPC server) {
    DRPCSpout spout;
    if (server == null) {
        spout = new DRPCSpout(function);
    } else {
        spout = new DRPCSpout(function, server);
    }
    return newDRPCStream(spout);
}
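The two overloads above differ only in where the DRPCSpout receives requests from: with an ILocalDRPC the spout is wired directly to that in-process server, otherwise it discovers the cluster's DRPC servers from the Storm configuration. A minimal sketch of how topology-building code might switch between them; the helper and the "words" function name are hypothetical, not part of the Trident API:

// Hypothetical helper: use the in-process DRPC server when one is supplied
// (local testing), otherwise fall back to the cluster's DRPC servers.
private static Stream wordsStream(TridentTopology topology, ILocalDRPC localDrpc) {
    return localDrpc != null
            ? topology.newDRPCStream("words", localDrpc)
            : topology.newDRPCStream("words");
}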
public static StormTopology buildTopology(LocalDRPC drpc) {
    TridentTopology topology = new TridentTopology();
    // Static states backed by the two databases: URL -> tweeters, tweeter -> followers.
    TridentState urlToTweeters = topology.newStaticState(new StaticSingleKeyMapState.Factory(TWEETERS_DB));
    TridentState tweetersToFollowers = topology.newStaticState(new StaticSingleKeyMapState.Factory(FOLLOWERS_DB));

    topology.newDRPCStream("reach", drpc)
        // Look up everyone who tweeted the URL passed in the DRPC "args" field.
        .stateQuery(urlToTweeters, new Fields("args"), new MapGet(), new Fields("tweeters"))
        .each(new Fields("tweeters"), new ExpandList(), new Fields("tweeter"))
        .shuffle()
        // Fan out to each tweeter's follower list, then expand to one tuple per follower.
        .stateQuery(tweetersToFollowers, new Fields("tweeter"), new MapGet(), new Fields("followers"))
        .each(new Fields("followers"), new ExpandList(), new Fields("follower"))
        // Count each distinct follower once, then sum to get the reach of the URL.
        .groupBy(new Fields("follower"))
        .aggregate(new One(), new Fields("one"))
        .aggregate(new Fields("one"), new Sum(), new Fields("reach"));
    return topology.build();
}
public static StormTopology buildTopology(LocalDRPC drpc) {
    FixedBatchSpout spout = new FixedBatchSpout(new Fields("word"), 3,
        new Values("the cow jumped over the moon"),
        new Values("the man went to the store and bought some candy"),
        new Values("four score and seven years ago"),
        new Values("how many apples can you eat"),
        new Values("to be or not to be the person"));
    spout.setCycle(true);

    TridentTopology topology = new TridentTopology();
    TridentState wordCounts = topology.newStream("spout1", spout).parallelismHint(16)
        .flatMap(split)
        .map(toUpper)
        .filter(theFilter)
        .peek(new Consumer() {
            @Override
            public void accept(TridentTuple input) {
                System.out.println(input.getString(0));
            }
        })
        .groupBy(new Fields("word"))
        .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"))
        .parallelismHint(16);

    topology.newDRPCStream("words", drpc)
        .flatMap(split)
        .groupBy(new Fields("args"))
        .stateQuery(wordCounts, new Fields("args"), new MapGet(), new Fields("count"))
        .filter(new FilterNull())
        .aggregate(new Fields("count"), new Sum(), new Fields("sum"));
    return topology.build();
}
public static StormTopology buildTopology(LocalDRPC drpc) {
    FixedBatchSpout spout = new FixedBatchSpout(new Fields("sentence"), 3,
        new Values("the cow jumped over the moon"),
        new Values("the man went to the store and bought some candy"),
        new Values("four score and seven years ago"),
        new Values("how many apples can you eat"),
        new Values("to be or not to be the person"));
    spout.setCycle(true);

    TridentTopology topology = new TridentTopology();
    TridentState wordCounts = topology.newStream("spout1", spout).parallelismHint(16)
        .each(new Fields("sentence"), new Split(), new Fields("word"))
        .groupBy(new Fields("word"))
        .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"))
        .parallelismHint(16);

    topology.newDRPCStream("words", drpc)
        .each(new Fields("args"), new Split(), new Fields("word"))
        .groupBy(new Fields("word"))
        .stateQuery(wordCounts, new Fields("word"), new MapGet(), new Fields("count"))
        .each(new Fields("count"), new FilterNull())
        .aggregate(new Fields("count"), new Sum(), new Fields("sum"));
    return topology.build();
}
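The word-count topology above is typically exercised in local mode. A minimal sketch, assuming the Storm 1.x package layout and the standard LocalCluster/LocalDRPC test harness; the topology name and query string are arbitrary:

public static void main(String[] args) throws Exception {
    Config conf = new Config();
    conf.setMaxSpoutPending(20);

    // In-process DRPC server and cluster used to test the "words" function.
    LocalDRPC drpc = new LocalDRPC();
    LocalCluster cluster = new LocalCluster();
    cluster.submitTopology("wordCounter", conf, buildTopology(drpc));

    // Each execute() call becomes one tuple on the "words" DRPC stream, with the
    // query string in the "args" field; the result is the summed count.
    for (int i = 0; i < 10; i++) {
        System.out.println("DRPC RESULT: " + drpc.execute("words", "cat the dog jumped"));
        Thread.sleep(1000);
    }

    cluster.shutdown();
    drpc.shutdown();
}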
public static StormTopology buildTopology(TransactionalTridentKafkaSpout spout) throws IOException {
    TridentTopology topology = new TridentTopology();

    topology
        .newStream("tweets", spout)
        .each(new Fields("str"), new Print());

    topology
        .newDRPCStream("ping");

    return topology.build();
}
public static void main(String[] args) throws Exception {
    Config conf = new Config();
    String topologyName = args[0];
    conf.setNumWorkers(8); // Our Vagrant environment has 8 workers

    FakeTweetsBatchSpout fakeTweets = new FakeTweetsBatchSpout(10);

    TridentTopology topology = new TridentTopology();
    TridentState countState = topology
        .newStream("spout", fakeTweets)
        .groupBy(new Fields("actor"))
        .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"));

    topology
        .newDRPCStream("count_per_actor")
        .stateQuery(countState, new Fields("args"), new MapGet(), new Fields("count"));

    // Submit the topology to the cluster
    StormSubmitter.submitTopology(topologyName, conf, topology.build());
}
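Once a topology like the one above is running on a cluster, the "count_per_actor" function is queried through a DRPC client rather than a LocalDRPC handle. A minimal sketch, assuming a Storm 1.x DRPC server reachable on the default port 3772; the host name and the actor value are placeholders:

public static void main(String[] args) throws Exception {
    // Read the default Storm configuration (defaults plus storm.yaml) so the
    // Thrift transport settings required by DRPCClient are present.
    Map conf = Utils.readStormConfig();
    DRPCClient client = new DRPCClient(conf, "drpc.server.example.com", 3772);

    // The argument string arrives on the DRPC stream as the "args" field,
    // so this returns the persisted count for the given actor.
    System.out.println("count = " + client.execute("count_per_actor", "dave"));
}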
.newDRPCStream("hashtags", drpc) .each(new Fields("args"), new Split(), new Fields("hashtag")) .groupBy(new Fields("hashtag"))
public static StormTopology buildTopology(TransactionalTridentKafkaSpout spout) throws IOException {
    TridentTopology topology = new TridentTopology();

    /**
     * As a first step, we need a stream of tweets from which we can parse and extract
     * only the text and its id. As you will notice, we're going to store the stream
     * using the {@link ElasticSearchState} implementation and its {@link StateUpdater}.
     * Check their implementations for details.
     */
    topology
        .newStream("tweets", spout)
        .each(new Fields("str"), new ParseTweet(), new Fields("text", "content", "user"))
        .each(new Fields("text", "content"), new TweetIdExtractor(), new Fields("tweetId"))
        .project(new Fields("tweetId", "text"))
        .each(new Fields("tweetId", "text"), new Print())
        .partitionPersist(new ElasticSearchStateFactory(), new Fields("tweetId", "text"), new ElasticSearchStateUpdater());

    /**
     * Now we need a DRPC stream to query the state where the tweets are stored.
     * To do that, as shown below, we need an implementation of {@link QueryFunction} to
     * access our {@link ElasticSearchState}.
     */
    TridentState elasticSearchState = topology.newStaticState(new ElasticSearchStateFactory());
    topology
        .newDRPCStream("search")
        .each(new Fields("args"), new Split(" "), new Fields("keywords")) // split the arguments
        .stateQuery(elasticSearchState, new Fields("keywords"), new TweetQuery(), new Fields("ids")) // and pass them as query parameters
        .project(new Fields("ids"));

    return topology.build();
}
public StormTopology buildTopology(LocalDRPC drpc) {
    TridentKafkaConfig kafkaConfig = new TridentKafkaConfig(brokerHosts, "storm-sentence", "storm");
    kafkaConfig.scheme = new SchemeAsMultiScheme(new StringScheme());
    TransactionalTridentKafkaSpout kafkaSpout = new TransactionalTridentKafkaSpout(kafkaConfig);

    TridentTopology topology = new TridentTopology();
    TridentState wordCounts = topology.newStream("kafka", kafkaSpout).shuffle()
        .each(new Fields("str"), new WordSplit(), new Fields("word"))
        .groupBy(new Fields("word"))
        .persistentAggregate(new HazelCastStateFactory(), new Count(), new Fields("aggregates_words"))
        .parallelismHint(2);

    topology.newDRPCStream("words", drpc)
        .each(new Fields("args"), new Split(), new Fields("word"))
        .groupBy(new Fields("word"))
        .stateQuery(wordCounts, new Fields("word"), new MapGet(), new Fields("count"))
        .each(new Fields("count"), new FilterNull())
        .aggregate(new Fields("count"), new Sum(), new Fields("sum"));
    return topology.build();
}
public static StormTopology buildTopology(TransactionalTridentKafkaSpout spout) throws IOException {
    TridentTopology topology = new TridentTopology();
    TridentState count = topology
        .newStream("tweets", spout)
        .each(new Fields("str"), new ParseTweet(), new Fields("text", "content", "user"))
        .project(new Fields("content", "user"))
        .each(new Fields("content"), new OnlyHashtags())
        .each(new Fields("user"), new OnlyEnglish())
        .each(new Fields("content", "user"), new ExtractFollowerClassAndContentName(), new Fields("followerClass", "contentName"))
        .parallelismHint(3)
        .groupBy(new Fields("followerClass", "contentName"))
        .persistentAggregate(new HazelCastStateFactory(), new Count(), new Fields("count"))
        .parallelismHint(3);

    topology
        .newDRPCStream("hashtag_count")
        .each(new Constants<String>("< 100", "< 10K", "< 100K", ">= 100K"), new Fields("followerClass"))
        .stateQuery(count, new Fields("followerClass", "args"), new MapGet(), new Fields("count"));

    return topology.build();
}
public static StormTopology buildTopology(TransactionalTridentKafkaSpout spout) throws IOException {
    TridentTopology topology = new TridentTopology();
    TridentState count = topology
        .newStream("tweets", spout)
        .each(new Fields("str"), new ParseTweet(), new Fields("text", "content", "user"))
        .project(new Fields("content", "user"))
        .each(new Fields("content"), new OnlyHashtags())
        .each(new Fields("user"), new OnlyEnglish())
        .each(new Fields("content", "user"), new ExtractFollowerClassAndContentName(), new Fields("followerClass", "contentName"))
        .groupBy(new Fields("followerClass", "contentName"))
        .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"));

    topology
        .newDRPCStream("top_hashtags")
        .stateQuery(count, new TupleCollectionGet(), new Fields("followerClass", "contentName"))
        .stateQuery(count, new Fields("followerClass", "contentName"), new MapGet(), new Fields("count"))
        .aggregate(new Fields("contentName", "count"), new FirstN.FirstNSortedAgg(5, "count", true), new Fields("contentName", "count"));

    return topology.build();
}
public static StormTopology buildTopology(TransactionalTridentKafkaSpout spout) throws IOException {
    TridentTopology topology = new TridentTopology();
    TridentState count = topology
        .newStream("tweets", spout)
        .each(new Fields("str"), new ParseTweet(), new Fields("text", "content", "user"))
        .project(new Fields("content", "user"))
        .each(new Fields("content"), new OnlyHashtags())
        .each(new Fields("user"), new OnlyEnglish())
        .each(new Fields("content", "user"), new ExtractFollowerClassAndContentName(), new Fields("followerClass", "contentName"))
        .groupBy(new Fields("followerClass", "contentName"))
        .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"));

    topology
        .newDRPCStream("hashtag_count")
        .stateQuery(count, new TupleCollectionGet(), new Fields("followerClass", "contentName"))
        .stateQuery(count, new Fields("followerClass", "contentName"), new MapGet(), new Fields("count"))
        .groupBy(new Fields("followerClass"))
        .aggregate(new Fields("contentName", "count"), new FirstN.FirstNSortedAgg(1, "count", true), new Fields("contentName", "count"));

    return topology.build();
}
public static StormTopology buildTopology(TransactionalTridentKafkaSpout spout) throws IOException {
    TridentTopology topology = new TridentTopology();
    TridentState count = topology
        .newStream("tweets", spout)
        .each(new Fields("str"), new ParseTweet(), new Fields("status", "content", "user"))
        .project(new Fields("content", "user", "status"))
        .each(new Fields("content"), new OnlyHashtags())
        .each(new Fields("status"), new OnlyGeo())
        .each(new Fields("status", "content"), new ExtractLocation(), new Fields("country", "contentName"))
        .groupBy(new Fields("country", "contentName"))
        .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"));

    topology
        .newDRPCStream("location_hashtag_count")
        .stateQuery(count, new TupleCollectionGet(), new Fields("country", "contentName"))
        .stateQuery(count, new Fields("country", "contentName"), new MapGet(), new Fields("count"))
        .groupBy(new Fields("country"))
        .aggregate(new Fields("contentName", "count"), new FirstN.FirstNSortedAgg(3, "count", true), new Fields("contentName", "count"));

    return topology.build();
}
.newDRPCStream("age_stats", drpc) .stateQuery(countState, new TupleCollectionGet(), new Fields("actor", "location")) .stateQuery(nameToAge, new Fields("actor"), new MapGet(), new Fields("age"))
public static StormTopology buildTopology(LocalDRPC drpc, StateFactory state) {
    FixedBatchSpout spout = new FixedBatchSpout(new Fields("sentence"), 3,
        new Values("the cow jumped over the moon"),
        new Values("the man went to the store and bought some candy"),
        new Values("four score and seven years ago"),
        new Values("how many apples can you eat"),
        new Values("to be or not to be the person"));
    spout.setCycle(true);

    TridentTopology topology = new TridentTopology();
    TridentState wordCounts = topology.newStream("spout1", spout)
        .each(new Fields("sentence"), new Split(), new Fields("word"))
        .groupBy(new Fields("word"))
        .persistentAggregate(state, new Count(), new Fields("count"))
        .parallelismHint(6);

    topology.newDRPCStream("words", drpc)
        .each(new Fields("args"), new Split(), new Fields("word"))
        .groupBy(new Fields("word"))
        .stateQuery(wordCounts, new Fields("word"), new MapGet(), new Fields("count"))
        .each(new Fields("count"), new FilterNull())
        .aggregate(new Fields("count"), new Sum(), new Fields("sum"));
    return topology.build();
}