public static Pipeline buildPipeline(String sourceName, String sinkName) {
    Pattern pattern = Pattern.compile("\\W+");
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<Integer, String>map(sourceName))
            .flatMap(e -> Traversers.traverseArray(pattern.split(e.getValue().toLowerCase()))
                                    .filter(w -> !w.isEmpty()))
            .groupingKey(wholeItem())
            .aggregate(counting())
            .drainTo(Sinks.map(sinkName));
    return pipeline;
}
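A minimal usage sketch for the word-count pipeline above. The map names, the sample line, and the embedded-instance setup are illustrative assumptions, not part of the sample itself:

// Hypothetical usage, assuming an embedded Jet instance and maps named "lines" and "counts".
JetInstance jet = Jet.newJetInstance();
try {
    IMapJet<Integer, String> lines = jet.getMap("lines");
    lines.put(0, "To be, or not to be");
    jet.newJob(buildPipeline("lines", "counts")).join();
    IMapJet<String, Long> counts = jet.getMap("counts");
    System.out.println(counts.get("be")); // prints 2
} finally {
    Jet.shutdownAll();
}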
private static Pipeline buildPipeline() {
    Pattern delimiter = Pattern.compile("\\W+");
    Pipeline p = Pipeline.create();
    p.drawFrom(Sources.<Long, String>map(BOOK_LINES))
     .flatMap(e -> traverseArray(delimiter.split(e.getValue().toLowerCase())))
     .filter(word -> !word.isEmpty())
     .groupingKey(wholeItem())
     .aggregate(counting())
     .drainTo(Sinks.map(COUNTS));
    return p;
}
/**
 * Takes the contents of the source map and writes them into the sink map.
 */
private static Pipeline mapSourceAndSink(String sourceMapName, String sinkMapName) {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.map(sourceMapName))
            .drainTo(Sinks.map(sinkMapName));
    return pipeline;
}
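A quick usage sketch of the copy pipeline; the map names, contents, and embedded-instance setup are placeholders for illustration:

// Illustrative run of the map-to-map copy pipeline.
JetInstance jet = Jet.newJetInstance();
try {
    jet.getMap("source").put("hello", "world");
    jet.newJob(mapSourceAndSink("source", "sink")).join();
    System.out.println(jet.getMap("sink").get("hello")); // prints "world"
} finally {
    Jet.shutdownAll();
}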
@SuppressWarnings("Convert2MethodRef") // https://bugs.openjdk.java.net/browse/JDK-8154236 private static Pipeline coGroupDirect() { Pipeline p = Pipeline.create(); // Create three source streams BatchStageWithKey<PageVisit, Integer> pageVisits = p.drawFrom(Sources.<PageVisit>list(PAGE_VISIT)) .groupingKey(pageVisit -> pageVisit.userId()); BatchStageWithKey<AddToCart, Integer> addToCarts = p.drawFrom(Sources.<AddToCart>list(ADD_TO_CART)) .groupingKey(addToCart -> addToCart.userId()); BatchStageWithKey<Payment, Integer> payments = p.drawFrom(Sources.<Payment>list(PAYMENT)) .groupingKey(payment -> payment.userId()); // Construct the co-group transform. The aggregate operation collects all // the stream items into a 3-tuple of lists. BatchStage<Entry<Integer, Tuple3<List<PageVisit>, List<AddToCart>, List<Payment>>>> coGrouped = pageVisits.aggregate3(toList(), addToCarts, toList(), payments, toList()); // Store the results in the output map coGrouped.drainTo(Sinks.map(RESULT)); return p; }
.map(entryKey())                        // extract file name
.aggregate(AggregateOperations.toSet()) // set of unique file names
.map(Set::size)                         // extract size of the set
.map(Math::log);                        // calculate logarithm of it

.flatMap(e -> traverseArray(delimiter.split(e.getValue().toLowerCase()))
        .map(word -> stopwords.contains(word)
                ? null                          // a null return drops the stopword
                : entry(e.getKey(), word)))     // emit (docId, word) pairs
.groupingKey(entryValue())              // entry value is the word
.aggregate(AggregateOperations.toMap(entryKey(), e -> 1L, Long::sum));

tf.hashJoin(
        logDocCount,
        JoinClause.onKeys(constantKey(), constantKey()),
        (tfVal, logDocCountVal) -> toInvertedIndexEntry(
                logDocCountVal, tfVal.getKey(), tfVal.getValue().entrySet()))
  .drainTo(Sinks.map(INVERTED_INDEX));
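The join stage above delegates to toInvertedIndexEntry, which is not shown in this fragment. A hypothetical sketch, assuming the standard TF-IDF score tf * (log N - log df) and Util.entry for building entries; the actual helper in the sample may differ:

// Hypothetical reconstruction of the helper the hash-join calls above.
// Scores each (docId, tf) pair as tf * (log(docCount) - log(docFrequency)).
private static Entry<String, Collection<Entry<Long, Double>>> toInvertedIndexEntry(
        double logDocCount, String word, Set<Entry<Long, Long>> docIdTfs
) {
    double logDf = Math.log(docIdTfs.size());
    List<Entry<Long, Double>> scored = docIdTfs.stream()
            .map(e -> entry(e.getKey(), e.getValue() * (logDocCount - logDf)))
            .collect(Collectors.toList());
    return entry(word, scored);
}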
/**
 * Helper method to construct the pipeline for the job.
 *
 * @return the pipeline for the job
 */
public static Pipeline buildPipeline() {
    final Pipeline p = Pipeline.create();

    // Compute map server side
    final BatchStage<Horse> c = p.drawFrom(Sources.map(EVENTS_BY_NAME, t -> true, HORSE_FROM_EVENT));

    final BatchStage<Entry<Horse, Long>> c2 = c.groupingKey(wholeItem())
                                               .aggregate(counting())
                                               .filter(ent -> ent.getValue() > 1);

    c2.drainTo(Sinks.map(MULTIPLE));
    return p;
}
private static Pipeline buildPipeline(JobConf jobConfig) {
    Pipeline p = Pipeline.create();
    p.drawFrom(HdfsSources.<AvroWrapper<User>, NullWritable>hdfs(jobConfig))
     // keep only records whose Avro field at index 3 is true
     .filter(entry -> entry.getKey().datum().get(3).equals(Boolean.TRUE))
     // log each record that passes the filter
     .peek(entry -> entry.getKey().datum().toString())
     .drainTo(HdfsSinks.hdfs(jobConfig));
    return p;
}
/**
 * Attaches a stage that emits just the items that are distinct according
 * to their definition of equality ({@code equals} and {@code hashCode}).
 * There is no guarantee which one of equal items it will emit.
 *
 * @return the newly attached stage
 */
@Nonnull
default BatchStage<T> distinct() {
    return groupingKey(wholeItem()).distinct();
}
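A short usage sketch of distinct(), assuming a Jet list named "input" that holds duplicate strings; the list names are illustrative:

// Deduplicate the items of a list; equal strings collapse into a single item.
Pipeline p = Pipeline.create();
p.drawFrom(Sources.<String>list("input"))
 .distinct()
 .drainTo(Sinks.list("deduped"));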
public static void main(String[] args) throws Exception {
    System.setProperty("hazelcast.logging.type", "log4j");

    NettyServer nettyServer = new NettyServer(PORT, DistributedConsumer.noop(), msg -> COUNTER.incrementAndGet());
    nettyServer.start();

    JetInstance jet = Jet.newJetInstance();
    // start a second member so the job runs on a two-node cluster
    Jet.newJetInstance();

    try {
        System.out.println("Populating map...");
        IMapJet<Integer, Integer> map = jet.getMap(SOURCE_NAME);
        IntStream.range(0, SOURCE_ITEM_COUNT).parallel().forEach(i -> map.put(i, i));

        Pipeline p = Pipeline.create();
        p.drawFrom(Sources.map(SOURCE_NAME))
         .drainTo(Sinks.socket(HOST, PORT, e -> e.getValue().toString(), UTF_8));

        System.out.println("Executing job...");
        jet.newJob(p).join();
    } finally {
        nettyServer.stop();
        Jet.shutdownAll();
    }

    System.out.println("Server read " + COUNTER.get() + " items from the socket.");
}
@SuppressWarnings("Convert2MethodRef") // https://bugs.openjdk.java.net/browse/JDK-8154236 private static Pipeline coGroupDirect() { Pipeline p = Pipeline.create(); // Create three source streams BatchStageWithKey<PageVisit, Integer> pageVisits = p.drawFrom(Sources.<PageVisit>list(PAGE_VISIT)) .groupingKey(pageVisit -> pageVisit.userId()); BatchStageWithKey<AddToCart, Integer> addToCarts = p.drawFrom(Sources.<AddToCart>list(ADD_TO_CART)) .groupingKey(addToCart -> addToCart.userId()); BatchStageWithKey<Payment, Integer> payments = p.drawFrom(Sources.<Payment>list(PAYMENT)) .groupingKey(payment -> payment.userId()); // Construct the co-group transform. The aggregate operation collects all // the stream items into a 3-tuple of lists. BatchStage<Entry<Integer, Tuple3<List<PageVisit>, List<AddToCart>, List<Payment>>>> coGrouped = pageVisits.aggregate3(toList(), addToCarts, toList(), payments, toList()); // Store the results in the output map coGrouped.drainTo(Sinks.map(RESULT)); return p; }
/**
 * Builds and returns the Pipeline which represents the actual computation.
 * To compute the probability of finding word B after A, one has to know
 * how many pairs contain word A as a first entry and how many of them
 * contain B as a second entry. The pipeline creates pairs from consecutive
 * words and computes the probabilities of A->B.
 */
private static Pipeline buildPipeline() {
    Pipeline p = Pipeline.create();

    // Read the input files line by line
    BatchStage<String> lines = p.drawFrom(Sources.<String>files(INPUT_FILE));

    Pattern twoWords = Pattern.compile("(\\.|\\w+)\\s(\\.|\\w+)");

    // Flat-map each line into consecutive two-word pairs using the regular
    // expression above, then aggregate the transition probabilities into an IMap.
    lines.flatMap(e -> traverseMatcher(twoWords.matcher(e.toLowerCase()), m -> tuple2(m.group(1), m.group(2))))
         .groupingKey(Tuple2::f0)
         .aggregate(buildAggregateOp())
         .drainTo(Sinks.map("stateTransitions"));
    return p;
}
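The aggregate operation referenced above, buildAggregateOp(), is not shown here. A sketch of what it could look like, assuming Jet's AggregateOperations.allOf and groupingBy combinators: it counts all pairs sharing the same first word and, per second word, divides by that total to get P(B|A).

// Hypothetical sketch of buildAggregateOp(): for each first word, count the
// total number of pairs and the count per second word, then normalize the
// per-word counts into transition probabilities.
private static AggregateOperation1<Tuple2<String, String>, ?, Map<String, Double>> buildAggregateOp() {
    return allOf(
            counting(),
            groupingBy(Tuple2::f1, counting()),
            (total, countsPerSecondWord) -> countsPerSecondWord.entrySet().stream()
                    .collect(Collectors.toMap(Entry::getKey, e -> e.getValue() / (double) total))
    );
}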
private static Pipeline buildPipeline(String sourceDir, String targetDir) {
    Pipeline p = Pipeline.create();
    p.drawFrom(Sources.files(sourceDir))
     .map(LogLine::parse)
     .filter((LogLine log) -> log.getResponseCode() >= 200 && log.getResponseCode() < 400)
     .flatMap(AccessLogAnalyzer::explodeSubPaths)
     .groupingKey(wholeItem())
     .aggregate(counting())
     .drainTo(Sinks.files(targetDir));
    return p;
}
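explodeSubPaths is referenced above but not shown. A sketch under the assumption that LogLine exposes the requested path through a hypothetical getEndpoint() accessor: it emits every ancestor prefix of the path so the downstream counting stage aggregates hits at each level.

// Hypothetical sketch of explodeSubPaths(): for the path "/a/b/c" it emits
// "/a", "/a/b", "/a/b/c". LogLine.getEndpoint() is an assumed accessor.
private static Traverser<String> explodeSubPaths(LogLine logLine) {
    String path = logLine.getEndpoint();
    Stream<String> prefixes = IntStream.range(1, path.length())
            .filter(i -> path.charAt(i) == '/')
            .mapToObj(i -> path.substring(0, i));
    return Traversers.traverseStream(prefixes).append(path);
}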
public static void main(String[] args) {
    ApplicationContext context = new AnnotationConfigApplicationContext(AppConfig.class);

    JetInstance jet = context.getBean(JetInstance.class);

    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(CustomSourceP.customSource())
            .drainTo(Sinks.logger());

    JobConfig jobConfig = new JobConfig()
            .addClass(AnnotationBasedConfigurationSample.class)
            .addClass(CustomSourceP.class);

    jet.newJob(pipeline, jobConfig).join();

    jet.shutdown();
}
/**
 * Takes the contents of the source map, converts each value to a string, and
 * drains the entries into the sink map, suffixing the sink's existing value
 * with {@code -odd} if the key is odd and with {@code -even} if the key is even.
 */
private static Pipeline mapWithUpdating(String sourceMapName, String sinkMapName) {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<Integer, Integer>map(sourceMapName))
            .map(e -> entry(e.getKey(), String.valueOf(e.getValue())))
            .drainTo(
                    Sinks.mapWithUpdating(
                            sinkMapName,
                            (oldValue, item) -> item.getKey() % 2 == 0
                                    ? oldValue + "-even"
                                    : oldValue + "-odd"
                    )
            );
    return pipeline;
}
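A usage sketch; the map names and contents are assumptions. It pre-populates the sink map because the update function appends to the sink's existing value (for an absent key, oldValue would be null):

// Illustrative run of the mapWithUpdating pipeline.
JetInstance jet = Jet.newJetInstance();
try {
    IMapJet<Integer, Integer> source = jet.getMap("source");
    source.put(1, 10);
    source.put(2, 20);
    IMapJet<Integer, String> sink = jet.getMap("sink");
    sink.put(1, "a");
    sink.put(2, "b");
    jet.newJob(mapWithUpdating("source", "sink")).join();
    // sink now holds {1=a-odd, 2=b-even}
} finally {
    Jet.shutdownAll();
}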
@SuppressWarnings("Convert2MethodRef") // https://bugs.openjdk.java.net/browse/JDK-8154236 private static Pipeline coGroupBuild() { Pipeline p = Pipeline.create(); // Create three source streams BatchStageWithKey<PageVisit, Integer> pageVisits = p.drawFrom(Sources.<PageVisit>list(PAGE_VISIT)) .groupingKey(pageVisit -> pageVisit.userId()); BatchStageWithKey<AddToCart, Integer> addToCarts = p.drawFrom(Sources.<AddToCart>list(ADD_TO_CART)) .groupingKey(addToCart -> addToCart.userId()); BatchStageWithKey<Payment, Integer> payments = p.drawFrom(Sources.<Payment>list(PAYMENT)) .groupingKey(payment -> payment.userId()); // Obtain a builder object for the co-group transform GroupAggregateBuilder<Integer, List<PageVisit>> builder = pageVisits.aggregateBuilder(toList()); Tag<List<PageVisit>> visitTag = builder.tag0(); // Add the co-grouped streams to the builder. Here we add just two, but // you could add any number of them. Tag<List<AddToCart>> cartTag = builder.add(addToCarts, toList()); Tag<List<Payment>> payTag = builder.add(payments, toList()); // Build the co-group transform. The aggregate operation collects all the // stream items into an accumulator class called ItemsByTag. We transform // it into a 3-tuple of lists. BatchStage<Entry<Integer, Tuple3<List<PageVisit>, List<AddToCart>, List<Payment>>>> coGrouped = builder.build((key, res) -> entry(key, tuple3(res.get(visitTag), res.get(cartTag), res.get(payTag)))); // Store the results in the output map coGrouped.drainTo(Sinks.map(RESULT)); return p; }
.flatMap(line -> traverseArray(delimiter.split(line.toLowerCase()))
        .filter(w -> !w.isEmpty()))
.groupingKey(wholeItem())
.aggregate(counting())
.drainTo(HdfsSinks.hdfs(jobConfig));
@RequestMapping("/submitJob") public void submitJob() { Pipeline pipeline = Pipeline.create(); pipeline.drawFrom(CustomSourceP.customSource()) .drainTo(Sinks.logger()); JobConfig jobConfig = new JobConfig() .addClass(SpringBootSample.class) .addClass(CustomSourceP.class); instance.newJob(pipeline, jobConfig).join(); }
private static Pipeline buildPipeline(String connectionUrl) {
    Pipeline p = Pipeline.create();
    p.drawFrom(Sources.jdbc(connectionUrl, "SELECT * FROM " + TABLE_NAME,
            resultSet -> new User(resultSet.getInt(1), resultSet.getString(2))))
     .map(user -> Util.entry(user.getId(), user))
     .drainTo(Sinks.map(MAP_NAME));
    return p;
}
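A minimal sketch of running the JDBC pipeline; the in-memory H2 URL and a pre-created, populated TABLE_NAME are assumptions for illustration:

// Hypothetical run against an in-memory H2 database; assumes the table exists
// and the H2 driver is on the classpath.
JetInstance jet = Jet.newJetInstance();
try {
    jet.newJob(buildPipeline("jdbc:h2:mem:testdb")).join();
    IMapJet<Integer, User> users = jet.getMap(MAP_NAME);
    users.forEach((id, user) -> System.out.println(id + " -> " + user));
} finally {
    Jet.shutdownAll();
}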