public static Pipeline build(String bootstrapServers) {
    // Kafka consumer configuration. A fresh random group id plus
    // "earliest" offset reset makes every run re-read the topic from the start.
    Properties config = new Properties();
    config.setProperty(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString());
    config.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    config.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getCanonicalName());
    config.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getCanonicalName());
    config.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    // Stream the Kafka topic straight into the IMap sink.
    Pipeline p = Pipeline.create();
    p.drawFrom(KafkaSources.kafka(config, Constants.TOPIC_NAME_PRECIOUS))
     .drainTo(Sinks.map(Constants.IMAP_NAME_PRECIOUS));
    return p;
}
public static Pipeline build() { Pipeline p = Pipeline.create(); // Palladium and Platinum only p.drawFrom(Sources.<String, Object>mapJournal( Constants.IMAP_NAME_PRECIOUS, JournalInitialPosition.START_FROM_OLDEST) ).map(e -> e.getKey() + "==" + e.getValue()) .filter(str -> str.toLowerCase().startsWith("p")) .drainTo(Sinks.logger()) ; return p; }
private Pipeline buildPipeline() {
    // Merge the records of two Kafka topics into a single IMap sink.
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(KafkaSources.kafka(brokerProperties(), "t1", "t2"))
            .drainTo(Sinks.map(SINK_NAME));
    return pipeline;
}
private Pipeline buildPipeline() {
    // Log every record consumed from the Kafka topic.
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(KafkaSources.kafka(brokerProperties(), TOPIC))
            .drainTo(Sinks.logger());
    return pipeline;
}
public static void main(String[] args) throws Exception {
    System.setProperty("hazelcast.logging.type", "log4j");
    // Start a Netty server that, on each new connection, writes the remaining
    // COUNTER values line by line (counting down to 1) and then closes the
    // channel; the no-op consumer ignores any inbound data.
    NettyServer nettyServer = new NettyServer(PORT, channel -> {
        for (int i; (i = COUNTER.getAndDecrement()) > 0; ) {
            channel.writeAndFlush(i + "\n");
        }
        channel.close();
    }, DistributedConsumer.noop());
    nettyServer.start();
    // Form a two-member Jet cluster; the job is submitted via the first member.
    JetInstance jet = Jet.newJetInstance();
    Jet.newJetInstance();
    try {
        // Read UTF-8 text lines from the socket and collect them into an IList.
        Pipeline p = Pipeline.create();
        p.drawFrom(Sources.socket(HOST, PORT, UTF_8))
         .drainTo(Sinks.list(SINK_NAME));
        // The source completes when the server closes the channel, so join() returns.
        jet.newJob(p).join();
        System.out.println("Jet received " + jet.getList(SINK_NAME).size() + " items from the socket");
    } finally {
        nettyServer.stop();
        Jet.shutdownAll();
    }
}
// end of enclosing class
}
private static Pipeline buildPipeline() {
    // Maintain a rolling total of trade volume per key, materialized in an IMap.
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<Entry<String, Integer>, Integer, Entry<String, Integer>>mapJournal(TRADES_MAP_NAME,
            DistributedPredicate.alwaysTrue(), EventJournalMapEvent::getNewValue, START_FROM_CURRENT))
            .groupingKey(Entry::getKey)
            .rollingAggregate(summingLong(Entry::getValue))
            .drainTo(Sinks.map(VOLUME_MAP_NAME));
    return pipeline;
}
private static Pipeline buildPipeline() {
    // Maintain a rolling sum of trade prices per ticker, materialized in an IMap.
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<Trade, Integer, Trade>mapJournal(TRADES_MAP_NAME,
            DistributedPredicate.alwaysTrue(), EventJournalMapEvent::getNewValue, START_FROM_CURRENT))
            .groupingKey(Trade::getTicker)
            .rollingAggregate(summingLong(Trade::getPrice))
            .drainTo(Sinks.map(VOLUME_MAP_NAME));
    return pipeline;
}
public static void main(String[] args) throws Exception {
    System.setProperty("hazelcast.logging.type", "log4j");
    JetConfig config = getJetConfig();
    // Two-member cluster; the journal-backed map is partitioned across both.
    JetInstance instance = Jet.newJetInstance(config);
    Jet.newJetInstance(config);
    try {
        // Stream the map's event journal from the beginning, keep only the
        // values, and collect them into an IList.
        Pipeline pipeline = Pipeline.create();
        pipeline.drawFrom(Sources.<Integer, Integer>mapJournal(MAP_NAME, START_FROM_OLDEST))
                .map(Entry::getValue)
                .drainTo(Sinks.list(SINK_NAME));
        instance.newJob(pipeline);
        IMapJet<Integer, Integer> map = instance.getMap(MAP_NAME);
        for (int i = 0; i < 1000; i++) {
            map.set(i, i);
        }
        // Give the streaming job a moment to drain the journal into the list.
        TimeUnit.SECONDS.sleep(3);
        System.out.println("Read " + instance.getList(SINK_NAME).size() + " entries from map journal.");
    } finally {
        Jet.shutdownAll();
    }
}
private static Pipeline buildPipeline() {
    // Count trades per ticker over a sliding window and log one formatted
    // line per (window, ticker) result.
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<Trade, Integer, Trade>mapJournal(TRADES_MAP_NAME,
            DistributedPredicate.alwaysTrue(), EventJournalMapEvent::getNewValue, START_FROM_CURRENT))
            .addTimestamps(Trade::getTime, 3000)
            .groupingKey(Trade::getTicker)
            .window(WindowDefinition.sliding(SLIDING_WINDOW_LENGTH_MILLIS, SLIDE_STEP_MILLIS))
            .aggregate(counting(),
                    (winStart, winEnd, key, result) -> String.format("%s %5s %4d", toLocalTime(winEnd), key, result))
            .drainTo(Sinks.logger());
    return pipeline;
}
/**
 * Builds and returns the Pipeline which represents the actual computation:
 * classify each webcam frame with the model loaded from {@code modelPath}
 * and, once per second, forward the highest-confidence classification to
 * the GUI sink.
 */
private static Pipeline buildPipeline(String modelPath) {
    Pipeline p = Pipeline.create();
    p.drawFrom(WebcamSource.webcam(500))
     .mapUsingContext(classifierContext(modelPath),
             (ctx, img) -> {
                 // Pair the frame with its best label and that label's score.
                 Entry<String, Double> classification = classifyWithModel(ctx, img);
                 return tuple3(img, classification.getKey(), classification.getValue());
             })
     .window(tumbling(1000))
     .aggregate(maxBy(comparingDouble(Tuple3::f2)))
     .drainTo(buildGUISink());
    return p;
}
public static void main(String[] args) throws Exception {
    System.setProperty("hazelcast.logging.type", "log4j");
    JetConfig config = getJetConfig();
    // Two-member cluster; the journal-backed cache is partitioned across both.
    JetInstance instance = Jet.newJetInstance(config);
    Jet.newJetInstance(config);
    try {
        // Stream the cache's event journal from the beginning, keep only the
        // values, and collect them into an IList.
        Pipeline pipeline = Pipeline.create();
        pipeline.drawFrom(Sources.<Integer, Integer>cacheJournal(CACHE_NAME, START_FROM_OLDEST))
                .map(Entry::getValue)
                .drainTo(Sinks.list(SINK_NAME));
        instance.newJob(pipeline);
        ICache<Integer, Integer> cache = instance.getCacheManager().getCache(CACHE_NAME);
        for (int i = 0; i < 1000; i++) {
            cache.put(i, i);
        }
        // Give the streaming job a moment to drain the journal into the list.
        TimeUnit.SECONDS.sleep(3);
        System.out.println("Read " + instance.getList(SINK_NAME).size() + " entries from cache journal.");
    } finally {
        Jet.shutdownAll();
    }
}
private static Pipeline buildPipeline() { Pipeline p = Pipeline.create(); p.drawFrom(Sources.jmsTopic(() -> new ActiveMQConnectionFactory(ActiveMQBroker.BROKER_URL), INPUT_TOPIC)) .filter(message -> message.getJMSPriority() > 3) .map(message -> (TextMessage) message) // print the message text to the log .peek(TextMessage::getText) .drainTo(Sinks.<TextMessage>jmsTopicBuilder(() -> new ActiveMQConnectionFactory(ActiveMQBroker.BROKER_URL)) .destinationName(OUTPUT_TOPIC) .messageFn((session, message) -> { TextMessage textMessage = session.createTextMessage(message.getText()); textMessage.setBooleanProperty("isActive", true); textMessage.setJMSPriority(8); return textMessage; }) .build()); return p; }
private static Pipeline buildPipeline() { Pipeline p = Pipeline.create(); p.drawFrom(Sources.jmsQueue(() -> new ActiveMQConnectionFactory(ActiveMQBroker.BROKER_URL), INPUT_QUEUE)) .filter(message -> message.getJMSPriority() > 3) .map(message -> (TextMessage) message) // print the message text to the log .peek(TextMessage::getText) .drainTo(Sinks.<TextMessage>jmsQueueBuilder(() -> new ActiveMQConnectionFactory(ActiveMQBroker.BROKER_URL)) .destinationName(OUTPUT_QUEUE) .messageFn((session, message) -> { // create new text message with the same text and few additional properties TextMessage textMessage = session.createTextMessage(message.getText()); textMessage.setBooleanProperty("isActive", true); textMessage.setJMSPriority(8); return textMessage; } ) .build()); return p; }
@SuppressWarnings("Convert2MethodRef") // https://bugs.openjdk.java.net/browse/JDK-8154236 private static Pipeline aggregate() { Pipeline p = Pipeline.create(); p.drawFrom(Sources.<PageVisit, Integer, PageVisit>mapJournal(PAGE_VISIT, mapPutEvents(), mapEventNewValue(), START_FROM_OLDEST)) .addTimestamps(pv -> pv.timestamp(), 100) .window(sliding(10, 1)) .aggregate(counting()) .drainTo(Sinks.logger()); return p; }
@SuppressWarnings("Convert2MethodRef") // https://bugs.openjdk.java.net/browse/JDK-8154236 private static Pipeline groupAndAggregate() { Pipeline p = Pipeline.create(); p.drawFrom(Sources.<PageVisit, Integer, PageVisit>mapJournal(PAGE_VISIT, mapPutEvents(), mapEventNewValue(), START_FROM_OLDEST)) .addTimestamps(pv -> pv.timestamp(), 100) .window(sliding(10, 1)) .groupingKey(pv -> pv.userId()) .aggregate(toList()) .drainTo(Sinks.logger()); return p; }
private Job newJob() {
    // An event-journal source never completes, so this named job keeps
    // running until it is explicitly cancelled.
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.mapJournal(SOURCE_NAME, START_FROM_OLDEST))
            .withoutTimestamps()
            .drainTo(Sinks.list(SINK_NAME));
    return jet.newJob(pipeline, new JobConfig().setName("job-infinite-pipeline"));
}
private static Pipeline buildPipeline() {
    // Count price updates per ticker over a 1-second-step sliding window;
    // events may arrive up to LAG_SECONDS late.
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<PriceUpdateEvent, String, Tuple2<Integer, Long>>mapJournal(
            "prices",
            mapPutEvents(),
            e -> new PriceUpdateEvent(e.getKey(), e.getNewValue().f0(), e.getNewValue().f1()),
            START_FROM_CURRENT))
            .addTimestamps(PriceUpdateEvent::timestamp, LAG_SECONDS * 1000)
            .setLocalParallelism(1)
            .groupingKey(PriceUpdateEvent::ticker)
            .window(WindowDefinition.sliding(WINDOW_SIZE_SECONDS * 1000, 1000))
            .aggregate(AggregateOperations.counting())
            .drainTo(Sinks.logger());
    return pipeline;
}
/**
 * This code is the main point of the sample: use the source builder to
 * create an HTTP source connector, then create a Jet pipeline that
 * performs windowed aggregation over its data.
 */
private static Pipeline buildPipeline() {
    // Custom timestamped source that polls used-memory samples over HTTP.
    StreamSource<TimestampedItem<Long>> source = SourceBuilder
            .timestampedStream("used-memory", x -> new PollHttp())
            .fillBufferFn(PollHttp::fillBuffer)
            .destroyFn(PollHttp::close)
            .build();
    // Fit a linear trend over each sliding window and store
    // (timestamp, trend) entries in the result map.
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(source)
            .window(sliding(100, 20))
            .aggregate(linearTrend(TimestampedItem::timestamp, TimestampedItem::item))
            .map(trend -> entry(trend.timestamp(), trend.item()))
            .drainTo(Sinks.map(MAP_NAME));
    return pipeline;
}
private static Pipeline buildPipeline() { // we'll calculate two aggregations over the same input data: // 1. number of viewed product listings // 2. set of purchased product IDs // Output of the aggregation will be List{Integer, Set<String>} AggregateOperation1<ProductEvent, ?, Tuple2<Long, Set<String>>> aggrOp = allOf( summingLong(e -> e.getProductEventType() == VIEW_LISTING ? 1 : 0), mapping(e -> e.getProductEventType() == PURCHASE ? e.getProductId() : null, toSet()) ); Pipeline p = Pipeline.create(); p.drawFrom(Sources.<ProductEvent>streamFromProcessor("generator", ProcessorMetaSupplier.of(GenerateEventsP::new, 1))) .addTimestamps(ProductEvent::getTimestamp, 0) .groupingKey(ProductEvent::getUserId) .window(WindowDefinition.session(SESSION_TIMEOUT)) .aggregate(aggrOp, SessionWindow::sessionToString) .drainTo(Sinks.logger()); return p; }
@SuppressWarnings("Convert2MethodRef") // https://bugs.openjdk.java.net/browse/JDK-8154236 private static Pipeline coGroup() { Pipeline p = Pipeline.create(); StreamStageWithKey<PageVisit, Integer> pageVisits = p .drawFrom(Sources.<PageVisit, Integer, PageVisit>mapJournal(PAGE_VISIT, mapPutEvents(), mapEventNewValue(), START_FROM_OLDEST)) .addTimestamps(pv -> pv.timestamp(), 100) .groupingKey(pv -> pv.userId()); StreamStageWithKey<Payment, Integer> payments = p .drawFrom(Sources.<Payment, Integer, Payment>mapJournal(PAYMENT, mapPutEvents(), mapEventNewValue(), START_FROM_OLDEST)) .addTimestamps(pm -> pm.timestamp(), 100) .groupingKey(pm -> pm.userId()); StreamStageWithKey<AddToCart, Integer> addToCarts = p .drawFrom(Sources.<AddToCart, Integer, AddToCart>mapJournal(ADD_TO_CART, mapPutEvents(), mapEventNewValue(), START_FROM_OLDEST)) .addTimestamps(atc -> atc.timestamp(), 100) .groupingKey(atc -> atc.userId()); StageWithKeyAndWindow<PageVisit, Integer> windowStage = pageVisits.window(sliding(10, 1)); StreamStage<TimestampedEntry<Integer, Tuple3<List<PageVisit>, List<AddToCart>, List<Payment>>>> coGrouped = windowStage.aggregate3(toList(), addToCarts, toList(), payments, toList()); coGrouped.drainTo(Sinks.logger()); return p; }