/**
 * Builds a pipeline that copies every entry of the source map into the
 * sink map.
 *
 * @param sourceMapName name of the map to read entries from
 * @param sinkMapName   name of the map to write entries to
 * @return the constructed pipeline
 */
private static Pipeline mapSourceAndSink(String sourceMapName, String sinkMapName) {
    Pipeline p = Pipeline.create();
    p.drawFrom(Sources.map(sourceMapName))
     .drainTo(Sinks.map(sinkMapName));
    return p;
}
/**
 * Returns a timestamped stream source backed by a {@code WebcamSource}
 * created with the given poll interval.
 *
 * @param pollIntervalMillis poll interval passed to the webcam source, in milliseconds
 * @return a stream source emitting {@link BufferedImage} items
 */
public static StreamSource<BufferedImage> webcam(long pollIntervalMillis) {
    return SourceBuilder
            .timestampedStream("webcam", ctx -> new WebcamSource(pollIntervalMillis))
            .fillBufferFn(WebcamSource::addToBufferFn)
            .destroyFn(WebcamSource::close)
            .build();
}
/** * Builds a new pipeline stage that performs the hash-join operation. Attaches * the stage to all the contributing stages. * * @return the new hash-join pipeline stage */ public <R> StreamStage<R> build(DistributedBiFunction<T0, ItemsByTag, R> mapToOutputFn) { return (StreamStage<R>) build0(mapToOutputFn); } }
/**
 * Builds a pipeline that tails the trades map's event journal (starting from
 * the current position), keys the trades by ticker and maintains a rolling
 * sum of trade prices per ticker in the volume map.
 *
 * @return the constructed pipeline
 */
private static Pipeline buildPipeline() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<Trade, Integer, Trade>mapJournal(TRADES_MAP_NAME,
            DistributedPredicate.alwaysTrue(), EventJournalMapEvent::getNewValue, START_FROM_CURRENT))
            .groupingKey(Trade::getTicker)
            .rollingAggregate(summingLong(Trade::getPrice))
            .drainTo(Sinks.map(VOLUME_MAP_NAME));
    return pipeline;
}
/**
 * Builds a pipeline that tails the trades map's event journal (from the
 * current position), assigns event timestamps from {@code Trade.getTime()}
 * with a 3-second allowed lag, counts trades per ticker over a sliding
 * window and logs one formatted line per (window, ticker) result.
 *
 * @return the constructed pipeline
 */
private static Pipeline buildPipeline() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<Trade, Integer, Trade>mapJournal(TRADES_MAP_NAME,
            DistributedPredicate.alwaysTrue(), EventJournalMapEvent::getNewValue, START_FROM_CURRENT))
            .addTimestamps(Trade::getTime, 3000)
            .groupingKey(Trade::getTicker)
            .window(WindowDefinition.sliding(SLIDING_WINDOW_LENGTH_MILLIS, SLIDE_STEP_MILLIS))
            .aggregate(counting(),
                    (winStart, winEnd, key, result) ->
                            String.format("%s %5s %4d", toLocalTime(winEnd), key, result))
            .drainTo(Sinks.logger());
    return pipeline;
}
@SuppressWarnings("Convert2MethodRef") // https://bugs.openjdk.java.net/browse/JDK-8154236 private static Pipeline groupAndAggregate() { Pipeline p = Pipeline.create(); p.drawFrom(Sources.<PageVisit, Integer, PageVisit>mapJournal(PAGE_VISIT, mapPutEvents(), mapEventNewValue(), START_FROM_OLDEST)) .addTimestamps(pv -> pv.timestamp(), 100) .window(sliding(10, 1)) .groupingKey(pv -> pv.userId()) .aggregate(toList()) .drainTo(Sinks.logger()); return p; }
/**
 * Builds a batch pipeline that reads access-log files from {@code sourceDir},
 * parses each line, keeps only successful/redirect responses (HTTP 2xx/3xx),
 * explodes each request path into its sub-paths, counts hits per sub-path and
 * writes the counts to files under {@code targetDir}.
 *
 * @param sourceDir directory holding the input log files
 * @param targetDir directory the results are written to
 * @return the constructed pipeline
 */
private static Pipeline buildPipeline(String sourceDir, String targetDir) {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.files(sourceDir))
            .map(LogLine::parse)
            .filter((LogLine log) -> log.getResponseCode() >= 200 && log.getResponseCode() < 400)
            .flatMap(AccessLogAnalyzer::explodeSubPaths)
            .groupingKey(wholeItem())
            .aggregate(counting())
            .drainTo(Sinks.files(targetDir));
    return pipeline;
}
@SuppressWarnings("Convert2MethodRef") // https://bugs.openjdk.java.net/browse/JDK-8154236 private static Pipeline aggregate() { Pipeline p = Pipeline.create(); p.drawFrom(Sources.<PageVisit, Integer, PageVisit>mapJournal(PAGE_VISIT, mapPutEvents(), mapEventNewValue(), START_FROM_OLDEST)) .addTimestamps(pv -> pv.timestamp(), 100) .window(sliding(10, 1)) .aggregate(counting()) .drainTo(Sinks.logger()); return p; }
public static Pipeline build() { Pipeline p = Pipeline.create(); // Palladium and Platinum only p.drawFrom(Sources.<String, Object>mapJournal( Constants.IMAP_NAME_PRECIOUS, JournalInitialPosition.START_FROM_OLDEST) ).map(e -> e.getKey() + "==" + e.getValue()) .filter(str -> str.toLowerCase().startsWith("p")) .drainTo(Sinks.logger()) ; return p; }
/**
 * Builds a batch pipeline that loads every row of the users table over JDBC,
 * maps each row to a {@code User} keyed by its id and stores the entries in
 * the target map.
 *
 * @param connectionUrl JDBC connection URL of the source database
 * @return the constructed pipeline
 */
private static Pipeline buildPipeline(String connectionUrl) {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.jdbc(connectionUrl, "SELECT * FROM " + TABLE_NAME,
            resultSet -> new User(resultSet.getInt(1), resultSet.getString(2))))
            .map(user -> Util.entry(user.getId(), user))
            .drainTo(Sinks.map(MAP_NAME));
    return pipeline;
}
/**
 * Submits a streaming job named "job-infinite-pipeline" that copies events
 * from the source map's journal (from the oldest entry, without timestamps)
 * into the sink list.
 *
 * @return the submitted job
 */
private Job newJob() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.mapJournal(SOURCE_NAME, START_FROM_OLDEST))
            .withoutTimestamps()
            .drainTo(Sinks.list(SINK_NAME));
    JobConfig config = new JobConfig().setName("job-infinite-pipeline");
    return jet.newJob(pipeline, config);
}
/**
 * Convenience for {@link FileSourceBuilder#build(DistributedBiFunction)}.
 * The source emits each line to the downstream stage unchanged.
 */
public BatchSource<String> build() {
    return build((fileName, lineText) -> lineText);
}
/**
 * Convenience for {@link FileSourceBuilder#buildWatcher(DistributedBiFunction)}.
 * The watcher emits each line to the downstream stage unchanged.
 */
public StreamSource<String> buildWatcher() {
    return buildWatcher((fileName, lineText) -> lineText);
}
/**
 * Returns a copy of this join clause with the right-hand projection
 * function replaced by the supplied one; the key functions are retained.
 *
 * @param rightProjectFn projection to apply to right-hand items
 * @param <T1_NEW_OUT> result type of the new projection
 * @return the new join clause
 */
public <T1_NEW_OUT> JoinClause<K, T0, T1, T1_NEW_OUT> projecting(
        DistributedFunction<? super T1, ? extends T1_NEW_OUT> rightProjectFn
) {
    return new JoinClause<>(leftKeyFn, rightKeyFn, rightProjectFn);
}
/**
 * Convenience for {@link JmsSourceBuilder#build(DistributedFunction)}.
 * The source emits each received message as-is, with no projection applied.
 */
public StreamSource<Message> build() {
    return build(msg -> msg);
}
/** * Builds a new pipeline stage that performs the hash-join operation. Attaches * the stage to all the contributing stages. * * @return the new hash-join pipeline stage */ public <R> BatchStage<R> build(DistributedBiFunction<T0, ItemsByTag, R> mapToOutputFn) { return (BatchStage<R>) build0(mapToOutputFn); } }
/**
 * Builds a pipeline that tails the trades map's event journal (from the
 * current position), keys each entry by its map key and maintains a rolling
 * sum of the entry values per key in the volume map.
 *
 * @return the constructed pipeline
 */
private static Pipeline buildPipeline() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<Entry<String, Integer>, Integer, Entry<String, Integer>>mapJournal(
            TRADES_MAP_NAME,
            DistributedPredicate.alwaysTrue(), EventJournalMapEvent::getNewValue, START_FROM_CURRENT))
            .groupingKey(Entry::getKey)
            .rollingAggregate(summingLong(Entry::getValue))
            .drainTo(Sinks.map(VOLUME_MAP_NAME));
    return pipeline;
}