/**
 * Takes the contents of the source map, converts each value to a string and
 * suffixes the sink map's current value with {@code -odd} if the key is odd
 * and with {@code -even} if the key is even.
 *
 * @param sourceMapName name of the map to read {@code Integer -> Integer} entries from
 * @param sinkMapName   name of the map whose entries are updated
 * @return the pipeline definition
 */
private static Pipeline mapWithUpdating(String sourceMapName, String sinkMapName) {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<Integer, Integer>map(sourceMapName))
            .map(e -> entry(e.getKey(), String.valueOf(e.getValue())))
            .drainTo(
                    Sinks.mapWithUpdating(
                            sinkMapName,
                            // The update function gets the sink map's current value
                            // (oldValue) and the incoming stream item; it appends a
                            // parity suffix chosen by the item's key. The mapped
                            // String value itself is not read here.
                            // NOTE(review): if the key is absent from the sink map,
                            // oldValue is null and the result is a "null-..." string —
                            // confirm the sink map is pre-populated.
                            (oldValue, item) -> item.getKey() % 2 == 0
                                    ? oldValue + "-even"
                                    : oldValue + "-odd"
                    )
            );
    return pipeline;
}
/**
 * Builds a pipeline that selects every row of the table over JDBC, maps each
 * row to a {@code User} and stores the users in the map keyed by user id.
 *
 * @param connectionUrl JDBC connection URL of the source database
 * @return the pipeline definition
 */
private static Pipeline buildPipeline(String connectionUrl) {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.jdbc(
                    connectionUrl,
                    "SELECT * FROM " + TABLE_NAME,
                    // Column 1 is the id, column 2 the name.
                    row -> new User(row.getInt(1), row.getString(2))))
            .map(user -> Util.entry(user.getId(), user))
            .drainTo(Sinks.map(MAP_NAME));
    return pipeline;
}
private static Pipeline buildPipeline(String connectionUrl) { Pipeline p = Pipeline.create(); p.drawFrom(Sources.<Integer, User>map(MAP_NAME)) .map(Map.Entry::getValue) .drainTo(Sinks.jdbc("INSERT INTO " + TABLE_NAME + "(id, name) VALUES(?, ?)", connectionUrl, (stmt, user) -> { // Bind the values from the stream item to a PreparedStatement created from // the above query. stmt.setInt(1, user.getId()); stmt.setString(2, user.getName()); })); return p; }
/**
 * Builds a pipeline that writes the {@code User} values of the map out as Avro
 * files in the configured directory.
 *
 * @return the pipeline definition
 */
private static Pipeline buildPipeline() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<String, User>map(MAP_NAME))
            .map(Map.Entry::getValue)
            .drainTo(AvroSinks.files(DIRECTORY_NAME, AvroSink::schemaForUser, User.class));
    return pipeline;
}
/**
 * Takes the contents of the source map, maps every entry to the single key
 * {@code sum} and writes it into the sink map using a merge function that
 * combines map values by adding the old value and the new value.
 *
 * @param sourceMapName name of the map to read {@code Integer -> Integer} entries from
 * @param sinkMapName   name of the map receiving the running sum under key {@code sum}
 * @return the pipeline definition
 */
private static Pipeline mapWithMerging(String sourceMapName, String sinkMapName) {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<Integer, Integer>map(sourceMapName))
            // Every item is re-keyed to the constant String key "sum", so all
            // values end up merged into one sink entry.
            .map(e -> entry("sum", e.getValue()))
            .drainTo(
                    Sinks.mapWithMerging(
                            sinkMapName,
                            // Merge by summing the existing and incoming values.
                            (oldValue, newValue) -> oldValue + newValue
                    )
            );
    return pipeline;
}
public static void main(String[] args) throws Exception {
    System.setProperty("hazelcast.logging.type", "log4j");
    JetInstance jet = Jet.newJetInstance();
    try {
        // Seed the input list with the integers 0 .. ITEM_COUNT-1.
        IList<Integer> inputList = jet.getList(INPUT_LIST);
        for (int item = 0; item < ITEM_COUNT; item++) {
            inputList.add(item);
        }
        // Map each integer to a labelled string and collect into the result list.
        Pipeline pipeline = Pipeline.create();
        pipeline.drawFrom(Sources.<Integer>list(INPUT_LIST))
                .map(i -> "item" + i)
                .drainTo(Sinks.list(RESULT_LIST));
        jet.newJob(pipeline).join();
        IList<String> outputList = jet.getList(RESULT_LIST);
        System.out.println("Result list items: " + new ArrayList<>(outputList));
    } finally {
        // Always tear down the embedded Jet cluster, even if the job fails.
        Jet.shutdownAll();
    }
}
}
private static Pipeline buildPipeline() { Pipeline p = Pipeline.create(); p.drawFrom(AvroSources.filesBuilder(AvroSink.DIRECTORY_NAME, ReflectDatumReader<User>::new) //Both Jet members share the same local file system .sharedFileSystem(true) .build()) .map(user -> Util.entry(user.getUsername(), user)) .drainTo(Sinks.map(AvroSink.MAP_NAME)); return p; }
/**
 * Builds a pipeline that parses the access-log files, keeps only the entries
 * with a 2xx/3xx response code, expands each logged path into all of its
 * sub-paths, counts hits per sub-path and writes the counts to files.
 *
 * @param sourceDir directory containing the access-log files to read
 * @param targetDir directory the per-path counts are written to
 * @return the pipeline definition
 */
private static Pipeline buildPipeline(String sourceDir, String targetDir) {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.files(sourceDir))
            .map(LogLine::parse)
            // Keep only successful/redirect responses (status 200-399).
            .filter((LogLine log) -> log.getResponseCode() >= 200 && log.getResponseCode() < 400)
            .flatMap(AccessLogAnalyzer::explodeSubPaths)
            .groupingKey(wholeItem())
            .aggregate(counting())
            .drainTo(Sinks.files(targetDir));
    return pipeline;
}