.withStringKeys() .withDoubleValues() .inMemory() .build();
.withStringKeys() .withValues(new VisibilityBindingSetSerde()) .persistent() .build(); builder.addStateStore(joinStoreSupplier, entry.getID());
public static void main(String[] args) throws IOException { Properties props = new Properties(); props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-wordcount-processor"); props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka0:19092"); props.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "zookeeper0:12181/kafka"); props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass()); props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); TopologyBuilder builder = new TopologyBuilder(); builder.addSource("SOURCE", new StringDeserializer(), new StringDeserializer(), "words") .addProcessor("WordCountProcessor", WordCountProcessor::new, "SOURCE") .addStateStore(Stores.create("Counts").withStringKeys().withIntegerValues().inMemory().build(), "WordCountProcessor") // .connectProcessorAndStateStores("WordCountProcessor", "Counts") .addSink("SINK", "count", new StringSerializer(), new IntegerSerializer(), "WordCountProcessor"); KafkaStreams stream = new KafkaStreams(builder, props); stream.start(); System.in.read(); stream.close(); stream.cleanUp(); }
/**
 * Entry point for the stateful stock-summary Processor API example.
 *
 * <p>Builds a topology that reads {@link StockTransaction} records from the {@code stocks}
 * topic, aggregates them per symbol in the in-memory {@code stock-transactions} store via
 * {@link StockSummaryProcessor}, forwards the raw transactions to {@code stocks-out}, and
 * writes {@link StockTransactionSummary} records to {@code transaction-summary}.
 *
 * @param args unused command-line arguments
 */
public static void main(String[] args) {
    StreamsConfig streamingConfig = new StreamsConfig(getProperties());
    TopologyBuilder builder = new TopologyBuilder();

    // JSON serdes for the domain types; deserializers need the target class for binding.
    JsonSerializer<StockTransactionSummary> stockTxnSummarySerializer = new JsonSerializer<>();
    JsonDeserializer<StockTransactionSummary> stockTxnSummaryDeserializer =
            new JsonDeserializer<>(StockTransactionSummary.class);
    JsonDeserializer<StockTransaction> stockTxnDeserializer =
            new JsonDeserializer<>(StockTransaction.class);
    JsonSerializer<StockTransaction> stockTxnJsonSerializer = new JsonSerializer<>();
    StringSerializer stringSerializer = new StringSerializer();
    StringDeserializer stringDeserializer = new StringDeserializer();
    Serde<StockTransactionSummary> stockTransactionSummarySerde =
            Serdes.serdeFrom(stockTxnSummarySerializer, stockTxnSummaryDeserializer);

    builder.addSource("stocks-source", stringDeserializer, stockTxnDeserializer, "stocks")
           .addProcessor("summary", StockSummaryProcessor::new, "stocks-source")
           // In-memory store capped at 100 entries, keyed by stock symbol string.
           .addStateStore(Stores.create("stock-transactions").withStringKeys()
               .withValues(stockTransactionSummarySerde).inMemory().maxEntries(100).build(),
               "summary")
           // Pass-through sink: raw transactions from the source node.
           .addSink("sink", "stocks-out", stringSerializer, stockTxnJsonSerializer,
               "stocks-source")
           // Aggregated summaries from the processor node.
           .addSink("sink-2", "transaction-summary", stringSerializer,
               stockTxnSummarySerializer, "summary");

    System.out.println("Starting StockSummaryStatefulProcessor Example");
    KafkaStreams streaming = new KafkaStreams(builder, streamingConfig);
    // Close the streams instance on JVM shutdown so consumer group membership,
    // state stores, and threads are released cleanly (original leaked them).
    Runtime.getRuntime().addShutdownHook(new Thread(streaming::close));
    streaming.start();
    System.out.println("StockSummaryStatefulProcessor Example now started");
}