/**
 * Construct an instance with the supplied streams configuration and
 * clean up configuration.
 * @param streamsConfig the streams configuration.
 * @param cleanupConfig the cleanup configuration.
 * @since 2.1.2
 * @deprecated in favor of {@link #StreamsBuilderFactoryBean(KafkaStreamsConfiguration, CleanupConfig)}.
 */
@Deprecated
public StreamsBuilderFactoryBean(Map<String, Object> streamsConfig, CleanupConfig cleanupConfig) {
    Assert.notNull(streamsConfig, STREAMS_CONFIG_MUST_NOT_BE_NULL);
    Assert.notNull(cleanupConfig, CLEANUP_CONFIG_MUST_NOT_BE_NULL);
    this.streamsConfig = new StreamsConfig(streamsConfig);
    this.cleanupConfig = cleanupConfig;
}
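// A minimal sketch of the replacement constructor named in the @deprecated tag above.
// The property values ("my-app", "localhost:9092") and the CleanupConfig flags are
// illustrative assumptions, not taken from the original source.
Map<String, Object> props = new HashMap<>();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "my-app");           // assumption
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumption
StreamsBuilderFactoryBean factoryBean = new StreamsBuilderFactoryBean(
        new KafkaStreamsConfiguration(props), new CleanupConfig(false, true));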
@Test
public void testStreamsBuilderFactoryWithConfigProvidedLater() {
    Map<String, Object> props = new HashMap<>();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, APPLICATION_ID);
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, this.brokerAddresses);
    StreamsConfig streamsConfig = new StreamsConfig(props);
    streamsBuilderFactoryBean.setStreamsConfig(streamsConfig);
    assertThat(streamsBuilderFactoryBean.isRunning()).isFalse();
    streamsBuilderFactoryBean.start();
    assertThat(streamsBuilderFactoryBean.isRunning()).isTrue();
}
return new KafkaStreams(builder.build(), new StreamsConfig(config));
/**
 * Create a {@code KafkaStreams} instance.
 * <p>
 * Note: even if you never call {@link #start()} on a {@code KafkaStreams} instance,
 * you still must {@link #close()} it to avoid resource leaks.
 *
 * @param topology       the topology specifying the computational logic
 * @param props          properties for {@link StreamsConfig}
 * @param clientSupplier the Kafka clients supplier which provides underlying producer and consumer clients
 *                       for the new {@code KafkaStreams} instance
 * @throws StreamsException if any fatal error occurs
 */
public KafkaStreams(final Topology topology,
                    final Properties props,
                    final KafkaClientSupplier clientSupplier) {
    this(topology.internalTopologyBuilder, new StreamsConfig(props), clientSupplier, Time.SYSTEM);
}
/**
 * Create a {@code KafkaStreams} instance.
 * <p>
 * Note: even if you never call {@link #start()} on a {@code KafkaStreams} instance,
 * you still must {@link #close()} it to avoid resource leaks.
 *
 * @param topology       the topology specifying the computational logic
 * @param props          properties for {@link StreamsConfig}
 * @param clientSupplier the Kafka clients supplier which provides underlying producer and consumer clients
 *                       for the new {@code KafkaStreams} instance
 * @param time           {@code Time} implementation; cannot be null
 * @throws StreamsException if any fatal error occurs
 */
public KafkaStreams(final Topology topology,
                    final Properties props,
                    final KafkaClientSupplier clientSupplier,
                    final Time time) {
    this(topology.internalTopologyBuilder, new StreamsConfig(props), clientSupplier, time);
}
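// The note in these constructors' Javadoc is easy to miss: the constructor already
// allocates clients and threads, so close() is required even if start() is never
// called. A minimal sketch, assuming `topology` and `props` are built elsewhere:
KafkaStreams streams = new KafkaStreams(topology, props);
try {
    streams.start();
    // ... run until shutdown is requested ...
} finally {
    streams.close(); // releases resources whether or not start() was ever called
}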
return new KafkaStreams(builder.build(), new StreamsConfig(streamsConfiguration));
@Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
public StreamsConfig streamsConfig() {
    Map<String, Object> props = new HashMap<>();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    return new StreamsConfig(props);
}
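// Context for the bean name above: in Spring Kafka, annotating a @Configuration class
// with @EnableKafkaStreams makes the framework pick up the bean registered under
// DEFAULT_STREAMS_CONFIG_BEAN_NAME and wire a StreamsBuilderFactoryBean from it.
// A minimal sketch; the enclosing class name is an assumption:
@Configuration
@EnableKafkaStreams
public class StreamsAppConfig {
    // declare the streamsConfig() @Bean from the previous snippet here
}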
private StreamsConfig toStreamsConfig(Config config) {
    val props = new Properties();
    config.entrySet().forEach(e -> props.setProperty(e.getKey(), config.getString(e.getKey())));
    return new StreamsConfig(props);
}
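// Usage sketch for the converter above, assuming `Config` is the Typesafe Config
// library (com.typesafe.config) and that a "kafka-streams" section exists in
// application.conf; both the library guess and the path are assumptions:
Config config = ConfigFactory.load().getConfig("kafka-streams");
StreamsConfig streamsConfig = toStreamsConfig(config);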
public KafkaStreams constructStreamsInstance() {
    // Default the bootstrap servers setting when the caller did not supply one.
    if (!originalConfig.containsKey(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG)) {
        effectiveConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, getBootstrapServers());
    }
    // Entries from the original config take precedence over the defaults.
    effectiveConfig.putAll(originalConfig);
    return new KafkaStreams(kstreamBuilder, new StreamsConfig(effectiveConfig));
}
/**
 * Create a {@code KafkaStreams} instance.
 * <p>
 * Note: even if you never call {@link #start()} on a {@code KafkaStreams} instance,
 * you still must {@link #close()} it to avoid resource leaks.
 *
 * @param topology the topology specifying the computational logic
 * @param props    properties for {@link StreamsConfig}
 * @throws StreamsException if any fatal error occurs
 */
public KafkaStreams(final Topology topology,
                    final Properties props) {
    this(topology.internalTopologyBuilder, new StreamsConfig(props), new DefaultKafkaClientSupplier());
}
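// A minimal end-to-end sketch for this simplest constructor; the topic names and
// property values are illustrative assumptions:
Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "example-app");        // assumption
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");  // assumption
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

StreamsBuilder builder = new StreamsBuilder();
builder.stream("input-topic").to("output-topic"); // trivial pass-through topology

KafkaStreams streams = new KafkaStreams(builder.build(), props);
streams.start();
Runtime.getRuntime().addShutdownHook(new Thread(streams::close));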
/**
 * Create a {@code KafkaStreams} instance.
 * <p>
 * Note: even if you never call {@link #start()} on a {@code KafkaStreams} instance,
 * you still must {@link #close()} it to avoid resource leaks.
 *
 * @param topology the topology specifying the computational logic
 * @param props    properties for {@link StreamsConfig}
 * @param time     {@code Time} implementation; cannot be null
 * @throws StreamsException if any fatal error occurs
 */
public KafkaStreams(final Topology topology,
                    final Properties props,
                    final Time time) {
    this(topology.internalTopologyBuilder, new StreamsConfig(props), new DefaultKafkaClientSupplier(), time);
}
@Override
public KafkaStreams make(final String ryaInstance, final StreamsQuery query) throws KafkaStreamsFactoryException {
    requireNonNull(ryaInstance);
    requireNonNull(query);

    // Set up the Kafka Streams program.
    final Properties streamsProps = new Properties();

    // Configure the Kafka servers that will be talked to.
    streamsProps.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServersConfig);

    // Use the Query ID as the Application ID to ensure we resume where we left off
    // the last time this command was run.
    streamsProps.put(StreamsConfig.APPLICATION_ID_CONFIG, "RyaStreams-Query-" + query.getQueryId());

    // Always start at the beginning of the input topic.
    streamsProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    // Set up the topology that processes the query.
    final String statementsTopic = KafkaTopics.statementsTopic(ryaInstance);
    final String resultsTopic = KafkaTopics.queryResultsTopic(ryaInstance, query.getQueryId());

    try {
        final TopologyBuilder topologyBuilder = topologyFactory.build(query.getSparql(), statementsTopic, resultsTopic, new RandomUUIDFactory());
        return new KafkaStreams(topologyBuilder, new StreamsConfig(streamsProps));
    } catch (final MalformedQueryException | TopologyBuilderException e) {
        throw new KafkaStreamsFactoryException(
                "Could not create a KafkaStreams processing topology for query " + query.getQueryId(), e);
    }
}
@Override
public GraphAlgorithmState<Void> configure(StreamsBuilder builder, Properties streamsConfig) {
    ClientUtils.createTopic(solutionSetTopic, numPartitions, replicationFactor, streamsConfig);
    ClientUtils.createTopic(workSetTopic, numPartitions, replicationFactor, streamsConfig);
    computation.prepare(builder, streamsConfig);
    Topology topology = builder.build();
    log.info("Topology description {}", topology.describe());
    streams = new KafkaStreams(topology, new StreamsConfig(streamsConfig), new PregelClientSupplier());
    streams.start();
    return new GraphAlgorithmState<>(streams, GraphAlgorithmState.State.CREATED, 0, 0L, null);
}
@Override
public void start() throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());
    Serde<EventEnvelope> envelopeSerde = initializeEnvelopeSerde();

    Predicate<String, EventEnvelope> inventoryItemCreated = (k, v) -> k.equals(InventoryItemCreated.class.getSimpleName());
    Predicate<String, EventEnvelope> inventoryItemRenamed = (k, v) -> k.equals(InventoryItemRenamed.class.getSimpleName());
    Predicate<String, EventEnvelope> inventoryItemDeactivated = (k, v) -> k.equals(InventoryItemDeactivated.class.getSimpleName());

    KStreamBuilder builder = new KStreamBuilder();
    KStream<String, EventEnvelope>[] filteredStreams = builder
            .stream(Serdes.String(), envelopeSerde, INVENTORY_ITEM_TOPIC)
            .selectKey((k, v) -> v.eventType)
            .branch(inventoryItemCreated, inventoryItemRenamed, inventoryItemDeactivated);

    filteredStreams[0].process(InventoryItemCreatedHandler::new);
    filteredStreams[1].process(InventoryItemRenamedHandler::new);
    filteredStreams[2].process(InventoryItemDeactivatedHandler::new);

    kafkaStreams = new KafkaStreams(builder, streamsConfig);
    kafkaStreams.cleanUp(); // only because we are using in-memory
    kafkaStreams.start();
}
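// Several of the examples in this section call an undeclared getProperties() helper.
// A hypothetical minimal version; the values are illustrative assumptions, and real
// applications would at least set an application id and the bootstrap servers:
private static Properties getProperties() {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "example-app");        // assumption
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");  // assumption
    return props;
}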
public static void main(String[] args) {
    StreamsConfig streamingConfig = new StreamsConfig(getProperties());

    KStreamBuilder kStreamBuilder = new KStreamBuilder();
    KStream<String, String> patternStreamI = kStreamBuilder.stream(Serdes.String(), Serdes.String(), Pattern.compile("topic-\\d"));
    KStream<String, String> namedTopicKStream = kStreamBuilder.stream(Serdes.String(), Serdes.String(), "topic-Z");
    KStream<String, String> patternStreamII = kStreamBuilder.stream(Serdes.String(), Serdes.String(), Pattern.compile("topic-[A-Y]+"));

    patternStreamI.print("pattern-\\d");
    namedTopicKStream.print("topic-Z");
    patternStreamII.print("topic-[A-Y]+");

    System.out.println("Starting stream regex consumer Example");
    KafkaStreams kafkaStreams = new KafkaStreams(kStreamBuilder, streamingConfig);
    kafkaStreams.start();
}
public void run() {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    JsonSerializer<Tweet> tweetJsonSerializer = new JsonSerializer<>();
    JsonDeserializer<Tweet> tweetJsonDeserializer = new JsonDeserializer<>(Tweet.class);
    Serde<Tweet> tweetSerde = Serdes.serdeFrom(tweetJsonSerializer, tweetJsonDeserializer);

    KStreamBuilder kStreamBuilder = new KStreamBuilder();

    Classifier classifier = new Classifier();
    classifier.train(new File("src/main/resources/kafkaStreamsTwitterTrainingData_clean.csv"));

    KeyValueMapper<String, Tweet, String> languageToKey = (k, v) ->
            StringUtils.isNotBlank(v.getText()) ? classifier.classify(v.getText()) : "unknown";

    Predicate<String, Tweet> isEnglish = (k, v) -> k.equals("english");
    Predicate<String, Tweet> isFrench = (k, v) -> k.equals("french");
    Predicate<String, Tweet> isSpanish = (k, v) -> k.equals("spanish");

    KStream<String, Tweet> tweetKStream = kStreamBuilder.stream(Serdes.String(), tweetSerde, "twitterData");
    KStream<String, Tweet>[] filteredStreams = tweetKStream.selectKey(languageToKey).branch(isEnglish, isFrench, isSpanish);

    filteredStreams[0].to(Serdes.String(), tweetSerde, "english");
    filteredStreams[1].to(Serdes.String(), tweetSerde, "french");
    filteredStreams[2].to(Serdes.String(), tweetSerde, "spanish");

    kafkaStreams = new KafkaStreams(kStreamBuilder, streamsConfig);
    System.out.println("Starting twitter analysis streams");
    kafkaStreams.start();
    System.out.println("Started");
}
public static void main(String[] args) throws Exception {
    StreamsConfig streamingConfig = new StreamsConfig(getProperties());

    JsonDeserializer<Purchase> purchaseJsonDeserializer = new JsonDeserializer<>(Purchase.class);
    JsonSerializer<Purchase> purchaseJsonSerializer = new JsonSerializer<>();
    JsonSerializer<RewardAccumulator> rewardAccumulatorJsonSerializer = new JsonSerializer<>();
    JsonSerializer<PurchasePattern> purchasePatternJsonSerializer = new JsonSerializer<>();
    StringDeserializer stringDeserializer = new StringDeserializer();
    StringSerializer stringSerializer = new StringSerializer();

    TopologyBuilder topologyBuilder = new TopologyBuilder();
    topologyBuilder.addSource("SOURCE", stringDeserializer, purchaseJsonDeserializer, "src-topic")
            .addProcessor("PROCESS", CreditCardAnonymizer::new, "SOURCE")
            .addProcessor("PROCESS2", PurchasePatterns::new, "PROCESS")
            .addProcessor("PROCESS3", CustomerRewards::new, "PROCESS")
            .addSink("SINK", "patterns", stringSerializer, purchasePatternJsonSerializer, "PROCESS2")
            .addSink("SINK2", "rewards", stringSerializer, rewardAccumulatorJsonSerializer, "PROCESS3")
            .addSink("SINK3", "purchases", stringSerializer, purchaseJsonSerializer, "PROCESS");

    System.out.println("Starting PurchaseProcessor Example");
    KafkaStreams streaming = new KafkaStreams(topologyBuilder, streamingConfig);
    streaming.start();
    System.out.println("Now started PurchaseProcessor Example");
}
public static void main(String[] args) {
    StreamsConfig streamingConfig = new StreamsConfig(getProperties());

    TopologyBuilder builder = new TopologyBuilder();
    JsonSerializer<StockTransactionSummary> stockTxnSummarySerializer = new JsonSerializer<>();
    JsonDeserializer<StockTransactionSummary> stockTxnSummaryDeserializer = new JsonDeserializer<>(StockTransactionSummary.class);
    JsonDeserializer<StockTransaction> stockTxnDeserializer = new JsonDeserializer<>(StockTransaction.class);
    JsonSerializer<StockTransaction> stockTxnJsonSerializer = new JsonSerializer<>();
    StringSerializer stringSerializer = new StringSerializer();
    StringDeserializer stringDeserializer = new StringDeserializer();
    Serde<StockTransactionSummary> stockTransactionSummarySerde = Serdes.serdeFrom(stockTxnSummarySerializer, stockTxnSummaryDeserializer);

    builder.addSource("stocks-source", stringDeserializer, stockTxnDeserializer, "stocks")
            .addProcessor("summary", StockSummaryProcessor::new, "stocks-source")
            .addStateStore(Stores.create("stock-transactions").withStringKeys()
                    .withValues(stockTransactionSummarySerde).inMemory().maxEntries(100).build(), "summary")
            .addSink("sink", "stocks-out", stringSerializer, stockTxnJsonSerializer, "stocks-source")
            .addSink("sink-2", "transaction-summary", stringSerializer, stockTxnSummarySerializer, "summary");

    System.out.println("Starting StockSummaryStatefulProcessor Example");
    KafkaStreams streaming = new KafkaStreams(builder, streamingConfig);
    streaming.start();
    System.out.println("StockSummaryStatefulProcessor Example now started");
}
public static void main(String[] args) {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    JsonDeserializer<Purchase> purchaseJsonDeserializer = new JsonDeserializer<>(Purchase.class);
    JsonSerializer<Purchase> purchaseJsonSerializer = new JsonSerializer<>();
    JsonSerializer<RewardAccumulator> rewardAccumulatorJsonSerializer = new JsonSerializer<>();
    JsonDeserializer<RewardAccumulator> rewardAccumulatorJsonDeserializer = new JsonDeserializer<>(RewardAccumulator.class);
    Serde<RewardAccumulator> rewardAccumulatorSerde = Serdes.serdeFrom(rewardAccumulatorJsonSerializer, rewardAccumulatorJsonDeserializer);
    JsonSerializer<PurchasePattern> purchasePatternJsonSerializer = new JsonSerializer<>();
    JsonDeserializer<PurchasePattern> purchasePatternJsonDeserializer = new JsonDeserializer<>(PurchasePattern.class);
    Serde<PurchasePattern> purchasePatternSerde = Serdes.serdeFrom(purchasePatternJsonSerializer, purchasePatternJsonDeserializer);
    Serde<Purchase> purchaseSerde = Serdes.serdeFrom(purchaseJsonSerializer, purchaseJsonDeserializer);
    Serde<String> stringSerde = Serdes.String();

    KStreamBuilder kStreamBuilder = new KStreamBuilder();
    KStream<String, Purchase> purchaseKStream = kStreamBuilder.stream(stringSerde, purchaseSerde, "src-topic")
            .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

    purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build()).to(stringSerde, purchasePatternSerde, "patterns");
    purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build()).to(stringSerde, rewardAccumulatorSerde, "rewards");
    purchaseKStream.to(stringSerde, purchaseSerde, "purchases");

    System.out.println("Starting PurchaseStreams Example");
    KafkaStreams kafkaStreams = new KafkaStreams(kStreamBuilder, streamsConfig);
    kafkaStreams.start();
    System.out.println("Now started PurchaseStreams Example");
}
public static void main(String[] args) {
    StreamsConfig streamingConfig = new StreamsConfig(getProperties());

    JsonSerializer<StockTransactionCollector> stockTransactionsSerializer = new JsonSerializer<>();
    JsonDeserializer<StockTransactionCollector> stockTransactionsDeserializer = new JsonDeserializer<>(StockTransactionCollector.class);
    JsonDeserializer<StockTransaction> stockTxnDeserializer = new JsonDeserializer<>(StockTransaction.class);
    JsonSerializer<StockTransaction> stockTxnJsonSerializer = new JsonSerializer<>();
    Serde<StockTransaction> transactionSerde = Serdes.serdeFrom(stockTxnJsonSerializer, stockTxnDeserializer);
    StringSerializer stringSerializer = new StringSerializer();
    StringDeserializer stringDeserializer = new StringDeserializer();
    Serde<String> stringSerde = Serdes.serdeFrom(stringSerializer, stringDeserializer);
    Serde<StockTransactionCollector> collectorSerde = Serdes.serdeFrom(stockTransactionsSerializer, stockTransactionsDeserializer);
    WindowedSerializer<String> windowedSerializer = new WindowedSerializer<>(stringSerializer);
    WindowedDeserializer<String> windowedDeserializer = new WindowedDeserializer<>(stringDeserializer);
    Serde<Windowed<String>> windowedSerde = Serdes.serdeFrom(windowedSerializer, windowedDeserializer);

    KStreamBuilder kStreamBuilder = new KStreamBuilder();
    KStream<String, StockTransaction> transactionKStream = kStreamBuilder.stream(stringSerde, transactionSerde, "stocks");

    transactionKStream.map((k, v) -> new KeyValue<>(v.getSymbol(), v))
            .through(stringSerde, transactionSerde, "stocks-out")
            .groupBy((k, v) -> k, stringSerde, transactionSerde)
            .aggregate(StockTransactionCollector::new,
                    (k, v, stockTransactionCollector) -> stockTransactionCollector.add(v),
                    TimeWindows.of(10000),
                    collectorSerde,
                    "stock-summaries")
            .to(windowedSerde, collectorSerde, "transaction-summary");

    System.out.println("Starting StockStreams Example");
    KafkaStreams kafkaStreams = new KafkaStreams(kStreamBuilder, streamingConfig);
    kafkaStreams.start();
    System.out.println("Now started StockStreams Example");
}