@Override
protected void flush() {
    if (this.producer != null) {
        producer.flush();
    }
}
@Override
public void flush() {
    kafkaProducer.flush();
    if (transactionalId != null) {
        flushNewPartitions();
    }
}
@Override
public InteractiveProducer<K, V> write(ProducerRecord<K, V> record) {
    kafkaProducer.send(record);
    kafkaProducer.flush();
    return this;
}
@Override
public InteractiveProducer<K, V> write(ProducerRecord<K, V> record) {
    producer.send(record);
    producer.flush();
    return this;
}
@Override
public void close(boolean abort) throws IOException {
    if (abort) {
        LOG.info("Aborting is set to TRUE, Closing writerId [{}] without flush.", writerId);
        producer.close(0, TimeUnit.MICROSECONDS);
        return;
    } else {
        LOG.info("Flushing Kafka Producer with writerId [{}]", writerId);
        producer.flush();
        LOG.info("Closing WriterId [{}]", writerId);
        producer.close();
    }
    LOG.info("Closed WriterId [{}] Delivery semantic [{}], Topic[{}], Total sent Records [{}], Total Lost Records [{}]",
            writerId, writeSemantic, topic, sentRecords, lostRecords.get());
    checkExceptions();
}
@Override
public synchronized void stop() {
    try {
        if (this.producer != null) {
            try {
                this.producer.flush();
            } finally {
                this.producer.close();
            }
        }
    } finally {
        this.producer = null;
        super.stop();
    }
}
    _producer.flush();
} catch (InterruptException ie) {
    if (_shutdown) {
@Override
protected void storeRecord(HistoryRecord record) throws DatabaseHistoryException {
    if (this.producer == null) {
        throw new IllegalStateException("No producer is available. Ensure that 'start()' is called before storing database history records.");
    }
    logger.trace("Storing record into database history: {}", record);
    try {
        ProducerRecord<String, String> produced = new ProducerRecord<>(topicName, PARTITION, null, record.toString());
        Future<RecordMetadata> future = this.producer.send(produced);
        // Flush and then wait ...
        this.producer.flush();
        RecordMetadata metadata = future.get(); // block forever since we have to be sure this gets recorded
        if (metadata != null) {
            logger.debug("Stored record in topic '{}' partition {} at offset {} ",
                    metadata.topic(), metadata.partition(), metadata.offset());
        }
    } catch (InterruptedException e) {
        logger.trace("Interrupted before record was written into database history: {}", record);
        Thread.interrupted();
        throw new DatabaseHistoryException(e);
    } catch (ExecutionException e) {
        throw new DatabaseHistoryException(e);
    }
}
producer.flush();
ProducerRecord<K, V> record = messageSupplier.get();
producer.send(record);
producer.flush();
LOGGER.debug("Producer {}: sent message {}", producerName, record);
producer.flush();
producer.close();
private static void produceInput(String bootstrapServers) {
    final Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);

    final KafkaProducer<Integer, Integer> producer = new KafkaProducer<>(props);
    IntStream.range(0, 100)
            .mapToObj(val -> new ProducerRecord<>(SumLambdaExample.NUMBERS_TOPIC, val, val))
            .forEach(producer::send);
    producer.flush();
}
private static void produceInputs(String bootstrapServers, String schemaRegistryUrl) throws IOException {
    final String[] users = {"erica", "bob", "joe", "damian", "tania", "phil", "sam", "lauren", "joseph"};

    final Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, io.confluent.kafka.serializers.KafkaAvroSerializer.class);
    props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
    final KafkaProducer<String, WikiFeed> producer = new KafkaProducer<>(props);

    final Random random = new Random();
    IntStream.range(0, random.nextInt(100))
            .mapToObj(value -> new WikiFeed(users[random.nextInt(users.length)], true, "content"))
            .forEach(record -> producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, null, record)));

    producer.flush();
}
/**
 * Put messages to a Kafka topic.
 *
 * <p>Sends each message synchronously.
 *
 * @param topic The topic to send messages to.
 * @param messages The messages to send.
 * @param properties The properties to use with Kafka.
 * @return Metadata about all the records written to Kafka.
 */
private List<RecordMetadata> putMessages(String topic, List<String> messages, Properties properties) {
    LOG.debug("KAFKA_PUT sending messages; topic={}, count={}", topic, messages.size());
    List<RecordMetadata> records = new ArrayList<>();
    try (KafkaProducer<String, String> producer = new KafkaProducer<>(properties)) {
        List<Future<RecordMetadata>> futures = new ArrayList<>();

        // send each message
        for (String msg : messages) {
            Future<RecordMetadata> future = producer.send(new ProducerRecord<>(topic, msg));
            futures.add(future);
        }

        // wait for the sends to complete
        for (Future<RecordMetadata> future : futures) {
            RecordMetadata record = waitForResponse(future, properties);
            records.add(record);
        }

        producer.flush();
    }
    return records;
}
@Override
public void run() {
    try {
        for (int i = 1; i <= 10; i++) {
            String message = String.format("%s,%.3f,%d", "msg" + i, i * 2000f, i);
            producer.send(new ProducerRecord<String, String>(topic, message));
            producer.flush();
            Thread.sleep(100);
        }
    } catch (Throwable throwable) {
        System.out.printf("%s", throwable.fillInStackTrace());
    } finally {
        producer.close();
    }
}
private void createPageViews(final KafkaProducer<String, GenericRecord> producer,
                             final GenericRecordBuilder pageViewBuilder,
                             final int count,
                             final String page) {
    pageViewBuilder.set("page", page);
    for (int i = 0; i < count; i++) {
        pageViewBuilder.set("user", users[random.nextInt(users.length)]);
        producer.send(new ProducerRecord<>(TopArticlesLambdaExample.PAGE_VIEWS, pageViewBuilder.build()));
    }
    producer.flush();
}
private static void sendPlayEvents(final int count,
                                   final Song song,
                                   final KafkaProducer<String, PlayEvent> producer) {
    for (int i = 0; i < count; i++) {
        producer.send(new ProducerRecord<>(
                KafkaMusicExample.PLAY_EVENTS,
                "UK",
                new PlayEvent(song.getId(), 60000L)));
    }
    producer.flush();
}