/**
 * Serializes the given JSON message and publishes it to the
 * {@code airlineStatsEvents} Kafka topic.
 *
 * <p>Once indexing has been stopped ({@code keepIndexing == false}) the avro
 * data stream is closed (if still open) and nothing is sent.
 *
 * @param message JSON payload to publish
 * @throws IOException if closing the stream or encoding the payload fails
 */
private void publish(JsonNode message) throws IOException {
  if (!keepIndexing) {
    // BUG FIX: a previous call may already have closed the stream and nulled
    // the field; guard against a NullPointerException on the second call.
    if (avroDataStream != null) {
      avroDataStream.close();
      avroDataStream = null;
    }
    return;
  }
  KeyedMessage<String, byte[]> data =
      new KeyedMessage<String, byte[]>("airlineStatsEvents", message.toString().getBytes("UTF-8"));
  producer.send(data);
}
/**
 * Creates a meetup RSVP stream that publishes events to the default local
 * Kafka broker using the byte-array ({@code DefaultEncoder}) serializer and
 * leader-only acks ({@code request.required.acks=1}).
 *
 * @param schemaFile file holding the Pinot schema for the stream
 * @throws IOException if the schema file cannot be read
 * @throws URISyntaxException propagated from schema loading
 */
public MeetupRsvpStream(File schemaFile) throws IOException, URISyntaxException {
  schema = Schema.fromFile(schemaFile);

  Properties producerProps = new Properties();
  producerProps.put("metadata.broker.list", KafkaStarterUtils.DEFAULT_KAFKA_BROKER);
  producerProps.put("serializer.class", "kafka.serializer.DefaultEncoder");
  producerProps.put("request.required.acks", "1");

  producer = new Producer<String, byte[]>(new ProducerConfig(producerProps));
}
/**
 * Stops the indexing loop and releases all held resources: the avro data
 * stream reference, the Kafka producer, and the executor service.
 */
public void shutdown() {
  // Signal the publishing loop to stop first.
  keepIndexing = false;
  avroDataStream = null;

  // Release the Kafka producer and drop the reference.
  producer.close();
  producer = null;

  service.shutdown();
}
/**
 * Generates {@code numOfMessages} random messages, publishes them to the
 * given Kafka topic (keyed by their index), and returns the generated list.
 *
 * @param topic         destination Kafka topic
 * @param numOfMessages number of messages to generate and send
 * @return the messages that were written, in send order
 */
private static List<Message> writeKafka(String topic, int numOfMessages) {
  List<Message> generated = new ArrayList<Message>(numOfMessages);
  List<KeyedMessage<String, String>> batch =
      new ArrayList<KeyedMessage<String, String>>(numOfMessages);

  for (int index = 0; index < numOfMessages; index++) {
    Message message = new Message(RANDOM.nextInt());
    generated.add(message);
    batch.add(new KeyedMessage<String, String>(topic, Integer.toString(index), gson.toJson(message)));
  }

  Properties producerProps = cluster.getProps();
  producerProps.setProperty("serializer.class", StringEncoder.class.getName());
  producerProps.setProperty("key.serializer.class", StringEncoder.class.getName());

  Producer<String, String> producer =
      new Producer<String, String>(new ProducerConfig(producerProps));
  try {
    producer.send(batch);
  } finally {
    // Always release the producer, even if the send fails.
    producer.close();
  }
  return generated;
}
public static void main(String[] args) throws InterruptedException { //Creating shared object BlockingQueue sharedQueue = new LinkedBlockingQueue(); //Creating Producer and Consumer Thread Producer producer = new Producer(sharedQueue); Thread prodThread = new Thread(producer); Thread consThread = new Thread(new Consumer(sharedQueue)); //Starting producer and Consumer thread prodThread.start(); consThread.start(); producer.pushItem(2000); }
/**
 * Builds an EasyMock producer whose every {@code send(...)} call throws a
 * RuntimeException; {@code close()} is allowed any number of times.
 *
 * @return the replayed mock, ready for use
 */
private Producer mockProducerSendThrowsException() {
  Producer failingProducer = EasyMock.createMock(Producer.class);

  // Every send attempt fails with the dummy exception.
  failingProducer.send((KeyedMessage) EasyMock.anyObject());
  EasyMock.expectLastCall().andThrow(new RuntimeException("dummyException")).anyTimes();

  // close() may be invoked zero or more times.
  failingProducer.close();
  EasyMock.expectLastCall().anyTimes();

  EasyMock.replay(failingProducer);
  return failingProducer;
}
/**
 * Receives a page of query results: lazily captures the column types on the
 * first page that carries column metadata, then converts each row into a
 * name-to-value map (null values skipped) and publishes it to Kafka with a
 * monotonically increasing key.
 */
@Override
public void addResults(QueryStatusInfo statusInfo, QueryData data) {
  // Column types arrive with the first page that has column metadata.
  if (types.get() == null && statusInfo.getColumns() != null) {
    types.set(getTypes(statusInfo.getColumns()));
  }

  if (data.getData() == null) {
    return;
  }
  checkState(types.get() != null, "Data without types received!");

  List<Column> columns = statusInfo.getColumns();
  for (List<Object> row : data.getData()) {
    ImmutableMap.Builder<String, Object> record = ImmutableMap.builder();
    for (int column = 0; column < row.size(); column++) {
      Object converted = convertValue(row.get(column), types.get().get(column));
      // ImmutableMap rejects nulls, so null values are simply omitted.
      if (converted != null) {
        record.put(columns.get(column).getName(), converted);
      }
    }
    producer.send(new KeyedMessage<>(topicName, count.getAndIncrement(), record.build()));
  }
}
/**
 * Creates the airline data stream: opens the avro input stream, configures a
 * Kafka producer against the default local broker (byte-array serializer,
 * leader-only acks), and allocates the single-threaded executor that will
 * drive publishing.
 *
 * @param pinotSchema Pinot schema describing the records
 * @param avroFile    avro file to stream records from
 * @throws FileNotFoundException if the avro file is missing
 * @throws IOException           if the stream cannot be created
 */
public AirlineDataStream(Schema pinotSchema, File avroFile) throws FileNotFoundException, IOException {
  this.pinotSchema = pinotSchema;
  this.avroFile = avroFile;
  createStream();

  Properties properties = new Properties();
  properties.put("metadata.broker.list", KafkaStarterUtils.DEFAULT_KAFKA_BROKER);
  properties.put("serializer.class", "kafka.serializer.DefaultEncoder");
  properties.put("request.required.acks", "1");
  ProducerConfig producerConfig = new ProducerConfig(properties);
  producer = new Producer<String, byte[]>(producerConfig);

  service = Executors.newFixedThreadPool(1);

  // FIX: corrected "Offine" -> "Offline" in the status message.
  Quickstart.printStatus(Quickstart.Color.YELLOW,
      "***** Offline data has max time as 16101, realtime will start consuming from time 16102 and increment time every 3000 events *****");
}
/**
 * Blocks until the producer thread signals completion via the shutdown
 * latch, then closes the producer. If the wait is interrupted the producer
 * is NOT closed and the interrupt status is restored for the caller.
 */
public void awaitShutdown() {
  try {
    shutdownComplete.await();
    producer.close();
    logger.info("Producer thread " + threadName + " shutdown complete");
  } catch (InterruptedException ie) {
    // BUG FIX: restore the interrupt status instead of swallowing it, so
    // callers further up the stack can observe the interruption.
    Thread.currentThread().interrupt();
    logger.warn("Interrupt during shutdown of ProducerThread", ie);
  }
}
}
/**
 * Builds an EasyMock producer whose first two {@code send(...)} calls throw
 * a RuntimeException and whose third call succeeds; {@code close()} is
 * allowed any number of times.
 *
 * @return the replayed mock, ready for use
 */
private Producer mockProducerThirdSendSucceed() {
  Producer flakyProducer = EasyMock.createMock(Producer.class);

  // First two send attempts fail...
  flakyProducer.send((KeyedMessage) EasyMock.anyObject());
  EasyMock.expectLastCall().andThrow(new RuntimeException("dummyException")).times(2);

  // ...the third succeeds.
  flakyProducer.send((KeyedMessage) EasyMock.anyObject());
  EasyMock.expectLastCall().times(1);

  flakyProducer.close();
  EasyMock.expectLastCall().anyTimes();

  EasyMock.replay(flakyProducer);
  return flakyProducer;
}
/**
 * Drains the producer data channel, forwarding each message to Kafka until
 * the shutdown sentinel message is received. Any failure is logged as fatal;
 * in all cases the shutdown latch is counted down on exit.
 */
public void run() {
  try {
    while (true) {
      KeyedMessage<byte[], byte[]> data = producerDataChannel.receiveRequest();
      if (data.equals(shutdownMessage)) {
        break;
      }
      producer.send(data);
      if (logger.isDebugEnabled()) {
        // BUG FIX: the original used "Sending message %s".format(arg), which
        // invokes the STATIC String.format through an instance — the literal
        // receiver is discarded and the message bytes become the format
        // string (no prefix; crashes on '%' in the payload). Format properly.
        logger.debug(String.format("Sending message %s", new String(data.message())));
      }
    }
    logger.info("Producer thread " + threadName + " finished running");
  } catch (Throwable t) {
    logger.fatal("Producer thread failure due to ", t);
  } finally {
    // Always signal shutdown completion, even on failure.
    shutdownComplete.countDown();
  }
}
/**
 * Factory method for the Kafka producer; {@code protected} so subclasses
 * (e.g. tests) can substitute their own implementation.
 *
 * @param props producer configuration properties
 * @return a producer built from the given properties
 */
protected Producer createProducer(Properties props) {
  ProducerConfig config = new ProducerConfig(props);
  return new Producer(config);
}
/**
 * {@inheritDoc}
 *
 * <p>Closes the Kafka producer if one was created; safe to call when the
 * producer was never initialized.
 */
@Override
public void destroy() throws StreamingException {
  if (producer == null) {
    return;
  }
  producer.close();
}
/**
 * Serializes the event's properties to Avro binary (restricted to the
 * configured source fields) and publishes the payload to the Kafka topic
 * named {@code <project>_<collection>}.
 *
 * @param event the event to persist
 * @throws RuntimeException if serialization or the Kafka send fails
 */
@Override
public void store(Event event) {
  GenericDatumWriter writer = new SourceFilteredRecordWriter(
      event.properties().getSchema(), GenericData.get(), sourceFields);
  ByteBuf buffer = Unpooled.buffer(100);
  BinaryEncoder encoder = EncoderFactory.get()
      .directBinaryEncoder(new ByteBufOutputStream(buffer), null);
  try {
    writer.write(event.properties(), encoder);
  } catch (Exception e) {
    throw new RuntimeException("Couldn't serialize event", e);
  }

  // BUG FIX: buffer.array() returns the WHOLE capacity-sized backing array,
  // so the published record carried trailing garbage bytes beyond the
  // written Avro payload. Copy out exactly the readable bytes instead.
  byte[] payload = new byte[buffer.readableBytes()];
  buffer.readBytes(payload);

  try {
    producer.send(new KeyedMessage<>(event.project() + "_" + event.collection(), payload));
  } catch (FailedToSendMessageException e) {
    throw new RuntimeException("Couldn't send event to Kafka", e);
  }
}
/**
 * Creates the Kafka-backed event store.
 *
 * <p>Builds the set of source fields (union of dependent-field keys and
 * constant-field names), configures a Kafka producer from the node list,
 * starts a Curator client, best-effort creates the ZooKeeper offset path,
 * and joins leader election on that path (this instance is the listener).
 *
 * @param config          Kafka connection/serializer configuration (non-null)
 * @param fieldDependency declared field dependencies used to derive sourceFields
 */
@Inject
public KafkaEventStore(@Named("event.store.kafka") KafkaConfig config, FieldDependencyBuilder.FieldDependency fieldDependency) {
    config = checkNotNull(config, "config is null");
    // Fields that must be carried through: keys of dependent fields plus the
    // names of all constant fields.
    this.sourceFields = Sets.union(fieldDependency.dependentFields.keySet(),
        fieldDependency.constantFields.stream().map(SchemaField::getName)
            .collect(Collectors.toSet()));

    Properties props = new Properties();
    // Comma-joined host:port list of all configured Kafka nodes.
    props.put("metadata.broker.list",
        config.getNodes().stream().map(HostAndPort::toString).collect(Collectors.joining(",")));
    props.put("serializer.class", config.SERIALIZER);
    ProducerConfig producerConfig = new ProducerConfig(props);
    this.producer = new Producer(producerConfig);

    CuratorFramework client = CuratorFrameworkFactory.newClient(config.getZookeeperNode().toString(),
        new ExponentialBackoffRetry(1000, 3));
    client.start();
    try {
        // Best-effort: ensure the offset znode exists before leader election.
        // NOTE(review): failures are only logged, not rethrown — construction
        // proceeds even if the path could not be created.
        if (client.checkExists().forPath(ZK_OFFSET_PATH) == null)
            client.create().forPath(ZK_OFFSET_PATH);
    } catch (Exception e) {
        LOGGER.error(e, format("Couldn't create event offset path %s", ZK_OFFSET_PATH));
    }
    // Join leader election on the offset path; this object receives
    // leadership callbacks.
    new LeaderSelector(client, ZK_OFFSET_PATH, this).start();
}