/**
 * Builds a {@link SimpleConsumer} connected to the given broker with the
 * supplied socket timeout, fetch buffer size, and client id.
 */
@Override
public SimpleConsumer buildSimpleConsumer(String host, int port, int soTimeout, int bufferSize, String clientId) {
    SimpleConsumer simpleConsumer = new SimpleConsumer(host, port, soTimeout, bufferSize, clientId);
    return simpleConsumer;
}
}
/**
 * Creates a {@link SimpleConsumer} for the given broker using this
 * instance's configured socket timeout, buffer size, and client name.
 */
private SimpleConsumer createSimpleConsumer(String host, int port) {
    return new SimpleConsumer(host, port, socketTimeoutMillis, bufferSize, clientName);
}
/**
 * Factory for a broker-specific {@link SimpleConsumer}; timeout, buffer
 * size, and client name come from this instance's configuration.
 */
private SimpleConsumer createSimpleConsumer(String host, int port) {
    final SimpleConsumer simpleConsumer =
            new SimpleConsumer(host, port, this.socketTimeoutMillis, this.bufferSize, this.clientName);
    return simpleConsumer;
}
/**
 * Creates a {@link SimpleConsumer} for the given broker.
 *
 * The socket timeout and fetch buffer size were previously unexplained
 * inline magic numbers; they are named here for readability. Values are
 * unchanged: 100 s socket timeout, 64 KiB fetch buffer.
 */
private SimpleConsumer createConsumer(String host, int port, String clientName) {
    final int socketTimeoutMs = 100000;     // 100 seconds
    final int fetchBufferBytes = 64 * 1024; // 64 KiB
    return new SimpleConsumer(host, port, socketTimeoutMs, fetchBufferBytes, clientName);
}
/**
 * Opens the initial connection: resolves the seed broker address at the
 * current index and creates a {@link SimpleConsumer} against it.
 */
@Override
protected void initializeConnections() {
    final URL seedBrokerUrl = NetUtils.getCorrectHostnamePort(seedBrokerAddresses[currentContactSeedBrokerIndex]);
    consumer = new SimpleConsumer(seedBrokerUrl.getHost(), seedBrokerUrl.getPort(), soTimeout, bufferSize, dummyClientId);
}
/**
 * Re-establish broker connection using the next available seed broker address.
 * Advances the seed broker index (wrapping back to 0 past the end) and
 * replaces the consumer with one connected to the newly selected broker.
 */
private void useNextAddressAsNewContactSeedBroker() {
    // Modulo advance is equivalent to the increment-and-reset wrap-around.
    currentContactSeedBrokerIndex = (currentContactSeedBrokerIndex + 1) % seedBrokerAddresses.length;
    URL nextContactUrl = NetUtils.getCorrectHostnamePort(seedBrokerAddresses[currentContactSeedBrokerIndex]);
    this.consumer = new SimpleConsumer(nextContactUrl.getHost(), nextContactUrl.getPort(), soTimeout, bufferSize, dummyClientId);
}
/**
 * Creates a new {@link SimpleConsumer} for the given broker, tagging the
 * client id with the connector id and the current node's identifier.
 */
private SimpleConsumer createConsumer(HostAddress host) {
    log.info("Creating new Consumer for %s", host);
    String clientId = format("presto-kafka-%s-%s", connectorId, nodeManager.getCurrentNode().getNodeIdentifier());
    return new SimpleConsumer(host.getHostText(), host.getPort(), connectTimeoutMillis, bufferSizeBytes, clientId);
}
}
/**
 * Returns a {@link SimpleConsumer} connected to the leader broker for the
 * given partition, caching it in the {@code consumer} field. Returns
 * {@code null} when no leader can be determined or any error occurs.
 */
private SimpleConsumer findLeaderConsumer(int partition) {
    try {
        // Reuse the cached consumer if one already exists.
        if (consumer != null) {
            return consumer;
        }
        PartitionMetadata metadata = findLeader(partition);
        if (metadata == null) {
            // Leader unknown: clear cached state so a later call retries from scratch.
            leaderBroker = null;
            consumer = null;
            return null;
        }
        leaderBroker = metadata.leader();
        consumer = new SimpleConsumer(leaderBroker.host(), leaderBroker.port(),
                config.socketTimeoutMs, config.socketReceiveBufferBytes, config.clientId);
        return consumer;
    } catch (Exception e) {
        // NOTE(review): failures are logged and surfaced to the caller only as a
        // null return; callers must handle null.
        LOG.error(e.getMessage(), e);
    }
    return null;
}
// NOTE(review): fragment — the enclosing try and if blocks are not closed in
// this view; the full method lies outside this chunk.
try {
    if (consumer == null) {
        // NOTE(review): the first assignment is immediately overwritten by the
        // second, so the brokerHost-based consumer is created and then leaked
        // (never close()d). Looks like a merge/edit artifact — verify intent
        // against the full file.
        consumer = new SimpleConsumer(brokerHost.getHost(), brokerHost.getPort(), config.socketTimeoutMs, config.socketReceiveBufferBytes, config.clientId);
        consumer = new SimpleConsumer(host.getHost(), host.getPort(), config.socketTimeoutMs, config.socketReceiveBufferBytes, config.clientId);
/**
 * Lazily creates the {@link SimpleConsumer} for this topic/partition,
 * first blocking until a leader broker has been resolved.
 *
 * @throws InterruptedException if interrupted while finding a new leader
 */
private void ensureConsumer(Broker leader) throws InterruptedException {
    // Guard clause: nothing to do once the consumer exists.
    if (consumer != null) {
        return;
    }
    // Keep retrying leader discovery until one is known.
    while (leaderBroker == null) {
        leaderBroker = findNewLeader(leader);
    }
    log.info(
        "making SimpleConsumer[%s][%s], leader broker[%s:%s]",
        topic, partitionId, leaderBroker.host(), leaderBroker.port()
    );
    consumer = new SimpleConsumer(leaderBroker.host(), leaderBroker.port(), SO_TIMEOUT_MILLIS, BUFFER_SIZE, clientId);
}
// NOTE(review): fragment — the surrounding loop and if/else are outside this
// view. The two consecutive identical assignments create two consumers; the
// first is leaked without close(). Likely a duplication artifact — verify
// against the full file.
consumer = new SimpleConsumer(broker.host(), broker.port(), soTimeout, bufferSize, clientId);
consumer = new SimpleConsumer(broker.host(), broker.port(), soTimeout, bufferSize, clientId);
continue; // retry
} else {
// NOTE(review): fragment — the try block opened here is closed outside this view.
try {
    log.info("Finding new leader from Kafka brokers, try broker [%s]", broker.toString());
    // Short-lived consumer used only to issue the topic-metadata (leader lookup) request.
    consumer = new SimpleConsumer(broker.getHostText(), broker.getPort(), SO_TIMEOUT_MILLIS, BUFFER_SIZE, leaderLookupClientId);
    TopicMetadataResponse resp = consumer.send(new TopicMetadataRequest(Collections.singletonList(topic)));
/**
 * Returns a cached {@link SimpleConsumer} for the given "host:port" broker
 * string, creating and caching one on first use. Returns {@code null} when
 * the string contains no ':' separator (historical behavior, preserved).
 */
private SimpleConsumer getSimpleConsumer(String broker) {
    SimpleConsumer cached = brokerConsumer.get(broker);
    if (cached != null) {
        return cached;
    }
    int sep = broker.indexOf(":");
    if (sep < 0) {
        // Malformed broker string: no consumer is created or cached.
        return null;
    }
    String brokerHost = broker.substring(0, sep);
    int brokerPort = Integer.parseInt(broker.substring(sep + 1));
    SimpleConsumer created = new SimpleConsumer(brokerHost, brokerPort, 60000, 64 * 1024, "metadataFetcher-" + brokerHost);
    brokerConsumer.put(broker, created);
    return created;
}
/**
 * Creates a {@link SimpleConsumer} for the given broker, reading timeout,
 * buffer size, and client name from the job configuration.
 */
public SimpleConsumer createSimpleConsumer(JobContext context, String host, int port) {
    int timeout = CamusJob.getKafkaTimeoutValue(context);
    int fetchBufferSize = CamusJob.getKafkaBufferSize(context);
    String kafkaClientName = CamusJob.getKafkaClientName(context);
    return new SimpleConsumer(host, port, timeout, fetchBufferSize, kafkaClientName);
}
/**
 * Cache loader: creates a new {@link SimpleConsumer} for the given host.
 * NOTE(review): the 1024-byte fetch buffer is unusually small for Kafka
 * fetches — confirm this is intentional.
 */
@Override
public SimpleConsumer load(HostAndPort host) throws Exception {
    LOGGER.info("Creating new Consumer for {}", host);
    String hostText = host.getHostText();
    int port = host.getPort();
    return new SimpleConsumer(hostText, port, 10000, 1024, "consumer");
}
}
/**
 * Creates a consumer according to the configured consumer type: a real
 * broker-backed {@link SimpleConsumer} for REGULAR, or the pre-supplied
 * mock instance for MOCK.
 *
 * @throws RuntimeException if the consumer type is unrecognized
 */
@Override
public SimpleConsumer createSimpleConsumer(JobContext context, String host, int port) {
    switch (consumerType) {
        case REGULAR:
            int timeout = CamusJob.getKafkaTimeoutValue(context);
            int fetchBufferSize = CamusJob.getKafkaBufferSize(context);
            String kafkaClientName = CamusJob.getKafkaClientName(context);
            return new SimpleConsumer(host, port, timeout, fetchBufferSize, kafkaClientName);
        case MOCK:
            return consumer;
        default:
            throw new RuntimeException("consumer type not found");
    }
}
/**
 * Refreshes topic metadata and reconnects {@code simpleConsumer} to the
 * current leader of this request's partition. Best-effort: on any failure
 * the existing consumer is left in place.
 *
 * Fixes: (1) the caught exception was logged via getMessage() only,
 * discarding the stack trace; (2) {@code topicsMetadata().get(0)} threw
 * IndexOutOfBoundsException when the broker returned no metadata; (3) a
 * null leader (e.g. mid-election) caused an NPE.
 */
private void refreshTopicMetadata() {
    TopicMetadataRequest request = new TopicMetadataRequest(Collections.singletonList(kafkaRequest.getTopic()));
    TopicMetadataResponse response;
    try {
        response = simpleConsumer.send(request);
    } catch (Exception e) {
        // Pass the throwable so the full stack trace is logged, not just the message.
        log.error("Exception caught when refreshing metadata for topic " + request.topics().get(0) + ": "
                + e.getMessage(), e);
        return;
    }
    if (response.topicsMetadata().isEmpty()) {
        // Broker returned no metadata for this topic; keep the current consumer.
        log.error("No topic metadata returned for topic " + kafkaRequest.getTopic());
        return;
    }
    TopicMetadata metadata = response.topicsMetadata().get(0);
    for (PartitionMetadata partitionMetadata : metadata.partitionsMetadata()) {
        if (partitionMetadata.partitionId() == kafkaRequest.getPartition()) {
            if (partitionMetadata.leader() == null) {
                // Leader currently unknown (e.g. mid-election); keep the current consumer.
                log.error("No leader for topic " + kafkaRequest.getTopic()
                        + " partition " + kafkaRequest.getPartition());
                return;
            }
            simpleConsumer = new SimpleConsumer(partitionMetadata.leader().host(), partitionMetadata.leader().port(),
                    CamusJob.getKafkaTimeoutValue(context), CamusJob.getKafkaBufferSize(context),
                    CamusJob.getKafkaClientName(context));
            break;
        }
    }
}
/**
 * Returns a cached {@link SimpleConsumer} keyed by "host:port", creating
 * and caching one on first request.
 */
public SimpleConsumer getConsumer(String host, int port) {
    String cacheKey = host + ":" + port;
    SimpleConsumer consumer = consumerMap.get(cacheKey);
    if (consumer == null) {
        consumer = new SimpleConsumer(host, port, KafkaOffsetConstants.TIMEOUT, KafkaOffsetConstants.BUFFERSIZE, KafkaOffsetConstants.CLIENT_NAME);
        LOGGER.info("Created a new Kafka Consumer with host {}:{} ", host, port);
        consumerMap.put(cacheKey, consumer);
    }
    return consumer;
}
/**
 * Returns a cached {@link SimpleConsumer} for the given "host:port" server
 * string, creating and caching one on first use.
 */
private SimpleConsumer getConsumerClient(String kafkaServer) {
    SimpleConsumer existing = consumers.get(kafkaServer);
    if (existing != null) {
        return existing;
    }
    // Split once instead of twice; parts[0] = host, parts[1] = port.
    String[] parts = kafkaServer.split(":");
    String host = parts[0];
    int port = Integer.parseInt(parts[1]);
    SimpleConsumer created = new SimpleConsumer(host, port, 100000, 64 * 1024, CLIENT_ID);
    consumers.put(kafkaServer, created);
    return created;
}
// NOTE(review): fragment — the declaration of 'consumer' and the surrounding
// test method are outside this view.
// Connects to the first broker of the embedded cluster on loopback.
consumer = new SimpleConsumer("127.0.0.1", cluster.getKafkaServerPort(0), DEFAULT_SO_TIMEOUT, DEFAULT_BUFFER_SIZE, "foo");
// Builds a fetch request for partition 0 of the sink topic, starting at offset 0.
FetchRequest req = new FetchRequestBuilder().addFetch(sinkTopic, 0, 0, DEFAULT_BUFFER_SIZE).build();