@SuppressWarnings("unchecked") private static KafkaStream<byte[], byte[]> createMockStream(BlockingQueue<FetchedDataChunk> queue) { KafkaStream<byte[], byte[]> stream = (KafkaStream<byte[], byte[]>) Mockito.mock(KafkaStream.class); ConsumerIterator<byte[], byte[]> it = new ConsumerIterator<>(queue, -1, new DefaultDecoder(new VerifiableProperties()), new DefaultDecoder(new VerifiableProperties()), "clientId"); Mockito.when(stream.iterator()).thenReturn(it); return stream; }
@Override
public void init(VerifiableProperties props) {
    if (!initialized) {
        this.props = new Properties();
        if (props.containsKey(CONFIG_POLLING_INTERVAL)) {
            this.pollingIntervalSeconds = props.getInt(CONFIG_POLLING_INTERVAL);
        } else {
            this.pollingIntervalSeconds = 10;
        }
        this.brokerId = Integer.parseInt(props.getProperty("broker.id"));
        log.info("Building ConsumerGroupReporter: polling.interval=" + pollingIntervalSeconds);
        Enumeration<Object> keys = props.props().keys();
        while (keys.hasMoreElements()) {
            String key = keys.nextElement().toString();
            if (key.startsWith("kafka.metrics.")) {
                // strip the "kafka.metrics." prefix (14 characters)
                String subKey = key.substring(14);
                this.props.put(subKey, props.props().get(key));
                log.info("Building ConsumerGroupReporter: " + subKey + "=" + this.props.get(subKey));
            }
        }
        initialized = true;
        this.underlying = new X(Metrics.defaultRegistry());
        startReporter(pollingIntervalSeconds);
    }
}
@Override
public synchronized void init(VerifiableProperties props) {
    if (!initialized) {
        KafkaMetricsConfig metricsConfig = new KafkaMetricsConfig(props);
        graphiteHost = props.getString("kafka.graphite.metrics.host", GRAPHITE_DEFAULT_HOST);
        graphitePort = props.getInt("kafka.graphite.metrics.port", GRAPHITE_DEFAULT_PORT);
        metricPrefix = props.getString("kafka.graphite.metrics.group", GRAPHITE_DEFAULT_PREFIX);
        String excludeRegex = props.getString("kafka.graphite.metrics.exclude.regex", null);
        metricDimensions = Dimension.fromProperties(props.props(), "kafka.graphite.dimension.enabled.");
        LOG.debug("Initialize GraphiteReporter [{},{},{}]", graphiteHost, graphitePort, metricPrefix);
        if (excludeRegex != null) {
            LOG.debug("Using regex [{}] for GraphiteReporter", excludeRegex);
            metricPredicate = new FilterMetricPredicate(excludeRegex);
        }
        reporter = buildGraphiteReporter();
        if (props.getBoolean("kafka.graphite.metrics.reporter.enabled", false)) {
            initialized = true;
            startReporter(metricsConfig.pollingIntervalSecs());
            LOG.debug("GraphiteReporter started.");
        }
    }
}
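// A hedged example (not from the original source) of the properties read by the
// init above; host, port, and regex values are purely illustrative.
Properties graphiteProps = new Properties();
graphiteProps.put("kafka.graphite.metrics.reporter.enabled", "true");
graphiteProps.put("kafka.graphite.metrics.host", "graphite.example.com"); // illustrative
graphiteProps.put("kafka.graphite.metrics.port", "2003");                 // illustrative
graphiteProps.put("kafka.graphite.metrics.group", "kafka");
graphiteProps.put("kafka.graphite.metrics.exclude.regex", ".*UnderReplicated.*"); // illustrative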
@Override
public synchronized void init(VerifiableProperties props) {
    if (!initialized) {
        KafkaMetricsConfig metricsConfig = new KafkaMetricsConfig(props);
        gangliaHost = props.getString("kafka.ganglia.metrics.host", GANGLIA_DEFAULT_HOST);
        gangliaPort = props.getInt("kafka.ganglia.metrics.port", GANGLIA_DEFAULT_PORT);
        gangliaGroupPrefix = props.getString("kafka.ganglia.metrics.group", GANGLIA_DEFAULT_PREFIX);
        String regex = props.getString("kafka.ganglia.metrics.exclude.regex", null);
        if (regex != null) {
            predicate = new RegexMetricPredicate(regex);
        }
        try {
            reporter = new GangliaReporter(Metrics.defaultRegistry(), gangliaHost, gangliaPort,
                    gangliaGroupPrefix, predicate);
        } catch (IOException e) {
            LOG.error("Unable to initialize GangliaReporter", e);
        }
        if (props.getBoolean("kafka.ganglia.metrics.reporter.enabled", false)) {
            initialized = true;
            startReporter(metricsConfig.pollingIntervalSecs());
        }
    }
}
@Override
protected ProducerConfig customizeConfig(final ProducerConfig config) {
    final Properties props = config.props().props();
    props.setProperty("serializer.class", "kafka.serializer.DefaultEncoder");
    return new ProducerConfig(props);
}
public StringPartitioner(VerifiableProperties props) {
    this.numPartitions = Integer.parseInt(props.getProperty(Constants.Logging.NUM_PARTITIONS));
    Preconditions.checkArgument(this.numPartitions > 0,
            "numPartitions should be at least 1. Got %s", this.numPartitions);
}
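// A hedged sketch (not from the original source) of the matching partition()
// method, assuming the old untyped kafka.producer.Partitioner contract and
// string keys:
@Override
public int partition(Object key, int numPartitions) {
    // mask the sign bit so negative hash codes still map to a valid partition
    return (key.toString().hashCode() & Integer.MAX_VALUE) % numPartitions;
}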
@Override
protected ProducerConfig customizeConfig(final ProducerConfig config) {
    final Properties props = config.props().props();
    props.setProperty("serializer.class", "kafka.serializer.StringEncoder");
    return new ProducerConfig(props);
}
Properties props = new Properties();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "...");
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, io.confluent.kafka.serializers.KafkaAvroSerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, io.confluent.kafka.serializers.KafkaAvroSerializer.class);
props.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "...");
props.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);
VerifiableProperties vProps = new VerifiableProperties(props);
KafkaAvroDecoder decoder = new KafkaAvroDecoder(vProps);
MyLittleData data = (MyLittleData) decoder.fromBytes(input);
public void init(VerifiableProperties kafkaConfig) {
    if (!initialized) {
        initialized = true;
        this.config = kafkaConfig.props();
        this.builder = forRegistry(Metrics.defaultRegistry());
        builder.configure(config);
        underlying = builder.build();
        startReporter(underlying.getPollingIntervalSeconds());
    }
}
@Override
public void init(VerifiableProperties verifiableProperties) {
    if (!initialized) {
        // read the polling interval configured for Kafka metrics
        KafkaMetricsConfig metricsConfig = new KafkaMetricsConfig(verifiableProperties);
        // read the configured bind address and port
        bindAddress = verifiableProperties.getProperty("kafka.http.metrics.host");
        port = Integer.parseInt(verifiableProperties.getProperty("kafka.http.metrics.port"));
        enabled = Boolean.parseBoolean(verifiableProperties.getProperty("kafka.http.metrics.reporter.enabled"));
        // construct the metrics server
        metricsServer = new KafkaHttpMetricsServer(bindAddress, port);
        initialized = true;
        // start reporting on the configured interval
        startReporter(metricsConfig.pollingIntervalSecs());
    } else {
        LOG.error("Kafka Http Metrics Reporter already initialized");
    }
}
private void start(String topic) {
    consumer = Consumer.createJavaConsumerConnector(config);

    /* Tell Kafka how many threads will read each topic.
     * We have one topic and one thread. */
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(topic, 1);

    /* Use a decoder so Kafka converts messages to Strings. The charset can be
     * set via the "serializer.encoding" property; the default UTF-8 works for us. */
    StringDecoder decoder = new StringDecoder(new VerifiableProperties());

    /* Kafka returns a list of streams per topic. Here it's a single topic
     * with a list containing one stream. */
    stream = consumer.createMessageStreams(topicCountMap, decoder, decoder).get(topic).get(0);
}
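// A hedged sketch (not from the original source) of draining the stream created
// above; handleMessage is a hypothetical callback.
ConsumerIterator<String, String> it = stream.iterator();
while (it.hasNext()) {
    MessageAndMetadata<String, String> record = it.next();
    handleMessage(record.message()); // hypothetical handler
}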
public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
    _collector = spoutOutputCollector;
    Properties props = new Properties();
    props.put("zookeeper.connect", conf.get(OSMIngest.ZOOKEEPERS));
    props.put("group.id", groupId);
    props.put("zookeeper.sync.time.ms", "200");
    props.put("auto.commit.interval.ms", "1000");
    ConsumerConfig consumerConfig = new ConsumerConfig(props);
    ConsumerConnector consumer = Consumer.createJavaConsumerConnector(consumerConfig);
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(topic, 1);
    Map<String, List<KafkaStream<String, String>>> consumerMap =
            consumer.createMessageStreams(topicCountMap,
                    new StringDecoder(new VerifiableProperties()),
                    new StringDecoder(new VerifiableProperties()));
    List<KafkaStream<String, String>> streams = consumerMap.get(topic);
    KafkaStream<String, String> stream = null;
    if (streams.size() == 1) {
        stream = streams.get(0);
    } else {
        log.error("Streams should be of size 1");
    }
    kafkaIterator = stream.iterator();
}
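// A hedged sketch (not from the original source) of the matching nextTuple(),
// assuming the spout declares a single-field tuple. Note that hasNext() blocks
// on the old consumer unless consumer.timeout.ms is set.
@Override
public void nextTuple() {
    if (kafkaIterator.hasNext()) {
        _collector.emit(new Values(kafkaIterator.next().message()));
    }
}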
protected void initKafka() {
    if (handler == null) {
        log.error("Executor can't be null!");
        throw new RuntimeException("Executor can't be null!");
    }
    log.info("Consumer properties: " + properties);
    ConsumerConfig config = new ConsumerConfig(properties);
    isAutoCommitOffset = config.autoCommitEnable();
    log.info("Auto commit: " + isAutoCommitOffset);
    consumerConnector = Consumer.createJavaConsumerConnector(config);
    Map<String, Integer> topics = new HashMap<String, Integer>();
    topics.put(topic, streamNum);
    StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
    StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());
    Map<String, List<KafkaStream<String, String>>> streamsMap =
            consumerConnector.createMessageStreams(topics, keyDecoder, valueDecoder);
    streams = streamsMap.get(topic);
    log.info("Streams: " + streams);
    if (streams == null || streams.isEmpty()) {
        log.error("Streams are empty.");
        throw new IllegalArgumentException("Streams are empty.");
    }
    streamThreadPool = Executors.newFixedThreadPool(streamNum);
}
consumerProperties.put("consumer.timeout.ms", "500");
ConsumerConnector javaConsumerConnector =
        Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProperties));
StringDecoder stringDecoder = new StringDecoder(new VerifiableProperties(new Properties()));
Map<String, Integer> topicMap = new HashMap<>();
topicMap.put(topicName, 1);
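// A hedged continuation sketch (not from the original source): create the streams
// and poll until consumer.timeout.ms (500 ms here) elapses with no messages.
Map<String, List<KafkaStream<String, String>>> streams =
        javaConsumerConnector.createMessageStreams(topicMap, stringDecoder, stringDecoder);
ConsumerIterator<String, String> it = streams.get(topicName).get(0).iterator();
try {
    while (it.hasNext()) {
        String message = it.next().message();
        // process message...
    }
} catch (ConsumerTimeoutException e) {
    // no messages arrived within 500 ms; fall through and shut down
    javaConsumerConnector.shutdown();
}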
KafkaMetricsReporter$.MODULE$.startReporters(new VerifiableProperties(props));
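// A hedged example (not from the original source) of properties that might be
// passed above; the keys are the standard broker settings read by
// KafkaMetricsConfig, and the reporter class name is hypothetical.
Properties props = new Properties();
props.put("kafka.metrics.reporters", "com.example.MyMetricsReporter"); // hypothetical FQCN
props.put("kafka.metrics.polling.interval.secs", "10");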
final Map<String, Integer> topicCountMap = ImmutableMap.of(topic, 1);
final StringDecoder decoder = new StringDecoder(new VerifiableProperties());
final Map<String, List<KafkaStream<String, String>>> consumerMap =
        consumer.createMessageStreams(topicCountMap, decoder, decoder);
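// A hedged usage sketch (not from the original source): take the single stream
// for the topic and iterate its messages (KafkaStream is Iterable).
final KafkaStream<String, String> stream = consumerMap.get(topic).get(0);
for (final MessageAndMetadata<String, String> record : stream) {
    System.out.println(record.topic() + "/" + record.partition() + ": " + record.message());
}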