/**
 * Builds a Mockito mock of {@link KafkaStream} whose {@code iterator()} drains the
 * supplied queue of fetched chunks, decoding keys and values as raw byte arrays.
 *
 * @param queue the chunk queue backing the consumer iterator
 * @return a mocked stream wired to a real {@link ConsumerIterator}
 */
@SuppressWarnings("unchecked")
private static KafkaStream<byte[], byte[]> createMockStream(BlockingQueue<FetchedDataChunk> queue) {
    KafkaStream<byte[], byte[]> mockStream =
            (KafkaStream<byte[], byte[]>) Mockito.mock(KafkaStream.class);
    DefaultDecoder keyDecoder = new DefaultDecoder(new VerifiableProperties());
    DefaultDecoder valueDecoder = new DefaultDecoder(new VerifiableProperties());
    // -1 disables the consumer timeout; "clientId" matches the original fixture value.
    ConsumerIterator<byte[], byte[]> iterator =
            new ConsumerIterator<>(queue, -1, keyDecoder, valueDecoder, "clientId");
    Mockito.when(mockStream.iterator()).thenReturn(iterator);
    return mockStream;
}
// Configure a Confluent-Avro producer and decode a serialized record back into its
// generated SpecificRecord type.
Properties props = new Properties();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "...");
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, io.confluent.kafka.serializers.KafkaAvroSerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, io.confluent.kafka.serializers.KafkaAvroSerializer.class);
props.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "...");
// SPECIFIC_AVRO_READER_CONFIG makes the decoder return generated classes instead of GenericRecord.
props.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);
// BUG FIX: the declared type was "VerifiableProp", which does not exist; the decoder
// constructor takes kafka.utils.VerifiableProperties.
VerifiableProperties vProps = new VerifiableProperties(props);
KafkaAvroDecoder decoder = new KafkaAvroDecoder(vProps);
MyLittleData data = (MyLittleData) decoder.fromBytes(input);
/**
 * Connects the high-level consumer and opens a single String-decoded message stream
 * for the given topic, storing it in the {@code stream} field.
 *
 * @param topic the Kafka topic to consume from
 */
private void start(String topic) {
    consumer = Consumer.createJavaConsumerConnector(config);
    /* We tell Kafka how many threads will read each topic. We have one topic and one thread */
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    // FIX: autoboxing replaces the deprecated new Integer(1) constructor.
    topicCountMap.put(topic, 1);
    /* We will use a decoder to get Kafka to convert messages to Strings
     * valid property will be deserializer.encoding with the charset to use.
     * default is UTF8 which works for us */
    StringDecoder decoder = new StringDecoder(new VerifiableProperties());
    /* Kafka will give us a list of streams of messages for each topic.
     * In this case, its just one topic with a list of a single stream */
    stream = consumer.createMessageStreams(topicCountMap, decoder, decoder).get(topic).get(0);
}
/**
 * Storm spout lifecycle hook: builds a ZooKeeper-based Kafka consumer for the configured
 * topic and keeps an iterator over its single stream for later {@code nextTuple()} calls.
 */
public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
    _collector = spoutOutputCollector;
    Properties props = new Properties();
    props.put("zookeeper.connect", conf.get(OSMIngest.ZOOKEEPERS));
    props.put("group.id", groupId);
    props.put("zookeeper.sync.time.ms", "200");
    props.put("auto.commit.interval.ms", "1000");
    ConsumerConfig consumerConfig = new ConsumerConfig(props);
    ConsumerConnector consumer = Consumer.createJavaConsumerConnector(consumerConfig);
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(topic, 1);
    Map<String, List<KafkaStream<String, String>>> consumerMap =
            consumer.createMessageStreams(topicCountMap,
                    new StringDecoder(new VerifiableProperties()),
                    new StringDecoder(new VerifiableProperties()));
    List<KafkaStream<String, String>> streams = consumerMap.get(topic);
    // BUG FIX: the old code only logged when size != 1 and then dereferenced a null
    // stream, crashing with an uninformative NPE. Fail fast with a clear message instead.
    if (streams == null || streams.size() != 1) {
        log.error("Streams should be of size 1");
        throw new IllegalStateException("Expected exactly one Kafka stream for topic " + topic);
    }
    kafkaIterator = streams.get(0).iterator();
}
}
/**
 * Initializes the Kafka high-level consumer: validates that a handler is set, creates the
 * consumer connector, opens {@code streamNum} String-decoded streams on {@code topic},
 * and allocates the thread pool that will drain them.
 *
 * @throws RuntimeException if no handler has been configured
 * @throws IllegalArgumentException if Kafka returns no streams for the topic
 */
protected void initKafka() {
    if (handler == null) {
        // BUG FIX: the old message read "Exectuor can't be null!" — a typo, and it named
        // the wrong collaborator (the null check is on the handler field).
        log.error("Handler can't be null!");
        throw new RuntimeException("Handler can't be null!");
    }
    log.info("Consumer properties:" + properties);
    ConsumerConfig config = new ConsumerConfig(properties);
    isAutoCommitOffset = config.autoCommitEnable();
    log.info("Auto commit: " + isAutoCommitOffset);
    consumerConnector = Consumer.createJavaConsumerConnector(config);
    Map<String, Integer> topics = new HashMap<String, Integer>();
    topics.put(topic, streamNum);
    StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
    StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());
    Map<String, List<KafkaStream<String, String>>> streamsMap =
            consumerConnector.createMessageStreams(topics, keyDecoder, valueDecoder);
    streams = streamsMap.get(topic);
    log.info("Streams:" + streams);
    if (streams == null || streams.isEmpty()) {
        log.error("Streams are empty.");
        throw new IllegalArgumentException("Streams are empty.");
    }
    streamThreadPool = Executors.newFixedThreadPool(streamNum);
}
// consumer.timeout.ms bounds each iterator.next() call so a poll does not block forever;
// per the old-consumer config docs a ConsumerTimeoutException is raised after 500 ms idle.
consumerProperties.put("consumer.timeout.ms", "500");
ConsumerConnector javaConsumerConnector = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProperties));
// StringDecoder converts raw byte[] keys/values to Strings (UTF-8 unless overridden).
StringDecoder stringDecoder = new StringDecoder(new VerifiableProperties(new Properties()));
// Subscribe with a single stream/thread for this topic.
Map<String, Integer> topicMap = new HashMap<>();
topicMap.put(topicName, 1);
// consumer.timeout.ms bounds each iterator.next() call so a poll does not block forever;
// per the old-consumer config docs a ConsumerTimeoutException is raised after 500 ms idle.
consumerProperties.put("consumer.timeout.ms", "500");
ConsumerConnector javaConsumerConnector = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProperties));
// StringDecoder converts raw byte[] keys/values to Strings (UTF-8 unless overridden).
StringDecoder stringDecoder = new StringDecoder(new VerifiableProperties(new Properties()));
// Subscribe with a single stream/thread for this topic.
Map<String, Integer> topicMap = new HashMap<>();
topicMap.put(topicName, 1);
// consumer.timeout.ms bounds each iterator.next() call so a poll does not block forever;
// per the old-consumer config docs a ConsumerTimeoutException is raised after 500 ms idle.
consumerProperties.put("consumer.timeout.ms", "500");
ConsumerConnector javaConsumerConnector = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProperties));
// StringDecoder converts raw byte[] keys/values to Strings (UTF-8 unless overridden).
StringDecoder stringDecoder = new StringDecoder(new VerifiableProperties(new Properties()));
// Subscribe with a single stream/thread for this topic.
Map<String, Integer> topicMap = new HashMap<>();
topicMap.put(topicName, 1);
// consumer.timeout.ms bounds each iterator.next() call so a poll does not block forever;
// per the old-consumer config docs a ConsumerTimeoutException is raised after 500 ms idle.
consumerProperties.put("consumer.timeout.ms", "500");
ConsumerConnector javaConsumerConnector = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProperties));
// StringDecoder converts raw byte[] keys/values to Strings (UTF-8 unless overridden).
StringDecoder stringDecoder = new StringDecoder(new VerifiableProperties(new Properties()));
// Subscribe with a single stream/thread for this topic.
Map<String, Integer> topicMap = new HashMap<>();
topicMap.put(topicName, 1);
// KafkaMetricsReporter$.MODULE$ is Java's handle to the Scala companion object;
// presumably this starts the metrics reporters configured in props — confirm against
// the Kafka version on the classpath.
KafkaMetricsReporter$.MODULE$.startReporters(new VerifiableProperties(props));
ImmutableMap.of(topic, 1); final StringDecoder decoder = new StringDecoder(new VerifiableProperties()); final Map<String, List<KafkaStream<String,String>>> consumerMap = consumer.createMessageStreams(topicCountMap, decoder, decoder);
/** Wires the Avro serializer and encoder against a local schema-registry endpoint. */
public AvroMessageDecoderTest() {
    Map<String, String> serializerConfig = Collections.singletonMap(
            KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
    // false: configure as a value serializer, not a key serializer.
    avroSerializer.configure(serializerConfig, false);

    Properties encoderProps = new Properties();
    encoderProps.setProperty("schema.registry.url", "http://localhost:8081");
    avroEncoder = new KafkaAvroEncoder(schemaRegistry, new VerifiableProperties(encoderProps));
}
/**
 * Creates a {@link ZkUtils} handle from the ZooKeeper settings in the given properties.
 *
 * @param properties configuration holding the zookeeper.* connection settings
 * @return a connected {@link ZkUtils}
 * @throws IllegalArgumentException if {@code properties} is null
 * @throws AdminOperationException if the ZooKeeper connection cannot be established
 */
private static ZkUtils getZkUtils(Properties properties) {
    if (properties == null) {
        throw new IllegalArgumentException("properties cannot be null");
    }
    Tuple2<ZkClient, ZkConnection> tuple;
    try {
        ZKConfig zkConfig = new ZKConfig(new VerifiableProperties(properties));
        tuple = ZkUtils.createZkClientAndConnection(zkConfig.zkConnect(),
                zkConfig.zkSessionTimeoutMs(), zkConfig.zkConnectionTimeoutMs());
    } catch (ZkException e) {
        throw new AdminOperationException("Unable to create admin connection", e);
    }
    // FIX: Boolean.parseBoolean avoids the needless boxing/unboxing of Boolean.valueOf.
    boolean isSecure = Boolean.parseBoolean(properties.getProperty(ZOOKEEPER_SECURE, DEFAULT_ZOOKEEPER_SECURE));
    return new ZkUtils(tuple._1(), tuple._2(), isSecure);
}
@Override public void start() { //重置offset if(consumerContext.getOffsetLogHanlder() != null){ resetCorrectOffsets(); } Map<String, Integer> topicCountMap = new HashMap<String, Integer>(); for (String topicName : consumerContext.getMessageHandlers().keySet()) { int nThreads = 1; topicCountMap.put(topicName, nThreads); logger.info("topic[{}] assign fetch Threads {}",topicName,nThreads); } StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties()); MessageDecoder valueDecoder = new MessageDecoder(deserializer); Map<String, List<KafkaStream<String, Object>>> consumerMap = this.connector.createMessageStreams(topicCountMap, keyDecoder, valueDecoder); for (String topicName : consumerContext.getMessageHandlers().keySet()) { final List<KafkaStream<String, Object>> streams = consumerMap.get(topicName); for (final KafkaStream<String, Object> stream : streams) { MessageProcessor processer = new MessageProcessor(topicName, stream); this.fetchExecutor.execute(processer); } } // runing.set(true); }
// Start the metrics reporters via the Scala companion object (MODULE$) with empty
// properties, then pass the resulting Seq to the reflectively-resolved KafkaServer
// constructor alongside the config, clock, and thread-name prefix.
Seq<KafkaMetricsReporter> reporters = KafkaMetricsReporter$.MODULE$.startReporters(new VerifiableProperties(new Properties()));
kafkaServer = (KafkaServer) kafkaServerConstructor.newInstance(kafkaConfig, new LocalSystemTime(), threadPrefixName, reporters);
/**
 * Opens the Kafka consumer over the configured topic whitelist and submits one
 * {@link KafkaPersistReaderTask} per returned stream to the executor.
 */
@Override
public void startStream() {
    Properties props = new Properties();
    // Explicit charset for the StringDecoders below.
    props.setProperty("serializer.encoding", "UTF8");
    consumerConnector = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));

    VerifiableProperties decoderProps = new VerifiableProperties(props);
    inStreams = consumerConnector.createMessageStreamsByFilter(
            new Whitelist(config.getTopic()), 1,
            new StringDecoder(decoderProps), new StringDecoder(decoderProps));

    // Fan out: one persist-reader task per stream.
    for (final KafkaStream stream : inStreams) {
        executor.submit(new KafkaPersistReaderTask(this, stream));
    }
}
/**
 * Returns an {@link Authorizer} to make {@link Acl} requests.
 *
 * <p>Lazily builds a {@link SimpleAclAuthorizer} from the ZooKeeper settings in the
 * configured properties and caches it; later calls return the same instance.
 *
 * @return an {@link Authorizer} to make {@link Acl} requests
 * @throws AdminOperationException if there is an issue creating the authorizer
 */
public Authorizer getAuthorizer() {
    if (authorizer != null) {
        return authorizer;
    }
    ZKConfig zkConfig = new ZKConfig(new VerifiableProperties(properties));
    Map<String, Object> authorizerProps = new HashMap<>();
    authorizerProps.put(ZKConfig.ZkConnectProp(), zkConfig.zkConnect());
    authorizerProps.put(ZKConfig.ZkConnectionTimeoutMsProp(), zkConfig.zkConnectionTimeoutMs());
    authorizerProps.put(ZKConfig.ZkSessionTimeoutMsProp(), zkConfig.zkSessionTimeoutMs());
    authorizerProps.put(ZKConfig.ZkSyncTimeMsProp(), zkConfig.zkSyncTimeMs());
    try {
        SimpleAclAuthorizer aclAuthorizer = new SimpleAclAuthorizer();
        aclAuthorizer.configure(authorizerProps);
        authorizer = aclAuthorizer;
    } catch (ZkException e) {
        throw new AdminOperationException("Unable to create authorizer", e);
    }
    return authorizer;
}