/**
 * Builds the version-specific Kafka consumer for this table source.
 *
 * @param topic                 Kafka topic to subscribe to
 * @param properties            consumer configuration (bootstrap servers, group id, ...)
 * @param deserializationSchema converts Kafka records into {@link Row} instances
 * @return a Kafka 0.9 consumer wired with the given schema and properties
 */
@Override
protected FlinkKafkaConsumerBase<Row> createKafkaConsumer(
        String topic, Properties properties, DeserializationSchema<Row> deserializationSchema) {
    final FlinkKafkaConsumer09<Row> consumer =
            new FlinkKafkaConsumer09<>(topic, deserializationSchema, properties);
    return consumer;
}
}
/**
 * Creates a Kafka 0.9 consumer subscribed to the given topics.
 *
 * @param <T>        element type produced by the deserializer
 * @param topics     topic names to read from
 * @param readSchema deserializer applied to each Kafka record (key and value)
 * @param props      Kafka consumer properties
 * @return the version-specific consumer instance
 */
@Override
public <T> FlinkKafkaConsumerBase<T> getConsumer(
        List<String> topics, KeyedDeserializationSchema<T> readSchema, Properties props) {
    return new FlinkKafkaConsumer09<>(topics, readSchema, props);
}
/**
 * Factory hook: instantiates the connector-version-specific consumer.
 *
 * @param topic                 name of the Kafka topic to consume
 * @param properties            configuration handed to the Kafka client
 * @param deserializationSchema schema turning raw records into {@link Row}s
 * @return a {@link FlinkKafkaConsumer09} for the given topic
 */
@Override
protected FlinkKafkaConsumerBase<Row> createKafkaConsumer(
        String topic,
        Properties properties,
        DeserializationSchema<Row> deserializationSchema) {
    return new FlinkKafkaConsumer09<>(topic, deserializationSchema, properties);
}
}
/**
 * Returns the Kafka 0.9 flavor of the consumer for this source.
 *
 * @param topic                 subscribed topic name
 * @param properties            Kafka client settings
 * @param deserializationSchema record-to-{@link Row} decoder
 * @return the constructed consumer
 */
@Override
protected FlinkKafkaConsumerBase<Row> createKafkaConsumer(
        String topic, Properties properties, DeserializationSchema<Row> deserializationSchema) {
    // Delegate straight to the 0.9 connector; no extra configuration is applied here.
    return new FlinkKafkaConsumer09<>(topic, deserializationSchema, properties);
}
}
/**
 * Creates the Kafka 0.9 consumer used by this table source.
 *
 * @param topic                 topic to subscribe to
 * @param properties            consumer properties (brokers, group id, ...)
 * @param deserializationSchema decoder producing {@link Row} elements
 * @return the version-specific consumer
 */
@Override
protected FlinkKafkaConsumerBase<Row> createKafkaConsumer(
        String topic, Properties properties, DeserializationSchema<Row> deserializationSchema) {
    final FlinkKafkaConsumer09<Row> kafkaConsumer =
            new FlinkKafkaConsumer09<>(topic, deserializationSchema, properties);
    return kafkaConsumer;
}
/**
 * Instantiates the connector-specific consumer (Kafka 0.9).
 *
 * @param topic                 the topic read by this source
 * @param properties            client configuration passed through unchanged
 * @param deserializationSchema converts byte records to {@link Row}
 * @return a new {@link FlinkKafkaConsumer09}
 */
@Override
protected FlinkKafkaConsumerBase<Row> createKafkaConsumer(
        String topic,
        Properties properties,
        DeserializationSchema<Row> deserializationSchema) {
    return new FlinkKafkaConsumer09<>(topic, deserializationSchema, properties);
}
/**
 * Version hook of the base table source: builds the 0.9 consumer.
 *
 * @param topic                 Kafka topic name
 * @param properties            consumer configuration
 * @param deserializationSchema row deserializer for incoming records
 * @return consumer bound to {@code topic}
 */
@Override
protected FlinkKafkaConsumerBase<Row> createKafkaConsumer(
        String topic, Properties properties, DeserializationSchema<Row> deserializationSchema) {
    // Note the argument order of the connector constructor: schema before properties.
    return new FlinkKafkaConsumer09<>(topic, deserializationSchema, properties);
}
/**
 * Produces the Kafka 0.9 consumer for the given topic.
 *
 * @param topic                 topic to read
 * @param properties            Kafka consumer settings
 * @param deserializationSchema schema yielding {@link Row} values
 * @return the consumer instance
 */
@Override
protected FlinkKafkaConsumerBase<Row> createKafkaConsumer(
        String topic, Properties properties, DeserializationSchema<Row> deserializationSchema) {
    FlinkKafkaConsumer09<Row> result = new FlinkKafkaConsumer09<>(topic, deserializationSchema, properties);
    return result;
}
/**
 * Obtains a Kafka 0.9 consumer for the supplied topic list.
 *
 * @param <T>        record type emitted by {@code readSchema}
 * @param topics     topics to subscribe to
 * @param readSchema keyed deserialization schema for records
 * @param props      consumer configuration
 * @return the constructed consumer
 */
@Override
public <T> FlinkKafkaConsumerBase<T> getConsumer(
        List<String> topics, KeyedDeserializationSchema<T> readSchema, Properties props) {
    final FlinkKafkaConsumer09<T> consumer = new FlinkKafkaConsumer09<>(topics, readSchema, props);
    return consumer;
}
/**
 * Creates the Kafka 0.9 source, subscribing by topic pattern when
 * {@code topicPattern} is non-blank, otherwise by the explicit {@code topic}.
 *
 * <p>Fix: the original declared {@code Pattern pattern} before the branch so it was
 * definitely assigned on only one path, and funneled both branches through a mutable
 * {@code consumer} local; both are replaced with branch-local construction and
 * direct returns.
 *
 * @return a consumer that deserializes records via {@link KafkaMessageDeserialization}
 */
@Override
public FlinkKafkaConsumerBase createKafkaConsumer() {
    KafkaMessageDeserialization kafkaMessageDeserialization = new KafkaMessageDeserialization(baseRowTypeInfo);
    // A configured pattern takes precedence over the fixed topic.
    if (!StringUtils.isNullOrWhitespaceOnly(topicPattern)) {
        return new FlinkKafkaConsumer09(Pattern.compile(topicPattern), kafkaMessageDeserialization, properties);
    }
    return new FlinkKafkaConsumer09(topic, kafkaMessageDeserialization, properties);
}
@Override public <T> FlinkKafkaConsumerBase<T> getConsumer(List<String> topics, KeyedDeserializationSchema<T> readSchema, Properties props) { return new FlinkKafkaConsumer09<>(topics, readSchema, props); }
/**
 * Initialization (executed on the driver).
 *
 * <p>The Kafka stream is built lazily and memoized, so the underlying
 * {@code FlinkKafkaConsumer09} source is created at most once.
 *
 * @param tableEnv Flink table environment whose execution environment hosts the source
 * @param config   connection settings: brokers, topics, group id, offset mode, zookeeper
 */
public KafkaSource09(StreamTableEnvironment tableEnv, KafkaSource09Config config) {
    requireNonNull(tableEnv, "tableEnv is null");
    requireNonNull(config, "config is null");
    loadStream = Suppliers.memoize(() -> {
        String topics = config.topics;
        Properties consumerProps = new Properties();
        // Beware of broker host-resolution issues.
        consumerProps.put("bootstrap.servers", config.brokers);
        // "enable.auto.commit" -> false: do not auto-commit offsets
        // "session.timeout.ms" -> "30000": session default is 30s; failing to commit offsets within 5s raises an error
        // "heartbeat.interval.ms" -> "5000": heartbeat period, one commit every 10s
        // NOTE: group.id must differ between distinct streams, otherwise offset commits fail.
        consumerProps.put("group.id", config.groupid);
        consumerProps.put("auto.offset.reset", config.offsetMode); // latest / earliest
        consumerProps.put("zookeeper.connect", config.zookeeper);
        List<String> topicList = Arrays.asList(topics.split(","));
        // org.apache.flink.streaming.api.checkpoint.CheckpointedFunction
        return tableEnv.execEnv().addSource(
                new FlinkKafkaConsumer09<Row>(topicList, new RowDeserializer(), consumerProps));
    });
}
new FlinkKafkaConsumer09<TemperatureEvent>("test", new EventDeserializationSchema(), properties));
new FlinkKafkaConsumer09<>("sensors", new DataPointSerializationSchema(), kafkaProperties);