@Override
protected FlinkKafkaConsumerBase<Row> createKafkaConsumer(
        String topic,
        Properties properties,
        DeserializationSchema<Row> deserializationSchema) {
    return new FlinkKafkaConsumer011<>(topic, deserializationSchema, properties);
}
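// Hypothetical call site for the hook above, wiring Flink's JSON row format into the
// version-specific consumer. The "events" topic, kafkaProps, and rowTypeInfo (a
// TypeInformation<Row>) are assumptions, not taken from the original snippet.
DeserializationSchema<Row> schema = new JsonRowDeserializationSchema(rowTypeInfo);
FlinkKafkaConsumerBase<Row> consumer = createKafkaConsumer("events", kafkaProps, schema);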
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    DataStream<KafkaEvent> input = env
            .addSource(
                new FlinkKafkaConsumer011<>(
                    parameterTool.getRequired("input-topic"),
                    new KafkaEventSchema(),
                    parameterTool.getProperties())
                .assignTimestampsAndWatermarks(new CustomWatermarkExtractor()))
            .keyBy("word")
            .map(new RollingAdditionMapper());

    input.addSink(
        new FlinkKafkaProducer011<>(
            parameterTool.getRequired("output-topic"),
            new KafkaEventSchema(),
            parameterTool.getProperties()));

    env.execute("Kafka 0.11 Example");
}
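// A minimal sketch of a periodic watermark assigner like the CustomWatermarkExtractor
// referenced above. It assumes KafkaEvent exposes a getTimestamp() accessor returning
// epoch milliseconds, and the one-second out-of-orderness bound is an assumption.
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.watermark.Watermark;

public class CustomWatermarkExtractor implements AssignerWithPeriodicWatermarks<KafkaEvent> {

    private static final long MAX_OUT_OF_ORDERNESS_MS = 1000L; // assumed bound

    private long currentMaxTimestamp = Long.MIN_VALUE;

    @Override
    public long extractTimestamp(KafkaEvent event, long previousElementTimestamp) {
        long timestamp = event.getTimestamp(); // assumed accessor
        currentMaxTimestamp = Math.max(currentMaxTimestamp, timestamp);
        return timestamp;
    }

    @Override
    public Watermark getCurrentWatermark() {
        // The watermark trails the highest timestamp seen so far by the bound.
        return new Watermark(currentMaxTimestamp == Long.MIN_VALUE
                ? Long.MIN_VALUE
                : currentMaxTimestamp - MAX_OUT_OF_ORDERNESS_MS);
    }
}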
@Override
public FlinkKafkaConsumerBase createKafkaConsumer() {
    FlinkKafkaConsumerBase consumer;
    KafkaMessageDeserialization kafkaMessageDeserialization =
            new KafkaMessageDeserialization(baseRowTypeInfo);
    // Subscribe by topic pattern when one is configured, otherwise by the fixed topic.
    if (!StringUtils.isNullOrWhitespaceOnly(topicPattern)) {
        Pattern pattern = Pattern.compile(topicPattern);
        consumer = new FlinkKafkaConsumer011(pattern, kafkaMessageDeserialization, properties);
    } else {
        consumer = new FlinkKafkaConsumer011(topic, kafkaMessageDeserialization, properties);
    }
    // setStartFromTimestamp rejects negative timestamps, so the guard must be >= 0
    // (the original >= -1 would let -1 through and fail at runtime).
    if (startupMode == StartupMode.TIMESTAMP && startTimeStamp >= 0) {
        ((FlinkKafkaConsumer011) consumer).setStartFromTimestamp(startTimeStamp);
    }
    return consumer;
}
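// A standalone sketch of the pattern-plus-timestamp startup path that the factory above
// configures. Broker address, group id, topic pattern, and start time are all assumptions.
private static FlinkKafkaConsumer011<String> patternConsumerFromTimestamp() {
    Properties kafkaProperties = new Properties();
    kafkaProperties.setProperty("bootstrap.servers", "localhost:9092"); // assumed broker
    kafkaProperties.setProperty("group.id", "pattern-reader");          // assumed group id
    FlinkKafkaConsumer011<String> consumer = new FlinkKafkaConsumer011<>(
            Pattern.compile("metrics-.*"),  // assumed topic pattern
            new SimpleStringSchema(),
            kafkaProperties);
    consumer.setStartFromTimestamp(1546300800000L); // assumed start time (epoch millis)
    return consumer;
}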
/**
 * Builds a Kafka source emitting {@link Metrics} events.
 *
 * @param env   the stream execution environment
 * @param topic the Kafka topic to consume
 * @param time  the timestamp to start consuming from (0 keeps the configured default)
 * @return a source emitting the consumed metrics
 * @throws IllegalAccessException if the Kafka properties cannot be built
 */
public static DataStreamSource<Metrics> buildSource(StreamExecutionEnvironment env, String topic, Long time) throws IllegalAccessException {
    ParameterTool parameterTool = (ParameterTool) env.getConfig().getGlobalJobParameters();
    Properties props = buildKafkaProps(parameterTool);
    FlinkKafkaConsumer011<Metrics> consumer = new FlinkKafkaConsumer011<>(
            topic,
            new MetricSchema(),
            props);
    // Reset the offsets to the given point in time.
    if (time != 0L) {
        Map<KafkaTopicPartition, Long> partitionOffset = buildOffsetByTime(props, parameterTool, time);
        consumer.setStartFromSpecificOffsets(partitionOffset);
    }
    return env.addSource(consumer);
}
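// A sketch of what the buildOffsetByTime helper used above could look like (its body is
// not shown in the original; the real version also takes a ParameterTool). It uses the
// plain Kafka consumer's offsetsForTimes API to map one timestamp to an offset per
// partition, and assumes props already contains key/value deserializer settings.
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;

public static Map<KafkaTopicPartition, Long> buildOffsetByTime(
        Properties props, String topic, long time) {
    Map<KafkaTopicPartition, Long> result = new HashMap<>();
    try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) {
        // Ask Kafka for the earliest offset at or after the timestamp, per partition.
        Map<TopicPartition, Long> timestamps = new HashMap<>();
        List<PartitionInfo> partitions = consumer.partitionsFor(topic);
        for (PartitionInfo partition : partitions) {
            timestamps.put(new TopicPartition(topic, partition.partition()), time);
        }
        Map<TopicPartition, OffsetAndTimestamp> offsets = consumer.offsetsForTimes(timestamps);
        for (Map.Entry<TopicPartition, OffsetAndTimestamp> entry : offsets.entrySet()) {
            if (entry.getValue() != null) { // null when no record is newer than the timestamp
                result.put(
                        new KafkaTopicPartition(entry.getKey().topic(), entry.getKey().partition()),
                        entry.getValue().offset());
            }
        }
    }
    return result;
}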
FlinkKafkaConsumer011<TaxiRide> consumer = new FlinkKafkaConsumer011<>(
        "cleansedRides",
        new TaxiRideSchema(),
        kafkaProps);
consumer.assignTimestampsAndWatermarks(new TaxiRideTSExtractor());
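// A possible shape for the TaxiRideTSExtractor referenced above, built on Flink's
// BoundedOutOfOrdernessTimestampExtractor. The event-time accessor and the 60-second
// out-of-orderness bound are assumptions, not taken from the original snippet.
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;

public class TaxiRideTSExtractor extends BoundedOutOfOrdernessTimestampExtractor<TaxiRide> {

    public TaxiRideTSExtractor() {
        super(Time.seconds(60)); // assumed maximum out-of-orderness
    }

    @Override
    public long extractTimestamp(TaxiRide ride) {
        return ride.getEventTime(); // assumed accessor returning epoch milliseconds
    }
}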
@Override
public <T> FlinkKafkaConsumerBase<T> getConsumer(
        List<String> topics,
        KeyedDeserializationSchema<T> readSchema,
        Properties props) {
    return new FlinkKafkaConsumer011<>(topics, readSchema, props);
}
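// Hypothetical call site for the factory method above: subscribe to two topics and wrap
// a plain DeserializationSchema into the keyed variant Flink expects here. The topic
// names, 'factory', 'env', and kafkaProps are assumptions.
FlinkKafkaConsumerBase<String> multiTopicConsumer = factory.getConsumer(
        Arrays.asList("orders", "payments"),
        new KeyedDeserializationSchemaWrapper<>(new SimpleStringSchema()),
        kafkaProps);
env.addSource(multiTopicConsumer).print();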