/**
 * Creates a FlinkKafkaProducer that writes a DataStream to the given Kafka topic,
 * serializing each element with a key/value-aware schema.
 *
 * <p>The default {@link FlinkFixedPartitioner} is installed as the partitioner: each
 * sink subtask is mapped to a single Kafka partition, so all records produced by one
 * subtask land in the same partition. To supply a custom partitioner, use
 * {@link #FlinkKafkaProducer010(String, KeyedSerializationSchema, Properties, FlinkKafkaPartitioner)}
 * instead.
 *
 * @param brokerList
 *            Comma separated addresses of the brokers
 * @param topicId
 *            ID of the Kafka topic.
 * @param serializationSchema
 *            User defined serialization schema supporting key/value messages
 */
public FlinkKafkaProducer010(String brokerList, String topicId, KeyedSerializationSchema<T> serializationSchema) {
	this(
		topicId,
		serializationSchema,
		getPropertiesFromBrokerList(brokerList),
		new FlinkFixedPartitioner<T>());
}
/**
 * Creates a FlinkKafkaProducer that writes a DataStream to the given Kafka topic,
 * serializing each element with a key-less schema (the records carry no Kafka key);
 * the schema is wrapped in a {@link KeyedSerializationSchemaWrapper} internally.
 *
 * <p>The default {@link FlinkFixedPartitioner} is installed as the partitioner: each
 * sink subtask is mapped to a single Kafka partition, so all records produced by one
 * subtask land in the same partition. To supply a custom partitioner, use
 * {@link #FlinkKafkaProducer010(String, SerializationSchema, Properties, FlinkKafkaPartitioner)}
 * instead.
 *
 * @param brokerList
 *            Comma separated addresses of the brokers
 * @param topicId
 *            ID of the Kafka topic.
 * @param serializationSchema
 *            User defined key-less serialization schema.
 */
public FlinkKafkaProducer010(String brokerList, String topicId, SerializationSchema<T> serializationSchema) {
	this(
		topicId,
		new KeyedSerializationSchemaWrapper<>(serializationSchema),
		getPropertiesFromBrokerList(brokerList),
		new FlinkFixedPartitioner<T>());
}
/**
 * Creates a FlinkKafkaProducer that writes a DataStream to the given Kafka topic,
 * serializing each element with a key/value-aware schema.
 *
 * <p>The default {@link FlinkFixedPartitioner} is installed as the partitioner: each
 * sink subtask is mapped to a single Kafka partition, so all records produced by one
 * subtask land in the same partition. To supply a custom partitioner, use
 * {@link #FlinkKafkaProducer010(String, KeyedSerializationSchema, Properties, FlinkKafkaPartitioner)}
 * instead.
 *
 * <p>NOTE(review): this constructor is a byte-for-byte duplicate of an earlier
 * overload in this file — a duplicate constructor signature will not compile in a
 * single class; this looks like a merge/extraction artifact. Deduplicate at file level.
 *
 * @param brokerList
 *            Comma separated addresses of the brokers
 * @param topicId
 *            ID of the Kafka topic.
 * @param serializationSchema
 *            User defined serialization schema supporting key/value messages
 */
public FlinkKafkaProducer010(String brokerList, String topicId, KeyedSerializationSchema<T> serializationSchema) {
	this(
		topicId,
		serializationSchema,
		getPropertiesFromBrokerList(brokerList),
		new FlinkFixedPartitioner<T>());
}
/**
 * Creates a FlinkKafkaProducer that writes a DataStream to the given Kafka topic,
 * serializing each element with a key/value-aware schema.
 *
 * <p>The default {@link FlinkFixedPartitioner} is installed as the partitioner: each
 * sink subtask is mapped to a single Kafka partition, so all records produced by one
 * subtask land in the same partition. To supply a custom partitioner, use
 * {@link #FlinkKafkaProducer010(String, KeyedSerializationSchema, Properties, FlinkKafkaPartitioner)}
 * instead.
 *
 * <p>NOTE(review): this constructor is a byte-for-byte duplicate of an earlier
 * overload in this file — a duplicate constructor signature will not compile in a
 * single class; this looks like a merge/extraction artifact. Deduplicate at file level.
 *
 * @param brokerList
 *            Comma separated addresses of the brokers
 * @param topicId
 *            ID of the Kafka topic.
 * @param serializationSchema
 *            User defined serialization schema supporting key/value messages
 */
public FlinkKafkaProducer010(String brokerList, String topicId, KeyedSerializationSchema<T> serializationSchema) {
	this(
		topicId,
		serializationSchema,
		getPropertiesFromBrokerList(brokerList),
		new FlinkFixedPartitioner<T>());
}
/**
 * Creates a FlinkKafkaProducer that writes a DataStream to the given Kafka topic,
 * serializing each element with a key-less schema (the records carry no Kafka key);
 * the schema is wrapped in a {@link KeyedSerializationSchemaWrapper} internally.
 *
 * <p>The default {@link FlinkFixedPartitioner} is installed as the partitioner: each
 * sink subtask is mapped to a single Kafka partition, so all records produced by one
 * subtask land in the same partition. To supply a custom partitioner, use
 * {@link #FlinkKafkaProducer010(String, SerializationSchema, Properties, FlinkKafkaPartitioner)}
 * instead.
 *
 * <p>NOTE(review): this constructor is a byte-for-byte duplicate of an earlier
 * overload in this file — a duplicate constructor signature will not compile in a
 * single class; this looks like a merge/extraction artifact. Deduplicate at file level.
 *
 * @param brokerList
 *            Comma separated addresses of the brokers
 * @param topicId
 *            ID of the Kafka topic.
 * @param serializationSchema
 *            User defined key-less serialization schema.
 */
public FlinkKafkaProducer010(String brokerList, String topicId, SerializationSchema<T> serializationSchema) {
	this(
		topicId,
		new KeyedSerializationSchemaWrapper<>(serializationSchema),
		getPropertiesFromBrokerList(brokerList),
		new FlinkFixedPartitioner<T>());
}
/**
 * Creates a FlinkKafkaProducer that writes a DataStream to the given Kafka topic,
 * serializing each element with a key-less schema (the records carry no Kafka key);
 * the schema is wrapped in a {@link KeyedSerializationSchemaWrapper} internally.
 *
 * <p>The default {@link FlinkFixedPartitioner} is installed as the partitioner: each
 * sink subtask is mapped to a single Kafka partition, so all records produced by one
 * subtask land in the same partition. To supply a custom partitioner, use
 * {@link #FlinkKafkaProducer010(String, SerializationSchema, Properties, FlinkKafkaPartitioner)}
 * instead.
 *
 * <p>NOTE(review): this constructor is a byte-for-byte duplicate of an earlier
 * overload in this file — a duplicate constructor signature will not compile in a
 * single class; this looks like a merge/extraction artifact. Deduplicate at file level.
 *
 * @param brokerList
 *            Comma separated addresses of the brokers
 * @param topicId
 *            ID of the Kafka topic.
 * @param serializationSchema
 *            User defined key-less serialization schema.
 */
public FlinkKafkaProducer010(String brokerList, String topicId, SerializationSchema<T> serializationSchema) {
	this(
		topicId,
		new KeyedSerializationSchemaWrapper<>(serializationSchema),
		getPropertiesFromBrokerList(brokerList),
		new FlinkFixedPartitioner<T>());
}