/**
 * Resolves the optional sink partitioner from the descriptor properties.
 *
 * <p>Returns an empty {@link Optional} for the round-robin setting (the Kafka producer's
 * own distribution is used), a {@link FlinkFixedPartitioner} for the fixed setting, and a
 * reflectively instantiated user class for the custom setting.
 *
 * @param descriptorProperties validated connector properties
 * @return the partitioner to install, or empty if none should be set
 * @throws TableException if the partitioner value is not one of the supported constants
 */
@SuppressWarnings("unchecked")
private Optional<FlinkKafkaPartitioner<Row>> getFlinkKafkaPartitioner(DescriptorProperties descriptorProperties) {
	return descriptorProperties
		.getOptionalString(CONNECTOR_SINK_PARTITIONER)
		.flatMap(partitionerValue -> {
			if (CONNECTOR_SINK_PARTITIONER_VALUE_FIXED.equals(partitionerValue)) {
				return Optional.of(new FlinkFixedPartitioner<>());
			}
			if (CONNECTOR_SINK_PARTITIONER_VALUE_ROUND_ROBIN.equals(partitionerValue)) {
				// No custom partitioner: let the Kafka producer distribute records itself.
				return Optional.empty();
			}
			if (CONNECTOR_SINK_PARTITIONER_VALUE_CUSTOM.equals(partitionerValue)) {
				final Class<? extends FlinkKafkaPartitioner> userClass =
					descriptorProperties.getClass(CONNECTOR_SINK_PARTITIONER_CLASS, FlinkKafkaPartitioner.class);
				// Unchecked cast: the reflectively loaded class is raw; the suppression above covers it.
				return Optional.of((FlinkKafkaPartitioner<Row>) InstantiationUtil.instantiate(userClass));
			}
			// The validator is expected to have rejected any other value already.
			throw new TableException("Unsupported sink partitioner. Validator should have checked that.");
		});
}
/**
 * Resolves the configured Elasticsearch failure-handling strategy.
 *
 * <p>Falls back to {@code DEFAULT_FAILURE_HANDLER} when the property is absent. The
 * custom strategy loads and instantiates a user-provided handler class reflectively.
 *
 * @param descriptorProperties validated connector properties
 * @return the failure handler matching the configured strategy
 * @throws IllegalArgumentException if the configured strategy is not recognized
 */
private ActionRequestFailureHandler getFailureHandler(DescriptorProperties descriptorProperties) {
	final String strategy = descriptorProperties
		.getOptionalString(CONNECTOR_FAILURE_HANDLER)
		.orElse(DEFAULT_FAILURE_HANDLER);
	if (CONNECTOR_FAILURE_HANDLER_VALUE_FAIL.equals(strategy)) {
		// "fail" strategy: let bulk failures propagate and fail the sink.
		return new NoOpFailureHandler();
	} else if (CONNECTOR_FAILURE_HANDLER_VALUE_IGNORE.equals(strategy)) {
		return new IgnoringFailureHandler();
	} else if (CONNECTOR_FAILURE_HANDLER_VALUE_RETRY.equals(strategy)) {
		return new RetryRejectedExecutionFailureHandler();
	} else if (CONNECTOR_FAILURE_HANDLER_VALUE_CUSTOM.equals(strategy)) {
		final Class<? extends ActionRequestFailureHandler> handlerClass = descriptorProperties
			.getClass(CONNECTOR_FAILURE_HANDLER_CLASS, ActionRequestFailureHandler.class);
		return InstantiationUtil.instantiate(handlerClass);
	} else {
		throw new IllegalArgumentException("Unknown failure handler.");
	}
}
@Override public DeserializationSchema<Row> createDeserializationSchema(Map<String, String> properties) { final DescriptorProperties descriptorProperties = getValidatedProperties(properties); // create and configure if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) { return new AvroRowDeserializationSchema( descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class)); } else { return new AvroRowDeserializationSchema(descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA)); } }
@Override public SerializationSchema<Row> createSerializationSchema(Map<String, String> properties) { final DescriptorProperties descriptorProperties = getValidatedProperties(properties); // create and configure if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) { return new AvroRowSerializationSchema( descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class)); } else { return new AvroRowSerializationSchema(descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA)); } }
/**
 * Resolves the optional sink partitioner from the descriptor properties.
 *
 * <p>Returns an empty {@link Optional} for the round-robin setting (the Kafka producer's
 * own distribution is used), a {@link FlinkFixedPartitioner} for the fixed setting, and a
 * reflectively instantiated user class for the custom setting.
 *
 * @param descriptorProperties validated connector properties
 * @return the partitioner to install, or empty if none should be set
 * @throws TableException if the partitioner value is not one of the supported constants
 */
@SuppressWarnings("unchecked")
private Optional<FlinkKafkaPartitioner<Row>> getFlinkKafkaPartitioner(DescriptorProperties descriptorProperties) {
	return descriptorProperties
		.getOptionalString(CONNECTOR_SINK_PARTITIONER)
		.flatMap((String partitionerString) -> {
			switch (partitionerString) {
				case CONNECTOR_SINK_PARTITIONER_VALUE_FIXED:
					return Optional.of(new FlinkFixedPartitioner<>());
				case CONNECTOR_SINK_PARTITIONER_VALUE_ROUND_ROBIN:
					// No custom partitioner: let the Kafka producer distribute records itself.
					return Optional.empty();
				case CONNECTOR_SINK_PARTITIONER_VALUE_CUSTOM:
					final Class<? extends FlinkKafkaPartitioner> partitionerClass =
						descriptorProperties.getClass(CONNECTOR_SINK_PARTITIONER_CLASS, FlinkKafkaPartitioner.class);
					// FIX: cast to FlinkKafkaPartitioner<Row>. instantiate(partitionerClass) yields the
					// raw partitioner type, so Optional.of(...) produced Optional<FlinkKafkaPartitioner>
					// (raw), which is not assignable to the required Optional<FlinkKafkaPartitioner<Row>>.
					// The cast (covered by the @SuppressWarnings above) restores the declared return type.
					return Optional.of((FlinkKafkaPartitioner<Row>) InstantiationUtil.instantiate(partitionerClass));
				default:
					// The validator is expected to have rejected any other value already.
					throw new TableException("Unsupported sink partitioner. Validator should have checked that.");
			}
		});
}
/**
 * Resolves the optional sink partitioner from the descriptor properties.
 *
 * <p>Returns an empty {@link Optional} for the round-robin setting (the Kafka producer's
 * own distribution is used), a {@link FlinkFixedPartitioner} for the fixed setting, and a
 * reflectively instantiated user class for the custom setting.
 *
 * @param descriptorProperties validated connector properties
 * @return the partitioner to install, or empty if none should be set
 * @throws TableException if the partitioner value is not one of the supported constants
 */
@SuppressWarnings("unchecked")
private Optional<FlinkKafkaPartitioner<Row>> getFlinkKafkaPartitioner(DescriptorProperties descriptorProperties) {
	return descriptorProperties
		.getOptionalString(CONNECTOR_SINK_PARTITIONER)
		.flatMap((String partitionerString) -> {
			switch (partitionerString) {
				case CONNECTOR_SINK_PARTITIONER_VALUE_FIXED:
					return Optional.of(new FlinkFixedPartitioner<>());
				case CONNECTOR_SINK_PARTITIONER_VALUE_ROUND_ROBIN:
					// No custom partitioner: let the Kafka producer distribute records itself.
					return Optional.empty();
				case CONNECTOR_SINK_PARTITIONER_VALUE_CUSTOM:
					final Class<? extends FlinkKafkaPartitioner> partitionerClass =
						descriptorProperties.getClass(CONNECTOR_SINK_PARTITIONER_CLASS, FlinkKafkaPartitioner.class);
					// FIX: cast to FlinkKafkaPartitioner<Row>. instantiate(partitionerClass) yields the
					// raw partitioner type, so Optional.of(...) produced Optional<FlinkKafkaPartitioner>
					// (raw), which is not assignable to the required Optional<FlinkKafkaPartitioner<Row>>.
					// The cast (covered by the @SuppressWarnings above) restores the declared return type.
					return Optional.of((FlinkKafkaPartitioner<Row>) InstantiationUtil.instantiate(partitionerClass));
				default:
					// The validator is expected to have rejected any other value already.
					throw new TableException("Unsupported sink partitioner. Validator should have checked that.");
			}
		});
}
/**
 * Resolves the configured Elasticsearch failure-handling strategy, defaulting to
 * {@code DEFAULT_FAILURE_HANDLER} when the property is not set.
 *
 * @param descriptorProperties validated connector properties
 * @return the failure handler matching the configured strategy
 * @throws IllegalArgumentException if the configured strategy is not recognized
 */
private ActionRequestFailureHandler getFailureHandler(DescriptorProperties descriptorProperties) {
	final String configuredStrategy = descriptorProperties
		.getOptionalString(CONNECTOR_FAILURE_HANDLER)
		.orElse(DEFAULT_FAILURE_HANDLER);
	switch (configuredStrategy) {
		case CONNECTOR_FAILURE_HANDLER_VALUE_FAIL:
			// "fail" strategy: let bulk failures propagate and fail the sink.
			return new NoOpFailureHandler();
		case CONNECTOR_FAILURE_HANDLER_VALUE_IGNORE:
			// Drop failed actions silently.
			return new IgnoringFailureHandler();
		case CONNECTOR_FAILURE_HANDLER_VALUE_RETRY:
			// Re-add actions rejected due to queue capacity saturation.
			return new RetryRejectedExecutionFailureHandler();
		case CONNECTOR_FAILURE_HANDLER_VALUE_CUSTOM:
			// Load and instantiate a user-supplied handler class reflectively.
			final Class<? extends ActionRequestFailureHandler> userHandlerClass = descriptorProperties
				.getClass(CONNECTOR_FAILURE_HANDLER_CLASS, ActionRequestFailureHandler.class);
			return InstantiationUtil.instantiate(userHandlerClass);
		default:
			throw new IllegalArgumentException("Unknown failure handler.");
	}
}
/**
 * Resolves the configured Elasticsearch failure-handling strategy.
 *
 * <p>When the property is absent, {@code DEFAULT_FAILURE_HANDLER} is assumed. The custom
 * strategy loads and instantiates a user-provided handler class reflectively.
 *
 * @param descriptorProperties validated connector properties
 * @return the failure handler matching the configured strategy
 * @throws IllegalArgumentException if the configured strategy is not recognized
 */
private ActionRequestFailureHandler getFailureHandler(DescriptorProperties descriptorProperties) {
	final String handlerValue = descriptorProperties
		.getOptionalString(CONNECTOR_FAILURE_HANDLER)
		.orElse(DEFAULT_FAILURE_HANDLER);
	if (CONNECTOR_FAILURE_HANDLER_VALUE_FAIL.equals(handlerValue)) {
		return new NoOpFailureHandler();
	}
	if (CONNECTOR_FAILURE_HANDLER_VALUE_IGNORE.equals(handlerValue)) {
		return new IgnoringFailureHandler();
	}
	if (CONNECTOR_FAILURE_HANDLER_VALUE_RETRY.equals(handlerValue)) {
		return new RetryRejectedExecutionFailureHandler();
	}
	if (CONNECTOR_FAILURE_HANDLER_VALUE_CUSTOM.equals(handlerValue)) {
		final Class<? extends ActionRequestFailureHandler> customClass = descriptorProperties
			.getClass(CONNECTOR_FAILURE_HANDLER_CLASS, ActionRequestFailureHandler.class);
		return InstantiationUtil.instantiate(customClass);
	}
	throw new IllegalArgumentException("Unknown failure handler.");
}
@Override public SerializationSchema<Row> createSerializationSchema(Map<String, String> properties) { final DescriptorProperties descriptorProperties = getValidatedProperties(properties); // create and configure if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) { return new AvroRowSerializationSchema( descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class)); } else { return new AvroRowSerializationSchema(descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA)); } }
@Override public DeserializationSchema<Row> createDeserializationSchema(Map<String, String> properties) { final DescriptorProperties descriptorProperties = getValidatedProperties(properties); // create and configure if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) { return new AvroRowDeserializationSchema( descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class)); } else { return new AvroRowDeserializationSchema(descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA)); } }
@Override public DeserializationSchema<Row> createDeserializationSchema(Map<String, String> properties) { final DescriptorProperties descriptorProperties = getValidatedProperties(properties); // create and configure if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) { return new AvroRowDeserializationSchema( descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class)); } else { return new AvroRowDeserializationSchema(descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA)); } }
@Override public SerializationSchema<Row> createSerializationSchema(Map<String, String> properties) { final DescriptorProperties descriptorProperties = getValidatedProperties(properties); // create and configure if (descriptorProperties.containsKey(AvroValidator.FORMAT_RECORD_CLASS)) { return new AvroRowSerializationSchema( descriptorProperties.getClass(AvroValidator.FORMAT_RECORD_CLASS, SpecificRecord.class)); } else { return new AvroRowSerializationSchema(descriptorProperties.getString(AvroValidator.FORMAT_AVRO_SCHEMA)); } }