Tabnine Logo
KafkaSchemaRegistryFactory.getSchemaRegistry
Code Index — Add Tabnine to your IDE (free)

How to use
getSchemaRegistry
method
in
org.apache.gobblin.kafka.schemareg.KafkaSchemaRegistryFactory

Best Java code snippets using org.apache.gobblin.kafka.schemareg.KafkaSchemaRegistryFactory.getSchemaRegistry (Showing top 8 results out of 315)

origin: apache/incubator-gobblin

 /**
  * Builds a {@link KafkaSchemaRegistry} from a generic configuration map.
  *
  * <p>Every entry of {@code config} is copied into a {@link Properties}
  * instance, which is then handed to
  * {@link KafkaSchemaRegistryFactory#getSchemaRegistry(Properties)}.
  *
  * @param config configuration key/value pairs
  * @return the schema registry created by the factory
  */
 public static KafkaSchemaRegistry getSchemaRegistry(Map<String, ?> config) {
  Properties properties = new Properties();
  properties.putAll(config);
  return KafkaSchemaRegistryFactory.getSchemaRegistry(properties);
 }
}
origin: apache/incubator-gobblin

/**
 * Configure this class.
 * @param configs configs in key/value pairs
 * @param isKey whether is for key or value
 */
/**
 * Configure this deserializer.
 *
 * <p>Copies the supplied configs into a {@link Properties} object —
 * stringifying each value via {@link String#valueOf(Object)}, since
 * {@code Properties} only accepts {@code String} values — and uses them to
 * build the backing {@code KafkaSchemaRegistry}.
 *
 * @param configs configs in key/value pairs
 * @param isKey whether this deserializer is used for key or value fields;
 *              must be {@code false} because LiAvroDeserializer only works
 *              for value fields
 * @throws IllegalArgumentException if {@code isKey} is {@code true}
 */
public void configure(Map<String, ?> configs, boolean isKey) {
 // Idiomatic negation instead of `isKey == false`.
 Preconditions.checkArgument(!isKey, "LiAvroDeserializer only works for value fields");
 _datumReader = new GenericDatumReader<>();
 Properties props = new Properties();
 for (Map.Entry<String, ?> entry : configs.entrySet()) {
  // Properties rejects non-String values, so stringify every config value.
  props.setProperty(entry.getKey(), String.valueOf(entry.getValue()));
 }
 _schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
}
origin: apache/incubator-gobblin

/**
 * Initializes this converter by building a schema registry from the work
 * unit's properties and wiring a LiAvro deserializer on top of it.
 *
 * @param workUnit work unit state supplying the registry configuration
 * @return this converter, to allow chaining
 */
@Override
public Converter<S, Schema, byte[], GenericRecord> init(WorkUnitState workUnit) {
 this.schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(workUnit.getProperties());
 this.deserializer = new LiAvroDeserializerBase(schemaRegistry);
 return this;
}
origin: apache/incubator-gobblin

/**
 * Creates a consumer wired to the Kafka 0.8 high-level consumer API: reads
 * the topic and ZooKeeper connect string from {@code props}, builds a schema
 * registry plus a LiAvro deserializer, and opens a single message stream for
 * the topic.
 *
 * @param props must contain "topic" and "zookeeper.connect"
 * @param checkpoint NOTE(review): not referenced in this constructor body —
 *                   confirm whether it is consumed elsewhere or can be dropped
 */
public SimpleKafkaConsumer(Properties props, KafkaCheckpoint checkpoint)
{
 Config config = ConfigFactory.parseProperties(props);
 topic = config.getString("topic");
 String zkConnect = config.getString("zookeeper.connect");
 schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
 deserializer = new LiAvroDeserializer(schemaRegistry);
 /** TODO: Make Confluent schema registry integration configurable
  * HashMap<String, String> avroSerDeConfig = new HashMap<>();
  * avroSerDeConfig.put("schema.registry.url", "http://localhost:8081");
  * deserializer = new io.confluent.kafka.serializers.KafkaAvroDeserializer();
  * deserializer.configure(avroSerDeConfig, false);
  *
  **/
 Properties consumeProps = new Properties();
 consumeProps.put("zookeeper.connect", zkConnect);
 // Unique group id per run so each invocation starts its own consumer group.
 consumeProps.put("group.id", "gobblin-tool-" + System.nanoTime());
 consumeProps.put("zookeeper.session.timeout.ms", "10000");
 consumeProps.put("zookeeper.sync.time.ms", "10000");
 consumeProps.put("auto.commit.interval.ms", "10000");
 // Read from the earliest available offset; auto-commit is disabled —
 // presumably offsets are tracked via KafkaCheckpoint instead. TODO confirm.
 consumeProps.put("auto.offset.reset", "smallest");
 consumeProps.put("auto.commit.enable", "false");
 //consumeProps.put("consumer.timeout.ms", "10000");
 consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));
 // Request exactly one stream for the topic and keep its iterator.
 Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(ImmutableMap.of(topic, 1));
 List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this.topic);
 stream = streams.get(0);
 iterator = stream.iterator();
}
origin: org.apache.gobblin/gobblin-kafka-common

 /**
  * Adapts a {@code Map}-based configuration to the {@link Properties}-based
  * factory entry point.
  *
  * @param config configuration key/value pairs
  * @return the {@link KafkaSchemaRegistry} produced by
  *         {@link KafkaSchemaRegistryFactory#getSchemaRegistry(Properties)}
  */
 public static KafkaSchemaRegistry getSchemaRegistry(Map<String, ?> config) {
  Properties asProperties = new Properties();
  asProperties.putAll(config);
  return KafkaSchemaRegistryFactory.getSchemaRegistry(asProperties);
 }
}
origin: org.apache.gobblin/gobblin-kafka-common

/**
 * Configure this class.
 * @param configs configs in key/value pairs
 * @param isKey whether is for key or value
 */
/**
 * Configure this deserializer.
 *
 * <p>Copies the supplied configs into a {@link Properties} object —
 * stringifying each value via {@link String#valueOf(Object)}, since
 * {@code Properties} only accepts {@code String} values — and uses them to
 * build the backing {@code KafkaSchemaRegistry}.
 *
 * @param configs configs in key/value pairs
 * @param isKey whether this deserializer is used for key or value fields;
 *              must be {@code false} because LiAvroDeserializer only works
 *              for value fields
 * @throws IllegalArgumentException if {@code isKey} is {@code true}
 */
public void configure(Map<String, ?> configs, boolean isKey) {
 // Idiomatic negation instead of `isKey == false`.
 Preconditions.checkArgument(!isKey, "LiAvroDeserializer only works for value fields");
 _datumReader = new GenericDatumReader<>();
 Properties props = new Properties();
 for (Map.Entry<String, ?> entry : configs.entrySet()) {
  // Properties rejects non-String values, so stringify every config value.
  props.setProperty(entry.getKey(), String.valueOf(entry.getValue()));
 }
 _schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
}
origin: org.apache.gobblin/gobblin-kafka-common

/**
 * Initializes this converter by building a schema registry from the work
 * unit's properties and wiring a LiAvro deserializer on top of it.
 *
 * @param workUnit work unit state supplying the registry configuration
 * @return this converter, to allow chaining
 */
@Override
public Converter<S, Schema, byte[], GenericRecord> init(WorkUnitState workUnit) {
 this.schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(workUnit.getProperties());
 this.deserializer = new LiAvroDeserializerBase(schemaRegistry);
 return this;
}
origin: org.apache.gobblin/gobblin-kafka-08

/**
 * Creates a consumer wired to the Kafka 0.8 high-level consumer API: reads
 * the topic and ZooKeeper connect string from {@code props}, builds a schema
 * registry plus a LiAvro deserializer, and opens a single message stream for
 * the topic.
 *
 * @param props must contain "topic" and "zookeeper.connect"
 * @param checkpoint NOTE(review): not referenced in this constructor body —
 *                   confirm whether it is consumed elsewhere or can be dropped
 */
public SimpleKafkaConsumer(Properties props, KafkaCheckpoint checkpoint)
{
 Config config = ConfigFactory.parseProperties(props);
 topic = config.getString("topic");
 String zkConnect = config.getString("zookeeper.connect");
 schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
 deserializer = new LiAvroDeserializer(schemaRegistry);
 /** TODO: Make Confluent schema registry integration configurable
  * HashMap<String, String> avroSerDeConfig = new HashMap<>();
  * avroSerDeConfig.put("schema.registry.url", "http://localhost:8081");
  * deserializer = new io.confluent.kafka.serializers.KafkaAvroDeserializer();
  * deserializer.configure(avroSerDeConfig, false);
  *
  **/
 Properties consumeProps = new Properties();
 consumeProps.put("zookeeper.connect", zkConnect);
 // Unique group id per run so each invocation starts its own consumer group.
 consumeProps.put("group.id", "gobblin-tool-" + System.nanoTime());
 consumeProps.put("zookeeper.session.timeout.ms", "10000");
 consumeProps.put("zookeeper.sync.time.ms", "10000");
 consumeProps.put("auto.commit.interval.ms", "10000");
 // Read from the earliest available offset; auto-commit is disabled —
 // presumably offsets are tracked via KafkaCheckpoint instead. TODO confirm.
 consumeProps.put("auto.offset.reset", "smallest");
 consumeProps.put("auto.commit.enable", "false");
 //consumeProps.put("consumer.timeout.ms", "10000");
 consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));
 // Request exactly one stream for the topic and keep its iterator.
 Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(ImmutableMap.of(topic, 1));
 List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this.topic);
 stream = streams.get(0);
 iterator = stream.iterator();
}
org.apache.gobblin.kafka.schemareg.KafkaSchemaRegistryFactory.getSchemaRegistry

Popular methods of KafkaSchemaRegistryFactory

    Popular in Java

    • Start an intent from android
    • scheduleAtFixedRate (Timer)
    • getContentResolver (Context)
    • getApplicationContext (Context)
    • File (java.io)
      An "abstract" representation of a file system entity identified by a pathname. The pathname may be a
    • RandomAccessFile (java.io)
      Allows reading from and writing to a file in a random-access manner. This is different from the uni-
    • Runnable (java.lang)
      Represents a command that can be executed. Often used to run code in a different Thread.
    • Format (java.text)
      The base class for all formats. This is an abstract base class which specifies the protocol for clas
    • ThreadPoolExecutor (java.util.concurrent)
      An ExecutorService that executes each submitted task using one of possibly several pooled threads, n
    • ZipFile (java.util.zip)
      This class provides random read access to a zip file. You pay more to read the zip file's central di
    • Top plugins for Android Studio
    Tabnine Logo
    • Products

      Search for Java codeSearch for JavaScript code
    • IDE Plugins

      IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
    • Company

      About UsContact UsCareers
    • Resources

      FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
    Get Tabnine for your IDE now