Tabnine Logo
KafkaSchemaRegistryFactory
Code Index — Add Tabnine to your IDE (free)

How to use
KafkaSchemaRegistryFactory
in
org.apache.gobblin.kafka.schemareg

Best Java code snippets using org.apache.gobblin.kafka.schemareg.KafkaSchemaRegistryFactory (Showing top 8 results out of 315)

origin: apache/incubator-gobblin

 /**
  * Adapts a generic string-keyed config map to the {@link java.util.Properties}-based
  * factory API and returns the registry the factory constructs.
  *
  * @param config key/value configuration; values are copied into a Properties object
  * @return the KafkaSchemaRegistry built by {@code KafkaSchemaRegistryFactory}
  */
 public static KafkaSchemaRegistry getSchemaRegistry(Map<String, ?> config) {
  Properties properties = new Properties();
  // Equivalent to putAll: Hashtable.putAll delegates to put entry-by-entry.
  config.forEach(properties::put);
  return KafkaSchemaRegistryFactory.getSchemaRegistry(properties);
 }
}
origin: apache/incubator-gobblin

/**
 * Configure this class.
 * @param configs configs in key/value pairs
 * @param isKey whether is for key or value
 */
/**
 * Configure this deserializer from the Kafka consumer's config map.
 *
 * <p>Copies every entry (stringified) into a {@link Properties} object and uses it to
 * obtain the schema registry via {@code KafkaSchemaRegistryFactory}.
 *
 * @param configs configs in key/value pairs
 * @param isKey whether this instance deserializes keys or values; must be {@code false}
 * @throws IllegalArgumentException if {@code isKey} is {@code true} — this
 *         deserializer only works for value fields
 */
public void configure(Map<String, ?> configs, boolean isKey) {
 // Idiomatic negation instead of `isKey == false`.
 Preconditions.checkArgument(!isKey, "LiAvroDeserializer only works for value fields");
 _datumReader = new GenericDatumReader<>();
 Properties props = new Properties();
 for (Map.Entry<String, ?> entry : configs.entrySet()) {
  // Properties only accepts String values, so stringify each config value.
  String value = String.valueOf(entry.getValue());
  props.setProperty(entry.getKey(), value);
 }
 _schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
}
origin: apache/incubator-gobblin

@Override
public Converter<S, Schema, byte[], GenericRecord> init(WorkUnitState workUnit) {
 // Resolve the schema registry from the work unit's properties, then build
 // the Avro deserializer on top of it.
 KafkaSchemaRegistry registry = KafkaSchemaRegistryFactory.getSchemaRegistry(workUnit.getProperties());
 this.schemaRegistry = registry;
 this.deserializer = new LiAvroDeserializerBase(registry);
 return this;
}
origin: apache/incubator-gobblin

/**
 * Builds a Kafka 0.8 high-level consumer for a single topic, wired to a
 * schema-registry-backed Avro deserializer.
 *
 * <p>Reads {@code topic} and {@code zookeeper.connect} from {@code props},
 * constructs the schema registry and deserializer, then creates a consumer
 * connector and grabs one message stream (with its iterator) for the topic.
 *
 * <p>NOTE(review): the {@code checkpoint} parameter is not used anywhere in
 * this constructor — presumably consumed elsewhere in the class; confirm.
 */
public SimpleKafkaConsumer(Properties props, KafkaCheckpoint checkpoint)
{
 Config config = ConfigFactory.parseProperties(props);
 // Both keys are required: Config.getString throws if they are absent.
 topic = config.getString("topic");
 String zkConnect = config.getString("zookeeper.connect");
 schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
 deserializer = new LiAvroDeserializer(schemaRegistry);
 /** TODO: Make Confluent schema registry integration configurable
  * HashMap<String, String> avroSerDeConfig = new HashMap<>();
  * avroSerDeConfig.put("schema.registry.url", "http://localhost:8081");
  * deserializer = new io.confluent.kafka.serializers.KafkaAvroDeserializer();
  * deserializer.configure(avroSerDeConfig, false);
  *
  **/
 Properties consumeProps = new Properties();
 consumeProps.put("zookeeper.connect", zkConnect);
 // Unique group id per run (nanoTime) so this tool never shares offsets
 // with another consumer group.
 consumeProps.put("group.id", "gobblin-tool-" + System.nanoTime());
 consumeProps.put("zookeeper.session.timeout.ms", "10000");
 consumeProps.put("zookeeper.sync.time.ms", "10000");
 consumeProps.put("auto.commit.interval.ms", "10000");
 // Start from the earliest available offset and never auto-commit: offsets
 // are managed externally (see the unused checkpoint parameter above).
 consumeProps.put("auto.offset.reset", "smallest");
 consumeProps.put("auto.commit.enable", "false");
 //consumeProps.put("consumer.timeout.ms", "10000");
 consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));
 // Request exactly one stream for the topic and keep its iterator.
 Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(ImmutableMap.of(topic, 1));
 List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this.topic);
 stream = streams.get(0);
 iterator = stream.iterator();
}
origin: org.apache.gobblin/gobblin-kafka-common

 /**
  * Bridges a {@code Map}-shaped configuration to the {@link java.util.Properties}
  * overload of the schema registry factory.
  *
  * @param config configuration entries to copy into a Properties instance
  * @return the registry produced by {@code KafkaSchemaRegistryFactory}
  */
 public static KafkaSchemaRegistry getSchemaRegistry(Map<String, ?> config) {
  Properties copied = new Properties();
  // Same effect as putAll(config): Hashtable.putAll puts each entry in turn.
  config.forEach(copied::put);
  return KafkaSchemaRegistryFactory.getSchemaRegistry(copied);
 }
}
origin: org.apache.gobblin/gobblin-kafka-common

/**
 * Configure this class.
 * @param configs configs in key/value pairs
 * @param isKey whether is for key or value
 */
/**
 * Configure this deserializer from the Kafka consumer's config map.
 *
 * <p>Stringifies every config value into a {@link Properties} object and hands it to
 * {@code KafkaSchemaRegistryFactory} to obtain the schema registry.
 *
 * @param configs configs in key/value pairs
 * @param isKey whether this instance deserializes keys or values; must be {@code false}
 * @throws IllegalArgumentException if {@code isKey} is {@code true} — this
 *         deserializer only works for value fields
 */
public void configure(Map<String, ?> configs, boolean isKey) {
 // Idiomatic negation instead of `isKey == false`.
 Preconditions.checkArgument(!isKey, "LiAvroDeserializer only works for value fields");
 _datumReader = new GenericDatumReader<>();
 Properties props = new Properties();
 for (Map.Entry<String, ?> entry : configs.entrySet()) {
  // Properties requires String values, so stringify each one.
  String value = String.valueOf(entry.getValue());
  props.setProperty(entry.getKey(), value);
 }
 _schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
}
origin: org.apache.gobblin/gobblin-kafka-common

/**
 * Initializes this converter: resolves the schema registry from the work
 * unit's properties and builds the Avro deserializer over it.
 *
 * @param workUnit the work unit state whose properties configure the registry
 * @return this converter, for fluent chaining
 */
@Override
public Converter<S, Schema, byte[], GenericRecord> init(WorkUnitState workUnit) {
 this.schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(workUnit.getProperties());
 // Deserializer must be built after the registry — it wraps it.
 this.deserializer = new LiAvroDeserializerBase(this.schemaRegistry);
 return this;
}
origin: org.apache.gobblin/gobblin-kafka-08

/**
 * Constructs a Kafka 0.8 high-level consumer bound to one topic, along with
 * the schema registry and Avro deserializer used to decode its messages.
 *
 * <p>{@code props} must contain {@code topic} and {@code zookeeper.connect}
 * ({@code Config.getString} throws when a key is missing).
 *
 * <p>NOTE(review): {@code checkpoint} is never referenced in this
 * constructor — presumably used by other methods of the class; confirm.
 */
public SimpleKafkaConsumer(Properties props, KafkaCheckpoint checkpoint)
{
 Config config = ConfigFactory.parseProperties(props);
 topic = config.getString("topic");
 String zkConnect = config.getString("zookeeper.connect");
 schemaRegistry = KafkaSchemaRegistryFactory.getSchemaRegistry(props);
 deserializer = new LiAvroDeserializer(schemaRegistry);
 /** TODO: Make Confluent schema registry integration configurable
  * HashMap<String, String> avroSerDeConfig = new HashMap<>();
  * avroSerDeConfig.put("schema.registry.url", "http://localhost:8081");
  * deserializer = new io.confluent.kafka.serializers.KafkaAvroDeserializer();
  * deserializer.configure(avroSerDeConfig, false);
  *
  **/
 Properties consumeProps = new Properties();
 consumeProps.put("zookeeper.connect", zkConnect);
 // nanoTime-suffixed group id keeps each tool invocation in its own
 // consumer group, so it never steals offsets from real consumers.
 consumeProps.put("group.id", "gobblin-tool-" + System.nanoTime());
 consumeProps.put("zookeeper.session.timeout.ms", "10000");
 consumeProps.put("zookeeper.sync.time.ms", "10000");
 consumeProps.put("auto.commit.interval.ms", "10000");
 // Read from the earliest offset; auto-commit disabled because offsets are
 // tracked externally (see the checkpoint parameter noted above).
 consumeProps.put("auto.offset.reset", "smallest");
 consumeProps.put("auto.commit.enable", "false");
 //consumeProps.put("consumer.timeout.ms", "10000");
 consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));
 // Exactly one stream is requested for the topic; keep its iterator for
 // the message-pull loop elsewhere in the class.
 Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(ImmutableMap.of(topic, 1));
 List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this.topic);
 stream = streams.get(0);
 iterator = stream.iterator();
}
org.apache.gobblin.kafka.schemareg.KafkaSchemaRegistryFactory

Javadoc

A Factory that constructs and hands back KafkaSchemaRegistry implementations.

Most used methods

  • getSchemaRegistry

Popular in Java

  • Updating database using SQL prepared statement
  • addToBackStack (FragmentTransaction)
  • requestLocationUpdates (LocationManager)
  • findViewById (Activity)
  • FileWriter (java.io)
    A specialized Writer that writes to a file in the file system. All write requests made by calling me
  • InputStreamReader (java.io)
    A class for turning a byte stream into a character stream. Data read from the source input stream is
  • Connection (java.sql)
    A connection represents a link from a Java application to a database. All SQL statements and results
  • Enumeration (java.util)
    A legacy iteration interface.New code should use Iterator instead. Iterator replaces the enumeration
  • XPath (javax.xml.xpath)
    XPath provides access to the XPath evaluation environment and expressions. Evaluation of XPath Expr
  • Table (org.hibernate.mapping)
    A relational table
  • Top Vim plugins
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now