Tabnine Logo
HiveSerDeWrapper.getSerDe
Code Index — Add Tabnine to your IDE (free)

How to use
getSerDe
method
in
org.apache.gobblin.hive.HiveSerDeWrapper

Best Java code snippets using org.apache.gobblin.hive.HiveSerDeWrapper.getSerDe (Showing top 6 results out of 315)

origin: apache/incubator-gobblin

/**
 * Initializes this converter from the given {@link WorkUnitState}.
 *
 * <p>Instantiates the Hive serializer and deserializer configured in {@code state},
 * initializes both against the Hadoop {@link Configuration} derived from the state,
 * and attempts to set column information in between (presumably derived from the
 * deserializer's schema — confirm against {@code setColumnsIfPossible}).</p>
 *
 * @param state the work unit state carrying the SerDe configuration
 * @return this converter, for call chaining
 * @throws RuntimeException wrapping an {@link IOException} if the SerDes cannot be
 *         instantiated, or a {@link SerDeException} if they cannot be initialized
 */
@Override
public HiveSerDeConverter init(WorkUnitState state) {
 super.init(state);
 Configuration conf = HadoopUtils.getConfFromState(state);
 try {
  this.serializer = HiveSerDeWrapper.getSerializer(state).getSerDe();
  this.deserializer = HiveSerDeWrapper.getDeserializer(state).getSerDe();
  // Initialize the deserializer first so column info can be set before the
  // serializer is initialized (order matters for setColumnsIfPossible).
  this.deserializer.initialize(conf, state.getProperties());
  setColumnsIfPossible(state);
  this.serializer.initialize(conf, state.getProperties());
 } catch (IOException e) {
  log.error("Failed to instantiate serializer and deserializer", e);
  // Throwables.propagate is deprecated in Guava; for a checked exception it is
  // exactly equivalent to wrapping in a RuntimeException, so do that directly.
  throw new RuntimeException(e);
 } catch (SerDeException e) {
  log.error("Failed to initialize serializer and deserializer", e);
  throw new RuntimeException(e);
 }
 return this;
}
origin: apache/incubator-gobblin

/**
 * Builds a {@link HiveWritableHdfsDataWriter} for the configured destination.
 *
 * <p>If the writable class or the output format class is not already present in the
 * destination properties, both are derived from the configured Hive serializer and
 * stored back into the properties before the writer is constructed.</p>
 *
 * @return a new {@link HiveWritableHdfsDataWriter}
 * @throws IOException if the Hive serializer cannot be obtained
 */
@SuppressWarnings("deprecation")
@Override
public DataWriter<Writable> build() throws IOException {
 Preconditions.checkNotNull(this.destination);
 Preconditions.checkArgument(!Strings.isNullOrEmpty(this.writerId));
 State props = this.destination.getProperties();
 boolean serDeConfigMissing =
   !props.contains(WRITER_WRITABLE_CLASS) || !props.contains(WRITER_OUTPUT_FORMAT_CLASS);
 if (serDeConfigMissing) {
  // Fill in the missing writer configuration from the Hive serializer.
  HiveSerDeWrapper serDeWrapper = HiveSerDeWrapper.getSerializer(props);
  props.setProp(WRITER_WRITABLE_CLASS, serDeWrapper.getSerDe().getSerializedClass().getName());
  props.setProp(WRITER_OUTPUT_FORMAT_CLASS, serDeWrapper.getOutputFormatClassName());
 }
 return new HiveWritableHdfsDataWriter(this, props);
}
origin: apache/incubator-gobblin

/**
 * Adds Avro {@link Schema}-based SerDe properties to the given {@link HiveRegistrationUnit}.
 *
 * <p>The schema is read from the directory at {@code path} (timed under
 * {@link #HIVE_SPEC_SCHEMA_READING_TIMER}). When no schema is found the unit is left
 * untouched; otherwise the SerDe type, input format, and output format from this
 * registrar's {@code serDeWrapper} are set on the unit before the schema properties
 * are added via {@code addSchemaProperties}.</p>
 *
 * @param path a directory containing the data whose schema should be registered
 * @param hiveUnit the registration unit to decorate
 * @throws IOException if the file system cannot be queried or the schema cannot be read
 * @throws IllegalArgumentException if {@code path} is not a directory
 */
@Override
public void addSerDeProperties(Path path, HiveRegistrationUnit hiveUnit) throws IOException {
 if (!this.fs.getFileStatus(path).isDirectory()) {
  throw new IllegalArgumentException(path + " is not a directory.");
 }
 // Time the (potentially slow) schema read against the metric context.
 Schema schema;
 try (Timer.Context ignored = metricContext.timer(HIVE_SPEC_SCHEMA_READING_TIMER).time()) {
  schema = getDirectorySchema(path);
 }
 if (schema == null) {
  // Nothing to register for this directory.
  return;
 }
 hiveUnit.setSerDeType(this.serDeWrapper.getSerDe().getClass().getName());
 hiveUnit.setInputFormat(this.serDeWrapper.getInputFormatClassName());
 hiveUnit.setOutputFormat(this.serDeWrapper.getOutputFormatClassName());
 addSchemaProperties(path, hiveUnit, schema);
}
origin: org.apache.gobblin/gobblin-core

/**
 * Initializes this converter from the given {@link WorkUnitState}: instantiates the
 * configured Hive serializer and deserializer, then initializes both against the
 * Hadoop configuration derived from the state.
 *
 * @param state the work unit state carrying the SerDe configuration
 * @return this converter, for call chaining
 */
@Override
public HiveSerDeConverter init(WorkUnitState state) {
 super.init(state);
 Configuration conf = HadoopUtils.getConfFromState(state);
 try {
  this.serializer = HiveSerDeWrapper.getSerializer(state).getSerDe();
  this.deserializer = HiveSerDeWrapper.getDeserializer(state).getSerDe();
  // The deserializer is initialized first so columns can be set (via
  // setColumnsIfPossible) before the serializer is initialized.
  this.deserializer.initialize(conf, state.getProperties());
  setColumnsIfPossible(state);
  this.serializer.initialize(conf, state.getProperties());
 } catch (IOException e) {
  log.error("Failed to instantiate serializer and deserializer", e);
  // Checked exception: propagate wraps it in a RuntimeException.
  throw Throwables.propagate(e);
 } catch (SerDeException e) {
  log.error("Failed to initialize serializer and deserializer", e);
  throw Throwables.propagate(e);
 }
 return this;
}
origin: org.apache.gobblin/gobblin-core

/**
 * Builds a {@link HiveWritableHdfsDataWriter} for the configured destination,
 * deriving the writable class and output format from the Hive serializer when
 * they are not already present in the destination properties.
 *
 * @return a new {@link HiveWritableHdfsDataWriter}
 * @throws IOException if the Hive serializer cannot be obtained
 */
@SuppressWarnings("deprecation")
@Override
public DataWriter<Writable> build() throws IOException {
 Preconditions.checkNotNull(this.destination);
 Preconditions.checkArgument(!Strings.isNullOrEmpty(this.writerId));
 State properties = this.destination.getProperties();
 // Only consult the serializer when either writer property is missing.
 if (!properties.contains(WRITER_WRITABLE_CLASS) || !properties.contains(WRITER_OUTPUT_FORMAT_CLASS)) {
  HiveSerDeWrapper serializer = HiveSerDeWrapper.getSerializer(properties);
  properties.setProp(WRITER_WRITABLE_CLASS, serializer.getSerDe().getSerializedClass().getName());
  properties.setProp(WRITER_OUTPUT_FORMAT_CLASS, serializer.getOutputFormatClassName());
 }
 return new HiveWritableHdfsDataWriter(this, properties);
}
origin: org.apache.gobblin/gobblin-hive-registration

/**
 * Add an Avro {@link Schema} to the given {@link HiveRegistrationUnit}.
 *
 *  <p>
 *    If {@link #USE_SCHEMA_FILE} is true, the schema will be added via {@link #SCHEMA_URL} pointing to
 *    the schema file named {@link #SCHEMA_FILE_NAME}.
 *  </p>
 *
 *  <p>
 *    If {@link #USE_SCHEMA_FILE} is false, the schema will be obtained by {@link #getDirectorySchema(Path)}.
 *    If the length of the schema is less than {@link #SCHEMA_LITERAL_LENGTH_LIMIT}, it will be added via
 *    {@link #SCHEMA_LITERAL}. Otherwise, the schema will be written to {@link #SCHEMA_FILE_NAME} and added
 *    via {@link #SCHEMA_URL}.
 *  </p>
 */
@Override
public void addSerDeProperties(Path path, HiveRegistrationUnit hiveUnit) throws IOException {
 // Only directories carry a registrable schema; reject files outright.
 Preconditions.checkArgument(this.fs.getFileStatus(path).isDirectory(), path + " is not a directory.");
 Schema schema;
 // Time the (potentially slow) schema read against the metric context.
 try (Timer.Context context = metricContext.timer(HIVE_SPEC_SCHEMA_READING_TIMER).time()) {
  schema = getDirectorySchema(path);
 }
 if (schema == null) {
  // No schema found: leave the registration unit untouched.
  return;
 }
 hiveUnit.setSerDeType(this.serDeWrapper.getSerDe().getClass().getName());
 hiveUnit.setInputFormat(this.serDeWrapper.getInputFormatClassName());
 hiveUnit.setOutputFormat(this.serDeWrapper.getOutputFormatClassName());
 addSchemaProperties(path, hiveUnit, schema);
}
org.apache.gobblin.hive.HiveSerDeWrapper.getSerDe

Javadoc

Get the SerDe instance associated with this HiveSerDeWrapper. This method performs lazy initialization.

Popular methods of HiveSerDeWrapper

  • getInputFormatClassName
    Get the input format class name associated with this HiveSerDeWrapper.
  • getOutputFormatClassName
    Get the output format class name associated with this HiveSerDeWrapper.
  • <init>
  • get
    Get an instance of HiveSerDeWrapper.
  • getDeserializer
    Get an instance of HiveSerDeWrapper from a State.
  • getSerializer
    Get an instance of HiveSerDeWrapper from a State.

Popular in Java

  • Making http requests using okhttp
  • getSharedPreferences (Context)
  • getResourceAsStream (ClassLoader)
  • onRequestPermissionsResult (Fragment)
  • Kernel (java.awt.image)
  • FileInputStream (java.io)
    An input stream that reads bytes from a file. File file = ...finally if (in != null) in.clos
  • InetAddress (java.net)
    An Internet Protocol (IP) address. This can be either an IPv4 address or an IPv6 address, and in pra
  • UnknownHostException (java.net)
    Thrown when a hostname can not be resolved.
  • JarFile (java.util.jar)
    JarFile is used to read jar entries and their associated data from jar files.
  • Option (scala)
  • Top PhpStorm plugins
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now