HCatUtil.getStorageHandler

How to use the getStorageHandler method in org.apache.hive.hcatalog.common.HCatUtil

Best Java code snippets using org.apache.hive.hcatalog.common.HCatUtil.getStorageHandler.
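Most of the snippets below share one pattern: read the serialized job information out of the Hadoop Configuration, then ask HCatUtil to resolve the table's storage handler from its StorerInfo. A minimal sketch of that pattern follows; the class and method names StorageHandlerLookup/forOutputJob are illustrative, not part of HCatalog.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hive.hcatalog.common.HCatUtil;
import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;

class StorageHandlerLookup { // illustrative helper, not part of HCatalog
  static HiveStorageHandler forOutputJob(Configuration conf) throws IOException {
    // getJobInfo deserializes the OutputJobInfo that HCatOutputFormat
    // previously stored in the configuration.
    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(conf);
    // Resolve the handler from the table's StorerInfo; HCatUtil falls back to
    // FosterStorageHandler when the table defines no explicit handler.
    return HCatUtil.getStorageHandler(conf, jobInfo.getTableInfo().getStorerInfo());
  }
}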

origin: apache/hive

private static PartInfo extractPartInfo(HCatSchema schema, StorageDescriptor sd,
          Map<String, String> parameters, Configuration conf,
          InputJobInfo inputJobInfo) throws IOException {
 StorerInfo storerInfo = InternalUtil.extractStorerInfo(sd, parameters);
 Properties hcatProperties = new Properties();
 HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, storerInfo);
 // copy the properties from storageHandler to jobProperties
 Map<String, String> jobProperties = HCatUtil.getInputJobProperties(storageHandler, inputJobInfo);
 for (String key : parameters.keySet()) {
  hcatProperties.put(key, parameters.get(key));
 }
 // FIXME
 // Bloating partinfo with inputJobInfo is not good
 return new PartInfo(schema, storageHandler, sd.getLocation(),
  hcatProperties, jobProperties, inputJobInfo.getTableInfo());
}
origin: apache/hive

OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(jobInfoString);
StorerInfo storeInfo = jobInfo.getTableInfo().getStorerInfo();
HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(
 context.getConfiguration(), storeInfo);
Class<? extends AbstractSerDe> serde = storageHandler.getSerDeClass();
origin: apache/hive

/**
 * @param context current JobContext
 * @param baseRecordWriter RecordWriter to contain
 * @throws IOException
 * @throws InterruptedException
 */
public DefaultRecordWriterContainer(TaskAttemptContext context,
         org.apache.hadoop.mapred.RecordWriter<? super WritableComparable<?>, ? super Writable> baseRecordWriter) throws IOException, InterruptedException {
 super(context, baseRecordWriter);
 jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
 storageHandler = HCatUtil.getStorageHandler(context.getConfiguration(), jobInfo.getTableInfo().getStorerInfo());
 HCatOutputFormat.configureOutputStorageHandler(context);
 serDe = ReflectionUtils.newInstance(storageHandler.getSerDeClass(), context.getConfiguration());
 hcatRecordOI = InternalUtil.createStructObjectInspector(jobInfo.getOutputSchema());
 try {
  InternalUtil.initializeOutputSerDe(serDe, context.getConfiguration(), jobInfo);
 } catch (SerDeException e) {
  throw new IOException("Failed to initialize SerDe", e);
 }
}
origin: apache/hive

/**
 * Gets the output format instance.
 * @param context the job context
 * @return the output format instance
 * @throws IOException
 */
protected OutputFormat<WritableComparable<?>, HCatRecord> getOutputFormat(JobContext context) 
 throws IOException {
 OutputJobInfo jobInfo = getJobInfo(context.getConfiguration());
 HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(context.getConfiguration(), 
   jobInfo.getTableInfo().getStorerInfo());
 // Always configure storage handler with jobproperties/jobconf before calling any methods on it
 configureOutputStorageHandler(context);
 if (storageHandler instanceof FosterStorageHandler) {
  return new FileOutputFormatContainer(ReflectionUtils.newInstance(
    storageHandler.getOutputFormatClass(), context.getConfiguration()));
 } else {
  return new DefaultOutputFormatContainer(ReflectionUtils.newInstance(
    storageHandler.getOutputFormatClass(), context.getConfiguration()));
 }
}
origin: apache/hive

public static HiveStorageHandler getStorageHandler(Configuration conf, PartInfo partitionInfo) throws IOException {
 return HCatUtil.getStorageHandler(
  conf,
  partitionInfo.getStorageHandlerClassName(),
  partitionInfo.getSerdeClassName(),
  partitionInfo.getInputFormatClassName(),
  partitionInfo.getOutputFormatClassName());
}
origin: apache/hive

try {
 OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(conf.get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
 HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(jobContext.getConfiguration(), jobInfo.getTableInfo().getStorerInfo());
origin: apache/hive

/**
 * @param context current JobContext
 * @param baseCommitter OutputCommitter to contain
 * @throws IOException
 */
public FileOutputCommitterContainer(JobContext context,
         org.apache.hadoop.mapred.OutputCommitter baseCommitter) throws IOException {
 super(context, baseCommitter);
 jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
 dynamicPartitioningUsed = jobInfo.isDynamicPartitioningUsed();
 this.partitionsDiscovered = !dynamicPartitioningUsed;
 cachedStorageHandler = HCatUtil.getStorageHandler(context.getConfiguration(), jobInfo.getTableInfo().getStorerInfo());
 Table table = new Table(jobInfo.getTableInfo().getTable());
 if (dynamicPartitioningUsed && Boolean.parseBoolean((String)table.getProperty("EXTERNAL"))
   && jobInfo.getCustomDynamicPath() != null
   && jobInfo.getCustomDynamicPath().length() > 0) {
  customDynamicLocationUsed = true;
 } else {
  customDynamicLocationUsed = false;
 }
 this.maxAppendAttempts = context.getConfiguration().getInt(HCatConstants.HCAT_APPEND_LIMIT, APPEND_COUNTER_WARN_THRESHOLD);
}
origin: apache/hive

/**
 * Create an instance of a storage handler defined in storerInfo. If one cannot be found
 * then FosterStorageHandler is used to encapsulate the InputFormat, OutputFormat and SerDe.
 * This StorageHandler assumes the other supplied storage artifacts are for a file-based storage system.
 * @param conf job's configuration will be used to configure the Configurable StorageHandler
 * @param storerInfo StorerInfo defining the StorageHandler, InputFormat, OutputFormat and SerDe
 * @return storageHandler instance
 * @throws IOException
 */
public static HiveStorageHandler getStorageHandler(Configuration conf, StorerInfo storerInfo) throws IOException {
 return getStorageHandler(conf,
  storerInfo.getStorageHandlerClass(),
  storerInfo.getSerdeClass(),
  storerInfo.getIfClass(),
  storerInfo.getOfClass());
}
origin: apache/hive

/**
 * @param baseWriter RecordWriter to contain
 * @param context current TaskAttemptContext
 * @throws IOException
 * @throws InterruptedException
 */
public FileRecordWriterContainer(
  RecordWriter<? super WritableComparable<?>, ? super Writable> baseWriter,
  TaskAttemptContext context) throws IOException, InterruptedException {
 super(context, baseWriter);
 this.context = context;
 jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
 storageHandler =
   HCatUtil.getStorageHandler(context.getConfiguration(), jobInfo.getTableInfo()
     .getStorerInfo());
 serDe = ReflectionUtils.newInstance(storageHandler.getSerDeClass(), context.getConfiguration());
 objectInspector = InternalUtil.createStructObjectInspector(jobInfo.getOutputSchema());
 try {
  InternalUtil.initializeOutputSerDe(serDe, context.getConfiguration(), jobInfo);
 } catch (SerDeException e) {
  throw new IOException("Failed to inialize SerDe", e);
 }
 // If partition columns occur in data, we want to remove them.
 partColsToDel = jobInfo.getPosOfPartCols();
 if (partColsToDel == null) {
  throw new HCatException("It seems that setSchema() is not called on "
    + "HCatOutputFormat. Please make sure that method is called.");
 }
}
origin: apache/hive

HCatUtil.copyJobPropertiesToJobConf(jobProperties, jobConf);
storageHandler = HCatUtil.getStorageHandler(
 jobConf, partitionInfo);
origin: apache/hive

try {
 HiveStorageHandler storageHandlerInst = HCatUtil
  .getStorageHandler(context.getConf(),
   desc.getStorageHandler(),
   desc.getSerName(),
   desc.getInputFormat(),
   desc.getOutputFormat());
origin: apache/hive

Configuration conf = jobContext.getConfiguration();
HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(
 conf, partitionInfo);
origin: apache/hive

HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(conf, storerInfo);
org.apache.hive.hcatalog.common.HCatUtil.getStorageHandler

Javadoc

Create an instance of a storage handler. If storageHandler == null, a surrogate StorageHandler (FosterStorageHandler) is used to encapsulate the InputFormat, OutputFormat and SerDe. This StorageHandler assumes the other supplied storage artifacts are for a file-based storage system.
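The five-argument overload exposes this fallback directly. A minimal, hedged sketch: SurrogateHandlerExample is not part of HCatalog, and the class names passed in are ordinary Hive/Hadoop text-format classes chosen purely for illustration.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hive.hcatalog.common.HCatUtil;

class SurrogateHandlerExample { // illustrative, not part of HCatalog
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // With no explicit storage handler (null), HCatUtil wraps the supplied
    // InputFormat, OutputFormat and SerDe in a FosterStorageHandler.
    HiveStorageHandler handler = HCatUtil.getStorageHandler(
        conf,
        null, // storage-handler class name: none -> surrogate handler
        "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe",
        "org.apache.hadoop.mapred.TextInputFormat",
        "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat");
    System.out.println(handler.getClass().getName());
  }
}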

Popular methods of HCatUtil

  • getHiveMetastoreClient
    Get or create a Hive client, depending on whether one already exists in the cache
  • getTable
  • closeHiveClientQuietly
  • deserialize
  • getHiveConf
  • extractSchema
  • serialize
  • validateExecuteBitPresentIfReadOrWrite
    Ensure that read or write permissions are not granted without also granting execute permissions.
  • validateMorePermissive
    Test if the first FsAction is more permissive than the second.
  • configureOutputStorageHandler
  • copyConf
    Replace the contents of dest with the contents of src
  • copyJobPropertiesToJobConf
  • decodeBytes
  • encodeBytes
  • extractThriftToken
  • getFieldSchemaList
  • getHCatFieldSchemaList
  • getInputJobProperties
  • getJobConfFromContext
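
As a hedged sketch of how the client helpers above fit together (the class name MetastoreLookupExample and the database/table names are placeholders, not HCatalog API):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hive.hcatalog.common.HCatUtil;

class MetastoreLookupExample { // illustrative, not part of HCatalog
  public static void main(String[] args) throws Exception {
    HiveConf hiveConf = new HiveConf();
    // Served from HCatUtil's client cache when a client already exists.
    IMetaStoreClient client = HCatUtil.getHiveMetastoreClient(hiveConf);
    try {
      Table table = HCatUtil.getTable(client, "default", "my_table"); // placeholder names
      System.out.println(table.getTableName());
    } finally {
      HCatUtil.closeHiveClientQuietly(client); // ignores errors on close
    }
  }
}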
