/**
 * Gives the table's storage handler (if one is declared in the table properties)
 * a chance to contribute settings to the job configuration.
 *
 * @param tableDesc descriptor whose properties may name a storage handler class
 * @param jobConf   job configuration to be adjusted in place
 * @throws RuntimeException wrapping any {@code HiveException} raised while
 *                          instantiating the storage handler
 */
public static void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
  String storageHandlerClass = tableDesc.getProperties().getProperty(
      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
  try {
    HiveStorageHandler handler = HiveUtils.getStorageHandler(jobConf, storageHandlerClass);
    // No handler declared for this table: nothing to configure.
    if (handler == null) {
      return;
    }
    handler.configureJobConf(tableDesc, jobConf);
  } catch (HiveException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Gives the table's storage handler (if one is declared in the table properties)
 * a chance to contribute settings to the job configuration, and moves any
 * per-table job secrets from the table descriptor into the job's credential
 * store (keyed as SECRET_PREFIX + delimiter + tableName + delimiter + secretName).
 *
 * @param tableDesc descriptor whose properties may name a storage handler class
 *                  and which may carry job secrets
 * @param jobConf   job configuration to be adjusted in place
 * @throws RuntimeException wrapping any {@code HiveException} raised while
 *                          instantiating the storage handler
 */
public static void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
  String handlerClass = tableDesc.getProperties().getProperty(
      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
  try {
    HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(jobConf, handlerClass);
    if (storageHandler != null) {
      storageHandler.configureJobConf(tableDesc, jobConf);
    }
    if (tableDesc.getJobSecrets() != null) {
      for (Map.Entry<String, String> entry : tableDesc.getJobSecrets().entrySet()) {
        String key = TableDesc.SECRET_PREFIX + TableDesc.SECRET_DELIMIT
            + tableDesc.getTableName() + TableDesc.SECRET_DELIMIT + entry.getKey();
        // Use an explicit charset: the no-arg getBytes() depends on the platform
        // default and can corrupt non-ASCII secret values on non-UTF-8 hosts.
        jobConf.getCredentials().addSecretKey(
            new Text(key), entry.getValue().getBytes(java.nio.charset.StandardCharsets.UTF_8));
      }
      // Drop the plaintext secrets from the descriptor once they are stored in
      // the job's credentials, so they are not serialized with the plan.
      tableDesc.getJobSecrets().clear();
    }
  } catch (HiveException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Lets the storage handler named in the table descriptor's properties (if any)
 * adjust the given job configuration.
 *
 * @param tableDesc descriptor whose properties may name a storage handler class
 * @param jobConf   job configuration to be adjusted in place
 * @throws RuntimeException wrapping any {@code HiveException} raised while
 *                          instantiating the storage handler
 */
public static void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
  final String className = tableDesc.getProperties().getProperty(
      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
  try {
    final HiveStorageHandler handler = HiveUtils.getStorageHandler(jobConf, className);
    if (handler != null) {
      // Delegate so handler-specific settings (input formats, credentials, etc.
      // as the handler sees fit) end up in the job configuration.
      handler.configureJobConf(tableDesc, jobConf);
    }
  } catch (HiveException e) {
    throw new RuntimeException(e);
  }
}