/**
 * Classifies a table by its storage handler: DRUID or JDBC when backed by the
 * corresponding handler, NATIVE otherwise (including when no handler is set).
 */
private static TableType obtainTableType(Table tabMetaData) {
  if (tabMetaData.getStorageHandler() == null) {
    return TableType.NATIVE;
  }
  final String handlerId = tabMetaData.getStorageHandler().toString();
  if (Constants.DRUID_HIVE_STORAGE_HANDLER_ID.equals(handlerId)) {
    return TableType.DRUID;
  }
  if (Constants.JDBC_HIVE_STORAGE_HANDLER_ID.equals(handlerId)) {
    return TableType.JDBC;
  }
  return TableType.NATIVE;
}
/**
 * Returns DRUID when the table is backed by the Druid storage handler,
 * NATIVE in every other case (no handler, or a different handler).
 */
private TableType obtainTableType(Table tabMetaData) {
  final boolean druidBacked = tabMetaData.getStorageHandler() != null
      && Constants.DRUID_HIVE_STORAGE_HANDLER_ID.equals(
          tabMetaData.getStorageHandler().toString());
  return druidBacked ? TableType.DRUID : TableType.NATIVE;
}
/**
 * Determines the table type: DRUID for Druid-storage-handler tables,
 * NATIVE otherwise.
 */
private static TableType obtainTableType(Table tabMetaData) {
  if (tabMetaData.getStorageHandler() == null) {
    return TableType.NATIVE;
  }
  return Constants.DRUID_HIVE_STORAGE_HANDLER_ID.equals(
      tabMetaData.getStorageHandler().toString())
          ? TableType.DRUID
          : TableType.NATIVE;
}
/**
 * Resolves the table type from the storage handler (DRUID, JDBC), defaulting
 * to NATIVE when the handler is absent or unrecognized.
 */
private TableType obtainTableType(Table tabMetaData) {
  TableType resolved = TableType.NATIVE;
  if (tabMetaData.getStorageHandler() != null) {
    final String handlerName = tabMetaData.getStorageHandler().toString();
    if (handlerName.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID)) {
      resolved = TableType.DRUID;
    } else if (handlerName.equals(Constants.JDBC_HIVE_STORAGE_HANDLER_ID)) {
      resolved = TableType.JDBC;
    }
  }
  return resolved;
}
/**
 * Returns whether this table requires a filesystem LOCATION. Tables backed by
 * storage handlers that manage their own storage (HBase, Druid) do not.
 * Defaults to true for tables with no storage handler.
 */
public static boolean doesTableNeedLocation(Table tbl) {
  // TODO: If we are ok with breaking compatibility of existing 3rd party StorageHandlers,
  // this method could be moved to the HiveStorageHandler interface.
  boolean retval = true;
  if (tbl.getStorageHandler() != null) {
    // NOTE(review): comparison uses the handler's toString() rather than its
    // class name — presumably toString() yields the class name; confirm.
    String sh = tbl.getStorageHandler().toString();
    retval = !sh.equals("org.apache.hadoop.hive.hbase.HBaseStorageHandler")
        && !sh.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID);
  }
  return retval;
}
} // closing brace of the enclosing class (carried over from the source line)
public static boolean doesTableNeedLocation(Table tbl) { // TODO: If we are ok with breaking compatibility of existing 3rd party StorageHandlers, // this method could be moved to the HiveStorageHandler interface. boolean retval = true; if (tbl.getStorageHandler() != null) { // TODO: why doesn't this check class name rather than toString? String sh = tbl.getStorageHandler().toString(); retval = !sh.equals("org.apache.hadoop.hive.hbase.HBaseStorageHandler") && !sh.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID) && !sh.equals(Constants.JDBC_HIVE_STORAGE_HANDLER_ID) && !sh.equals("org.apache.hadoop.hive.accumulo.AccumuloStorageHandler"); } return retval; }
/**
 * Lazily resolves and caches the table's InputFormat class. When the storage
 * descriptor names no input format, falls back to the storage handler's input
 * format; returns null when neither is available.
 *
 * @throws RuntimeException wrapping ClassNotFoundException if the named class
 *         cannot be loaded
 */
final public Class<? extends InputFormat> getInputFormatClass() {
  if (inputFormatClass != null) {
    return inputFormatClass;
  }
  String className = tTable.getSd().getInputFormat();
  if (className == null) {
    // No explicit format: delegate to the storage handler, if any.
    if (getStorageHandler() != null) {
      inputFormatClass = getStorageHandler().getInputFormatClass();
    }
    return inputFormatClass;
  }
  try {
    inputFormatClass = (Class<? extends InputFormat>) Class.forName(
        className, true, Utilities.getSessionSpecifiedClassLoader());
  } catch (ClassNotFoundException e) {
    throw new RuntimeException(e);
  }
  return inputFormatClass;
}
/**
 * Returns the table's InputFormat class, computing and caching it on first
 * use. Prefers the class named in the storage descriptor, then the storage
 * handler's input format; null when neither exists.
 *
 * @throws RuntimeException wrapping ClassNotFoundException if class loading fails
 */
final public Class<? extends InputFormat> getInputFormatClass() {
  if (inputFormatClass == null) {
    final String fmtName = tTable.getSd().getInputFormat();
    if (fmtName != null) {
      try {
        inputFormatClass = (Class<? extends InputFormat>) Class.forName(
            fmtName, true, Utilities.getSessionSpecifiedClassLoader());
      } catch (ClassNotFoundException e) {
        throw new RuntimeException(e);
      }
    } else if (getStorageHandler() != null) {
      inputFormatClass = getStorageHandler().getInputFormatClass();
    } else {
      // Neither descriptor nor handler can supply a format.
      return null;
    }
  }
  return inputFormatClass;
}
final public Class<? extends OutputFormat> getOutputFormatClass() { if (outputFormatClass == null) { try { String className = tTable.getSd().getOutputFormat(); Class<?> c; if (className == null) { if (getStorageHandler() == null) { return null; } c = getStorageHandler().getOutputFormatClass(); } else { c = Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader()); } // Replace FileOutputFormat for backward compatibility outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(c); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } return outputFormatClass; }
final public Class<? extends OutputFormat> getOutputFormatClass() { if (outputFormatClass == null) { try { String className = tTable.getSd().getOutputFormat(); Class<?> c; if (className == null) { if (getStorageHandler() == null) { return null; } c = getStorageHandler().getOutputFormatClass(); } else { c = Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader()); } // Replace FileOutputFormat for backward compatibility outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(c); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } return outputFormatClass; }
private void processAlias(MapWork work, Set<TableScanOperator> tableScans) { Set<String> aliases = new HashSet<>(); for (TableScanOperator tso : tableScans) { // use LinkedHashMap<String, Operator<? extends OperatorDesc>> // getAliasToWork() should not apply this for non-native table if (tso.getConf().getTableMetadata().getStorageHandler() != null) { continue; } String alias = getAliasForTableScanOperator(work, tso); aliases.add(alias); tso.getConf().setIsMetadataOnly(true); } // group path alias according to work Map<Path, ArrayList<String>> candidates = new HashMap<>(); for (Path path : work.getPaths()) { ArrayList<String> aliasesAffected = work.getPathToAliases().get(path); if (CollectionUtils.isNotEmpty(aliasesAffected)) { candidates.put(path, aliasesAffected); } } for (Entry<Path, ArrayList<String>> entry : candidates.entrySet()) { processAlias(work, entry.getKey(), entry.getValue(), aliases); } }
private void processAlias(MapWork work, HashSet<TableScanOperator> tableScans) { ArrayList<String> aliases = new ArrayList<String>(); for (TableScanOperator tso : tableScans) { // use LinkedHashMap<String, Operator<? extends OperatorDesc>> // getAliasToWork() // should not apply this for non-native table if (tso.getConf().getTableMetadata().getStorageHandler() != null) { continue; } String alias = getAliasForTableScanOperator(work, tso); aliases.add(alias); tso.getConf().setIsMetadataOnly(true); } // group path alias according to work LinkedHashMap<Path, ArrayList<String>> candidates = new LinkedHashMap<>(); for (Path path : work.getPaths()) { ArrayList<String> aliasesAffected = work.getPathToAliases().get(path); if (aliasesAffected != null && aliasesAffected.size() > 0) { candidates.put(path, aliasesAffected); } } for (Entry<Path, ArrayList<String>> entry : candidates.entrySet()) { processAlias(work, entry.getKey(), entry.getValue(), aliases); } }
Utilities.setColumnNameList(jobConf, scanOp, true); Utilities.setColumnTypeList(jobConf, scanOp, true); HiveStorageHandler handler = table.getStorageHandler(); if (handler instanceof InputEstimator) { InputEstimator estimator = (InputEstimator) handler;
/**
 * Runs the storage handler's pre-insert meta hook before an INSERT into the
 * table, when the hook supports it.
 *
 * @param db Hive session handle (unused here; kept for signature parity with
 *        the other task work methods)
 * @param preInsertTableDesc descriptor carrying the target table and the
 *        overwrite flag
 * @return 0 always (also when the table's meta hook is absent or does not
 *         extend DefaultHiveMetaHook)
 * @throws HiveException wrapping any MetaException thrown by the hook
 */
private int preInsertWork(Hive db, PreInsertTableDesc preInsertTableDesc) throws HiveException {
  try {
    HiveMetaHook hook = preInsertTableDesc.getTable().getStorageHandler().getMetaHook();
    // instanceof is false for null, so a separate null check is redundant (JLS 15.20.2).
    if (!(hook instanceof DefaultHiveMetaHook)) {
      return 0;
    }
    DefaultHiveMetaHook hiveMetaHook = (DefaultHiveMetaHook) hook;
    hiveMetaHook.preInsertTable(preInsertTableDesc.getTable().getTTable(),
        preInsertTableDesc.isOverwrite());
  } catch (MetaException e) {
    throw new HiveException(e);
  }
  return 0;
}
&& jobConf.get(ConfVars.HIVE_EXECUTION_ENGINE.varname).equals("mr") && (scanDesc.getTableMetadata() == null || !(scanDesc.getTableMetadata().getStorageHandler() instanceof HiveStoragePredicateHandler))) { return;
/**
 * Invokes the storage handler's pre-insert meta hook prior to an INSERT into
 * the target table, when the hook supports pre-insert notifications.
 *
 * @param db Hive session handle (unused here; kept for signature parity with
 *        the other task work methods)
 * @param preInsertTableDesc descriptor with the target table and overwrite flag
 * @return 0 always (also when no suitable meta hook is present)
 * @throws HiveException wrapping any MetaException thrown by the hook
 */
private int preInsertWork(Hive db, PreInsertTableDesc preInsertTableDesc) throws HiveException {
  try {
    HiveMetaHook hook = preInsertTableDesc.getTable().getStorageHandler().getMetaHook();
    // instanceof is false for null, so a separate null check is redundant (JLS 15.20.2).
    if (!(hook instanceof DefaultHiveMetaHook)) {
      return 0;
    }
    DefaultHiveMetaHook hiveMetaHook = (DefaultHiveMetaHook) hook;
    hiveMetaHook.preInsertTable(preInsertTableDesc.getTable().getTTable(),
        preInsertTableDesc.isOverwrite());
  } catch (MetaException e) {
    throw new HiveException(e);
  }
  return 0;
}
return originalPredicate; HiveStorageHandler storageHandler = tbl.getStorageHandler(); if (!(storageHandler instanceof HiveStoragePredicateHandler)) {
/**
 * Commits an INSERT through the storage handler's meta hook, rolling the
 * insert back if the commit fails.
 *
 * @param db Hive session handle (unused here; kept for signature parity with
 *        the other task work methods)
 * @param insertCommitHookDesc descriptor with the target table and overwrite flag
 * @return 0 always (also when no suitable meta hook is present)
 * @throws MetaException if the commit (or the subsequent rollback) fails
 */
private int insertCommitWork(Hive db, InsertCommitHookDesc insertCommitHookDesc)
    throws MetaException {
  HiveMetaHook hook = insertCommitHookDesc.getTable().getStorageHandler().getMetaHook();
  // instanceof is false for null, so a separate null check is redundant (JLS 15.20.2).
  if (!(hook instanceof DefaultHiveMetaHook)) {
    return 0;
  }
  DefaultHiveMetaHook hiveMetaHook = (DefaultHiveMetaHook) hook;
  boolean failed = true;
  try {
    hiveMetaHook.commitInsertTable(insertCommitHookDesc.getTable().getTTable(),
        insertCommitHookDesc.isOverwrite());
    failed = false;
  } finally {
    // Undo the insert on any failure path (exception or error) out of commit.
    if (failed) {
      hiveMetaHook.rollbackInsertTable(insertCommitHookDesc.getTable().getTTable(),
          insertCommitHookDesc.isOverwrite());
    }
  }
  return 0;
}
/**
 * Commits an INSERT through the storage handler's meta hook, rolling the
 * insert back if the commit fails.
 *
 * @param db Hive session handle (unused here; kept for signature parity with
 *        the other task work methods)
 * @param insertTableDesc descriptor with the target table and overwrite flag
 * @return 0 always (also when no suitable meta hook is present)
 * @throws MetaException if the commit (or the subsequent rollback) fails
 */
private int insertCommitWork(Hive db, InsertTableDesc insertTableDesc) throws MetaException {
  HiveMetaHook hook = insertTableDesc.getTable().getStorageHandler().getMetaHook();
  // instanceof is false for null, so a separate null check is redundant (JLS 15.20.2).
  if (!(hook instanceof DefaultHiveMetaHook)) {
    return 0;
  }
  DefaultHiveMetaHook hiveMetaHook = (DefaultHiveMetaHook) hook;
  boolean failed = true;
  try {
    hiveMetaHook.commitInsertTable(insertTableDesc.getTable().getTTable(),
        insertTableDesc.isOverwrite());
    failed = false;
  } finally {
    // Undo the insert on any failure path (exception or error) out of commit.
    if (failed) {
      hiveMetaHook.rollbackInsertTable(insertTableDesc.getTable().getTTable(),
          insertTableDesc.isOverwrite());
    }
  }
  return 0;
}
getStorageHandler()); HiveStorageHandler storageHandler = tbl.getStorageHandler();