/**
 * Returns the database names matching the given pattern.
 *
 * @param db        Hive client used to query the metastore
 * @param dbPattern metastore-style pattern, or {@code null} for all databases
 * @return matching database names
 * @throws HiveException on metastore failure
 */
public static Iterable<String> matchesDb(Hive db, String dbPattern) throws HiveException {
  // A null pattern means "no filter": return every database name.
  return dbPattern == null ? db.getAllDatabases() : db.getDatabasesByPattern(dbPattern);
}
/**
 * Returns the database names matching the given pattern, or all databases
 * when the pattern is {@code null}.
 *
 * @throws HiveException on metastore failure
 */
private Iterable<? extends String> matchesDb(String dbPattern) throws HiveException {
  // Guard clause: a concrete pattern filters the result; null selects everything.
  if (dbPattern != null) {
    return db.getDatabasesByPattern(dbPattern);
  }
  return db.getAllDatabases();
}
}
/** * Initialize the registry for the given database. It will extract the materialized views * that are enabled for rewriting from the metastore for the current user, parse them, * and register them in this cache. * * The loading process runs on the background; the method returns in the moment that the * runnable task is created, thus the views will still not be loaded in the cache when * it does. */ public void init(final Hive db) { try { List<Table> tables = new ArrayList<Table>(); for (String dbName : db.getAllDatabases()) { // TODO: We should enhance metastore API such that it returns only // materialized views instead of all tables tables.addAll(db.getAllTableObjects(dbName)); } pool.submit(new Loader(tables)); } catch (HiveException e) { LOG.error("Problem connecting to the metastore when initializing the view registry"); } }
@Override public void run() { try { SessionState ss = new SessionState(db.getConf()); ss.setIsHiveServerQuery(true); // All is served from HS2, we do not need e.g. Tez sessions SessionState.start(ss); final boolean cache = !db.getConf() .get(HiveConf.ConfVars.HIVE_SERVER2_MATERIALIZED_VIEWS_REGISTRY_IMPL.varname).equals("DUMMY"); for (String dbName : db.getAllDatabases()) { for (Table mv : db.getAllMaterializedViewObjects(dbName)) { addMaterializedView(db.getConf(), mv, OpType.LOAD, cache); } } initialized.set(true); LOG.info("Materialized views registry has been initialized"); } catch (HiveException e) { LOG.error("Problem connecting to the metastore when initializing the view registry", e); } } }
/** * Write a list of the available databases to a file. * * @param showDatabasesDesc * These are the databases we're interested in. * @return Returns 0 when execution succeeds and above 0 if it fails. * @throws HiveException * Throws this exception if an unexpected error occurs. */ private int showDatabases(Hive db, ShowDatabasesDesc showDatabasesDesc) throws HiveException { // get the databases for the desired pattern - populate the output stream List<String> databases = null; if (showDatabasesDesc.getPattern() != null) { LOG.debug("pattern: {}", showDatabasesDesc.getPattern()); databases = db.getDatabasesByPattern(showDatabasesDesc.getPattern()); } else { databases = db.getAllDatabases(); } LOG.info("Found {} database(s) matching the SHOW DATABASES statement.", databases.size()); // write the results in the file DataOutputStream outStream = getOutputStream(showDatabasesDesc.getResFile()); try { formatter.showDatabases(outStream, databases); } catch (Exception e) { throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "show databases"); } finally { IOUtils.closeStream(outStream); } return 0; }
/** * Write a list of the available databases to a file. * * @param showDatabasesDesc * These are the databases we're interested in. * @return Returns 0 when execution succeeds and above 0 if it fails. * @throws HiveException * Throws this exception if an unexpected error occurs. */ private int showDatabases(Hive db, ShowDatabasesDesc showDatabasesDesc) throws HiveException { // get the databases for the desired pattern - populate the output stream List<String> databases = null; if (showDatabasesDesc.getPattern() != null) { LOG.info("pattern: " + showDatabasesDesc.getPattern()); databases = db.getDatabasesByPattern(showDatabasesDesc.getPattern()); } else { databases = db.getAllDatabases(); } LOG.info("results : " + databases.size()); // write the results in the file DataOutputStream outStream = getOutputStream(showDatabasesDesc.getResFile()); try { formatter.showDatabases(outStream, databases); } catch (Exception e) { throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "show databases"); } finally { IOUtils.closeStream(outStream); } return 0; }
/** * Removes all databases and tables from the metastore */ public static void cleanupHMS(Hive hive, Warehouse wh, FsPermission defaultPerm) throws HiveException, MetaException, NoSuchObjectException { for (String dbName : hive.getAllDatabases()) { if (dbName.equals("default")) { continue; } try { Path path = getDbPath(hive, wh, dbName); FileSystem whFs = path.getFileSystem(hive.getConf()); whFs.setPermission(path, defaultPerm); } catch (IOException ex) { //ignore } hive.dropDatabase(dbName, true, true, true); } //clean tables in default db for (String tablename : hive.getAllTables("default")) { hive.dropTable("default", tablename, true, true); } }
// Trigger at least one metastore call, then dump and reset the accumulated
// call-timing stats; the emitted log output is captured for inspection.
// NOTE(review): appender appears to be a test log appender — confirm against the enclosing test.
hm.getAllDatabases(); hm.dumpAndClearMetaCallTiming("test"); String logStr = appender.getOutput();
for (String dbName : db.getAllDatabases()) { SessionState.get().setCurrentDatabase(dbName); for (String tblName : db.getAllTables()) { for (String dbName : db.getAllDatabases()) { SessionState.get().setCurrentDatabase(dbName); for (String tblName : db.getAllTables()) {
/**
 * Registers every database known to the Hive client and imports the tables
 * of each database that was successfully registered.
 *
 * @param failOnError propagated to the per-database table import
 */
private void importDatabases(boolean failOnError) throws Exception {
  for (String databaseName : hiveClient.getAllDatabases()) {
    Referenceable dbReference = registerDatabase(databaseName);
    if (dbReference == null) {
      continue; // registration yielded nothing; skip table import for this database
    }
    importTables(dbReference, databaseName, failOnError);
  }
}
databases = db.getDatabasesByPattern(showDatabasesDesc.getPattern()); } else { databases = db.getAllDatabases();
/**
 * Returns the names of all databases visible in the given session.
 *
 * @param sessionid the Lens session handle
 * @return all database names
 * @throws LensException if the underlying Hive call fails
 */
@Override
public List<String> getAllDatabases(LensSessionHandle sessionid) throws LensException {
  try (SessionContext ignored = new SessionContext(sessionid)) {
    Hive hive = Hive.get(getSession(sessionid).getHiveConf());
    return hive.getAllDatabases();
  } catch (HiveException e) {
    // Wrap the metastore failure, preserving the original cause.
    throw new LensException(e);
  }
}
databases = db.getDatabasesByPattern(showDatabasesDesc.getPattern()); } else { databases = db.getAllDatabases();
/**
 * Re-registers every permanent (persistent) function from every database in
 * the metastore into the {@code FunctionRegistry}. Registration failures for
 * individual functions are logged and skipped so one bad function does not
 * prevent the rest from loading.
 *
 * @throws HiveException if the metastore cannot be reached
 */
public static void reloadFunctions() throws HiveException {
  Hive db = Hive.get();
  for (String dbName : db.getAllDatabases()) {
    for (String functionName : db.getFunctions(dbName, "*")) {
      Function function = db.getFunction(dbName, functionName);
      try {
        FunctionRegistry.registerPermanentFunction(
            FunctionUtils.qualifyFunctionName(functionName, dbName), function.getClassName(),
            false, FunctionTask.toFunctionResource(function.getResourceUris()));
      } catch (Exception e) {
        // Include the exception so the failure is diagnosable from the log;
        // previously only the message string was emitted.
        LOG.warn("Failed to register persistent function " + functionName + ":"
            + function.getClassName() + ". Ignore and continue.", e);
      }
    }
  }
}
List<String> schemas = hmc.getAllDatabases(); ArrayList<ExternalTableMetaData> externalTables = new ArrayList<>(); for (String schema : schemas) { List<String> schemas = hmc.getAllDatabases(); ArrayList<PolicyTableData> policyData = new ArrayList<>(); for (String schema : schemas) { List<String> dbList = hmc.getAllDatabases(); HashMap<String, List<String>> dbTablesMap = new HashMap<>(); for (String db : dbList) {