/**
 * Looks up the numeric DB_ID of a database within a catalog.
 *
 * @param stmt open JDBC statement used to run the lookup
 * @param db name of the database to find
 * @param catalog name of the catalog containing the database
 * @return the DB_ID of the matching database row
 * @throws SQLException if the query fails
 * @throws HiveMetaException if no database with the given name exists
 */
private long getDbId(Statement stmt, String db, String catalog) throws SQLException, HiveMetaException {
  String query = String.format(schemaTool.quote(DB_ID_QUERY), db, catalog);
  LOG.debug("Going to run " + query);
  try (ResultSet rs = stmt.executeQuery(query)) {
    if (!rs.next()) {
      // Bug fix: previously reported the unrelated 'fromDb' field instead of
      // the 'db' parameter that was actually queried for.
      throw new HiveMetaException("Unable to find database " + db);
    }
    return rs.getLong(1);
  }
}
task = new SchemaToolTaskCreateUser(); } else { throw new HiveMetaException("No task defined!"); return 0; } catch (HiveMetaException e) { logAndPrintToError(e.getMessage()); if (e.getCause() != null) { Throwable t = e.getCause(); logAndPrintToError("Underlying cause: " + t.getClass().getName() + " : " + t.getMessage()); if (e.getCause() instanceof SQLException) { logAndPrintToError("SQL Error code: " + ((SQLException) t).getErrorCode()); e.printStackTrace(); } else { logAndPrintToError("Use --verbose for detailed stacktrace.");
version = schemaTool.getMetaStoreSchemaInfo().getMetaStoreSchemaVersion(connectionInfo); } catch (HiveMetaException he) { System.err.println("Failed to determine schema version from Hive Metastore DB. " + he.getMessage()); System.out.println("Failed in schema table validation."); LOG.debug("Failed to determine schema version from Hive Metastore DB," + he.getMessage(), he); return false; throw new HiveMetaException("Failed to retrieve schema tables from Hive Metastore DB," + e.getMessage(), e); } finally { rs.close(); } catch (SQLException e) { throw new HiveMetaException("Failed to close resultset", e);
HiveConf.ConfVars.METASTOREPWD.varname)); } catch (IOException err) { throw new HiveMetaException("Error getting metastore password", err); if (e.getCause() != null) { Throwable t = e.getCause(); System.err.println("Underlying cause: " + t.getClass().getName() + " : " + t.getMessage()); if (e.getCause() instanceof SQLException) { System.err.println("SQL Error code: " + ((SQLException)t).getErrorCode()); e.printStackTrace(); } else { System.err.println("Use --verbose for detailed stacktrace.");
Assert.fail("did not get expected exception"); } catch (HiveMetaException hme) { String message = hme.getMessage(); Assert.assertTrue("Bad HiveMetaException message :" + message, message.contains("Failed to retrieve schema tables from Hive Metastore DB")); Throwable cause = hme.getCause(); Assert.assertNotNull("HiveMetaException did not contain a cause", cause); String causeMessage = cause.getMessage();
System.err.println(e); if (line.hasOption("verbose")) { e.printStackTrace();
/**
 * Checks that the schema version recorded in the metastore database is
 * compatible with the version this Hive build expects.
 *
 * @return true when the versions are compatible, false when a recognized
 *         version-mismatch condition was detected
 * @throws HiveMetaException on any failure other than a recognized
 *         version-mismatch condition
 */
boolean validateSchemaVersions() throws HiveMetaException {
  System.out.println("Validating schema version");
  try {
    String hiveSchemaVersion = schemaTool.getMetaStoreSchemaInfo().getHiveSchemaVersion();
    MetaStoreConnectionInfo connectionInfo = schemaTool.getConnectionInfo(false);
    String newSchemaVersion =
        schemaTool.getMetaStoreSchemaInfo().getMetaStoreSchemaVersion(connectionInfo);
    schemaTool.assertCompatibleVersion(hiveSchemaVersion, newSchemaVersion);
  } catch (HiveMetaException hme) {
    String msg = hme.getMessage();
    // Only these well-known mismatch conditions count as a soft validation
    // failure; anything else is unexpected and propagates to the caller.
    boolean knownMismatch =
        msg.contains("Metastore schema version is not compatible")
            || msg.contains("Multiple versions were found in metastore")
            || msg.contains("Could not find version info in metastore VERSION table");
    if (!knownMismatch) {
      throw hme;
    }
    System.err.println(msg);
    System.out.println("[FAIL]\n");
    return false;
  }
  System.out.println("[SUCCESS]\n");
  return true;
}
version = metaStoreSchemaInfo.getMetaStoreSchemaVersion(getConnectionInfo(false)); } catch (HiveMetaException he) { System.err.println("Failed to determine schema version from Hive Metastore DB. " + he.getMessage()); System.out.println("Failed in schema table validation."); LOG.debug("Failed to determine schema version from Hive Metastore DB," + he.getMessage()); return false; throw new HiveMetaException("Failed to retrieve schema tables from Hive Metastore DB", e); } finally { if (rs != null) { rs.close(); } catch (SQLException e) { throw new HiveMetaException("Failed to close resultset", e);
System.err.println(e); if (line.hasOption("verbose")) { e.printStackTrace();
/**
 * Runs either the schema-init or the schema-upgrade task, depending on
 * whether the metastore database already carries a readable schema version.
 */
@Override
void execute() throws HiveMetaException {
  HiveSchemaHelper.MetaStoreConnectionInfo connectionInfo = schemaTool.getConnectionInfo(true);
  String dbVersion = null;
  try {
    dbVersion = schemaTool.getMetaStoreSchemaInfo().getMetaStoreSchemaVersion(connectionInfo);
  } catch (HiveMetaException e) {
    // No readable version: treat the database as empty and fall through to init.
    LOG.info("Exception getting db version:" + e.getMessage());
    LOG.info("Try to initialize db schema");
  }
  SchemaToolTask task =
      (dbVersion == null) ? new SchemaToolTaskInit() : new SchemaToolTaskUpgrade();
  task.setHiveSchemaTool(schemaTool);
  task.setCommandLineArguments(cl);
  task.execute();
}
}
/**
 * Rewrites the catalog/database reference in one row of the given metastore
 * table, as part of moving a Hive database between catalogs.
 *
 * @param stmt open JDBC statement to run the update with
 * @param tableName metastore table to update
 * @param tableColumnName column holding the Hive table name in that table
 * @param fromCat catalog the database is moving from
 * @param toCat catalog the database is moving to
 * @param fromDb database name to match
 * @param toDb database name to write
 * @param hiveTblName Hive table whose row should be rewritten
 * @throws SQLException if the update fails
 * @throws HiveMetaException if more than one row (or a negative count) was updated
 */
private void updateDbNameForTable(Statement stmt, String tableName, String tableColumnName,
    String fromCat, String toCat, String fromDb, String toDb, String hiveTblName)
    throws HiveMetaException, SQLException {
  String update = String.format(schemaTool.quote(UPDATE_DB_NAME_STMT), tableName, toCat, toDb,
      fromCat, fromDb, tableColumnName, hiveTblName);
  LOG.debug("Going to run " + update);
  int numUpdated = stmt.executeUpdate(update);
  // Zero rows is acceptable (the table may have no matching entry); more than
  // one, or a negative count, indicates a broken update.
  if (numUpdated > 1 || numUpdated < 0) {
    // Bug fix: the message previously claimed exactly 1 row was expected,
    // but the check deliberately allows 0 updates as well.
    throw new HiveMetaException("Failed to properly update the " + tableName
        + " table. Expected to update at most 1 row but instead updated " + numUpdated);
  }
}
}
/**
 * Validates that the schema version stored in the metastore database is
 * compatible with this Hive build's expected schema version.
 *
 * @return true on success, false when a recognized mismatch was found
 * @throws HiveMetaException on any other failure
 */
boolean validateSchemaVersions() throws HiveMetaException {
  System.out.println("Validating schema version");
  try {
    String dbSchemaVersion =
        metaStoreSchemaInfo.getMetaStoreSchemaVersion(getConnectionInfo(false));
    assertCompatibleVersion(metaStoreSchemaInfo.getHiveSchemaVersion(), dbSchemaVersion);
  } catch (HiveMetaException hme) {
    String msg = hme.getMessage();
    // Recognized mismatch conditions are reported as a soft failure; anything
    // else is rethrown for the caller to handle.
    boolean recognized = msg.contains("Metastore schema version is not compatible")
        || msg.contains("Multiple versions were found in metastore")
        || msg.contains("Could not find version info in metastore VERSION table");
    if (!recognized) {
      throw hme;
    }
    System.err.println(msg);
    System.out.println("Failed in schema version validation.");
    return false;
  }
  System.out.println("Succeeded in schema version validation.");
  return true;
}
@Override public String getCreateUserScript() throws HiveMetaException { String createScript = CREATE_USER_PREFIX + "." + dbType + SQL_FILE_EXTENSION; // check if the file exists if (!(new File(getMetaStoreScriptDir() + File.separatorChar + createScript).exists())) { throw new HiveMetaException("Unable to find create user file, expected: " + createScript); } return createScript; }
/**
 * Verifies that a JDBC connection to the metastore can be opened (and closed).
 *
 * @throws HiveMetaException if the connection cannot be established or closed
 */
protected void testConnectionToMetastore() throws HiveMetaException {
  // try-with-resources guarantees the probe connection is released even if
  // close itself is the failing operation; a HiveMetaException from opening
  // the connection still propagates unchanged.
  try (Connection conn = getConnectionToMetastore(true)) {
    // Nothing to do: successfully opening and closing the connection is the test.
  } catch (SQLException e) {
    throw new HiveMetaException("Failed to close metastore connection", e);
  }
}
/**
 * Asserts that the running Hive version can work with the schema version
 * found in the database.
 *
 * @param hiveSchemaVersion schema version this Hive build expects
 * @param dbSchemaVersion schema version found in the metastore database
 * @throws HiveMetaException when the two versions are incompatible
 */
protected void assertCompatibleVersion(String hiveSchemaVersion, String dbSchemaVersion)
    throws HiveMetaException {
  boolean compatible = metaStoreSchemaInfo.isVersionCompatible(hiveSchemaVersion, dbSchemaVersion);
  if (compatible) {
    return;
  }
  throw new HiveMetaException("Metastore schema version is not compatible. Hive Version: "
      + hiveSchemaVersion + ", Database Schema Version: " + dbSchemaVersion);
}
/**
 * Rewrites the catalog name in one row of the given metastore table as part
 * of renaming a catalog.
 *
 * @param stmt open JDBC statement to run the update with
 * @param tableName metastore table to update
 * @param catColName column holding the catalog name
 * @param dbColName column holding the database name
 * @param fromCatName catalog name to match
 * @param toCatName catalog name to write
 * @param dbName database whose row should be rewritten
 * @param zeroUpdatesOk whether updating no rows at all is acceptable
 * @throws SQLException if the update fails
 * @throws HiveMetaException if an unexpected number of rows was updated
 */
private void updateCatalogNameInTable(Statement stmt, String tableName, String catColName,
    String dbColName, String fromCatName, String toCatName, String dbName, boolean zeroUpdatesOk)
    throws HiveMetaException, SQLException {
  String update = String.format(schemaTool.quote(UPDATE_CATALOG_NAME_STMT), tableName, catColName,
      toCatName, catColName, fromCatName, dbColName, dbName);
  LOG.debug("Going to run " + update);
  int rows = stmt.executeUpdate(update);
  boolean acceptable = (rows == 1) || (zeroUpdatesOk && rows == 0);
  if (!acceptable) {
    throw new HiveMetaException("Failed to properly update the " + tableName
        + " table. Expected to update 1 row but instead updated " + rows);
  }
}
}
private void loadAllUpgradeScripts(String dbType) throws HiveMetaException { // load upgrade order for the given dbType List<String> upgradeOrderList = new ArrayList<>(); String upgradeListFile = getMetaStoreScriptDir() + File.separator + VERSION_UPGRADE_LIST + "." + dbType; try (FileReader fr = new FileReader(upgradeListFile); BufferedReader bfReader = new BufferedReader(fr)) { String currSchemaVersion; while ((currSchemaVersion = bfReader.readLine()) != null) { upgradeOrderList.add(currSchemaVersion.trim()); } } catch (FileNotFoundException e) { throw new HiveMetaException("File " + upgradeListFile + "not found ", e); } catch (IOException e) { throw new HiveMetaException("Error reading " + upgradeListFile, e); } hiveSchemaVersions = upgradeOrderList.toArray(new String[0]); }
private int getNextCatalogId(Statement stmt) throws SQLException, HiveMetaException { String query = schemaTool.quote(NEXT_CATALOG_ID_QUERY); LOG.debug("Going to run " + query); try (ResultSet rs = stmt.executeQuery(query)) { if (!rs.next()) { throw new HiveMetaException("No catalogs found, have you upgraded the database?"); } int nextId = rs.getInt(1) + 1; // We need to stay out of the way of any sequences used by the underlying database. // Otherwise the next time the client tries to add a catalog we'll get an error. // There should never be billions of catalogs, so we'll shift our sequence number up // there to avoid clashes. int floor = 1 << 30; return Math.max(nextId, floor); } }
/**
 * Scans TBLS for rows whose SD_ID is NULL when it should not be, printing a
 * line per offending table.
 *
 * @param conn open metastore connection to run the check against
 * @return true when no offending rows were found
 * @throws HiveMetaException if the validation query fails
 */
@VisibleForTesting
boolean validateColumnNullValues(Connection conn) throws HiveMetaException {
  System.out.println("Validating columns for incorrect NULL values.");
  String queryColumnNullValues = schemaTool.quote(QUERY_COLUMN_NULL_VALUES);
  boolean valid = true;
  try (Statement stmt = conn.createStatement();
       ResultSet res = stmt.executeQuery(queryColumnNullValues)) {
    while (res.next()) {
      // Every returned row is a violation.
      valid = false;
      long tableId = res.getLong("TBL_ID");
      String tableName = res.getString("TBL_NAME");
      String tableType = res.getString("TBL_TYPE");
      System.err.println("SD_ID in TBLS should not be NULL for Table Name=" + tableName
          + ", Table ID=" + tableId + ", Table Type=" + tableType);
    }
    System.out.println(valid ? "[SUCCESS]\n" : "[FAIL]\n");
    return valid;
  } catch(SQLException e) {
    throw new HiveMetaException("Failed to validate columns for incorrect NULL values", e);
  }
}
}
/**
 * Executes an Oracle create-user script statement-by-statement over JDBC
 * instead of via SqlLine. Each line of the script must be a single SQL
 * statement terminated by ';'.
 *
 * @param createFile the (already modified) create-user script to run
 * @throws HiveMetaException if the script cannot be read or the metastore
 *         connection cannot be established
 */
private void oracleCreateUserHack(File createFile) throws HiveMetaException {
  LOG.debug("Found oracle, hacking our way through it rather than using SqlLine");
  try (BufferedReader reader = new BufferedReader(new FileReader(createFile))) {
    try (Connection conn = schemaTool.getConnectionToMetastore(false)) {
      try (Statement stmt = conn.createStatement()) {
        reader.lines()
            .forEach(s -> {
              assert s.charAt(s.length() - 1) == ';';
              try {
                // Strip the trailing ';' — JDBC statements must not carry it.
                stmt.execute(s.substring(0, s.length() - 1));
              } catch (SQLException e) {
                // Bug fix: log the same text that was executed; the previous
                // substring(0, length - 2) also chopped the statement's final
                // real character.
                LOG.error("statement <" + s.substring(0, s.length() - 1) + "> failed", e);
                throw new RuntimeException(e);
              }
            });
      }
    }
  } catch (IOException e) {
    LOG.error("Caught IOException trying to read modified create user script " +
        createFile.getAbsolutePath(), e);
    throw new HiveMetaException(e);
  } catch (HiveMetaException e) {
    LOG.error("Failed to connect to RDBMS", e);
    throw e;
  } catch (SQLException e) {
    // NOTE(review): SQLException here (e.g. from closing the connection or
    // statement) is logged and swallowed so a close failure does not mask a
    // successful script run — confirm this best-effort behavior is intended.
    LOG.error("Got SQLException", e);
  }
}
}