/**
 * MapReduce framework configuration hook: delegates to {@code initialize}.
 * Any failure is logged (full stack trace) and swallowed so the framework
 * callback itself never throws.
 */
public void configure(JobConf job) {
  try {
    initialize(job);
  } catch (Exception ex) {
    // Best-effort: record the problem and continue.
    LOG.error(StringUtils.stringifyException(ex));
  }
}
/**
 * Aborts the given open transactions via the metastore client.
 *
 * @param txnids ids of the transactions to abort
 * @throws HiveException wrapping any failure from the metastore call
 */
public void abortTransactions(List<Long> txnids) throws HiveException {
  try {
    getMSC().abortTxns(txnids);
  } catch (Exception ex) {
    // Log the full trace before surfacing the wrapped exception to the caller.
    LOG.error(StringUtils.stringifyException(ex));
    throw new HiveException(ex);
  }
}
/**
 * Deletes column statistics for one column of a table through the metastore client.
 *
 * @param dbName    database holding the table
 * @param tableName table whose column statistics are deleted
 * @param colName   column to clear statistics for
 * @return the metastore client's result flag
 * @throws HiveException wrapping any failure from the metastore call
 */
public boolean deleteTableColumnStatistics(String dbName, String tableName, String colName)
    throws HiveException {
  boolean deleted;
  try {
    deleted = getMSC().deleteTableColumnStatistics(dbName, tableName, colName);
  } catch (Exception ex) {
    // Debug-level only: the wrapped exception already carries the details.
    LOG.debug(StringUtils.stringifyException(ex));
    throw new HiveException(ex);
  }
  return deleted;
}
/**
 * Runs {@code winutils task processList <taskProcessId>} and returns its raw
 * stdout, or {@code null} if the shell invocation fails (the failure is logged).
 */
String getAllProcessInfoFromShell() {
  try {
    // Resolve winutils.exe inside the try: getCanonicalPath() can throw IOException.
    String winutils = Shell.getWinUtilsFile().getCanonicalPath();
    ShellCommandExecutor executor = new ShellCommandExecutor(
        new String[] { winutils, "task", "processList", taskProcessId });
    executor.execute();
    return executor.getOutput();
  } catch (IOException ioe) {
    LOG.error(StringUtils.stringifyException(ioe));
    return null;
  }
}
// NOTE(review): truncated fragment — `catch` clauses with no matching `try`, unbalanced
// braces, and references to locals (`e`, `container`, `finalSuccess`, `diagnostics`)
// declared outside this view. Appears to be an excerpt of an ApplicationMaster run()
// loop allocating ps/server containers via reflection on `amrmAsync`. Not compilable
// as-is; left byte-identical pending the surrounding context.
@SuppressWarnings("deprecation") private boolean run() throws IOException, NoSuchAlgorithmException { LOG.info("ApplicationMaster Starting ..."); LOG.info("Try to allocate " + psNum + " ps/server containers"); LOG.debug("current hadoop version don't have the method updateBlacklist of Class " + amrmAsync.getClass().toString() + ". For More Detail:" + e); } catch (InvocationTargetException e) { LOG.error("InvocationTargetException : " + e); } catch (IllegalAccessException e) { LOG.error("IllegalAccessException : " + e); LOG.info("Canceling container: " + container.getId().toString()); amrmAsync.releaseAssignedContainer(container.getId()); amrmAsync.addContainerRequest(psContainerRequest); LOG.debug("current hadoop version don't have the method updateBlacklist of Class " + amrmAsync.getClass().toString() + ". For More Detail:" + e); } catch (InvocationTargetException e) { LOG.error("invoke the method updateBlacklist of Class " + amrmAsync.getClass().toString() + " InvocationTargetException Error : " + e); } catch (IllegalAccessException e) { LOG.error("invoke the method updateBlacklist of Class " + amrmAsync.getClass().toString() + " IllegalAccessException Error : " + e); finalSuccess = false; this.appendMessage("Some error occurs" + org.apache.hadoop.util.StringUtils.stringifyException(e), true); diagnostics = e.getMessage();
// NOTE(review): truncated fragment — an `else` with no visible `if`, and clamp branches
// for `threshold` whose guarding conditions are partly missing. Appears to be an excerpt
// of a CLI argument-parsing loop (threshold clamped to [0.0, 1.0], sample unit parsing,
// then summary logging and an error return). Not compilable as-is; left byte-identical
// pending the surrounding context.
threshold = Float.parseFloat(args[++i]); if(threshold > 1){ LOG.warn("Summary threshold is larger than 1.0 ! Value should be within [0.0,1.0] !"); threshold = 1.0f; LOG.warn("Summary threshold is smaller than 0.0 ! Value should be within [0.0,1.0] !"); threshold = 0.0f; tputSampleUnit = GIGA; else { LOG.warn("Illegal format of parameter \"sampleUnit\", Ignored."); LOG.info("nrFiles = " + nrFiles); LOG.info("fileSize (MB) = " + fileSize); LOG.info("bufferSize = " + bufferSize); System.err.print(StringUtils.stringifyException(e)); return -1;
// NOTE(review): truncated fragment — loose statements with no enclosing method visible
// (`return -1` presumably sits in a catch block of a benchmark driver's run method,
// after logging the parsed parameters). Not compilable as-is; left byte-identical
// pending the surrounding context.
LOG.info("nrFiles = " + nrFiles); LOG.info("fileSize (MB) = " + fileSize); LOG.info("bufferSize = " + bufferSize); System.err.print(StringUtils.stringifyException(e)); return -1;
/**
 * Commits and closes the connection, then clears the field.
 * <p>
 * Fix: in the original, a failing {@code commit()} skipped both {@code close()}
 * and the {@code connection = null} reset, leaking the connection. Closing and
 * nulling now happen in a {@code finally} so the connection is always released.
 */
private void shutdown() {
  try {
    connection.commit();
  } catch (Throwable ex) {
    LOG.warn("Exception occurred while committing connection :" + StringUtils.stringifyException(ex));
  } finally {
    try {
      connection.close();
    } catch (Throwable ex) {
      LOG.warn("Exception occurred while closing connection :" + StringUtils.stringifyException(ex));
    }
    // Always drop the reference, even if close() failed.
    connection = null;
  }
}
/**
 * Asks the metastore client to validate the characters of the given partition values.
 *
 * @param partVals partition values to validate
 * @throws HiveException wrapping any failure from the metastore call
 */
public void validatePartitionNameCharacters(List<String> partVals) throws HiveException {
  try {
    getMSC().validatePartitionNameCharacters(partVals);
  } catch (Exception ex) {
    // Log the full trace before surfacing the wrapped exception to the caller.
    LOG.error(StringUtils.stringifyException(ex));
    throw new HiveException(ex);
  }
}
/**
 * Obtains a {@link Hive} client via the fast-check path, converting checked
 * failures into an unchecked {@link RuntimeException}.
 */
protected Hive getHive() {
  try {
    return Hive.getWithFastCheck(conf);
  } catch (HiveException ex) {
    // Log the full trace, then rethrow unchecked with the cause preserved.
    LOG.error(StringUtils.stringifyException(ex));
    throw new RuntimeException(ex);
  }
}
/**
 * Drops TPCH1M_LINEITEM if present, then executes the given DDL statement.
 * <p>
 * Fix: the original never closed the {@link Statement}, leaking a JDBC resource
 * on every call; it is now managed with try-with-resources.
 *
 * @param sql the CREATE TABLE statement to execute
 * @throws SQLException declared for callers; execution failures of {@code sql}
 *                      itself are currently caught and logged, matching the
 *                      original behavior
 */
public void createTableFromSQL(String sql) throws SQLException {
  Connection dbcon = this.getConnection();
  System.out.println("SQL : " + sql);
  this.dropTableIfExists("TPCH1M_LINEITEM");
  try (Statement st = dbcon.createStatement()) {
    int res = st.executeUpdate(sql);
    System.out.println("Result : " + res);
  } catch (SQLException e) {
    LOG.error("Got SQLException during creating table: " + StringUtils.stringifyException(e));
  }
}
/**
 * Handles a failed ApplicationMaster launch: logs a diagnostic containing the
 * full stack trace and notifies the attempt state machine with LAUNCH_FAILED.
 */
@SuppressWarnings("unchecked")
protected void onAMLaunchFailed(ContainerId containerId, Exception ie) {
  String message = "Error launching " + application.getAppAttemptId()
      + ". Got exception: " + StringUtils.stringifyException(ie);
  LOG.info(message);
  // Drive the RMAppAttempt state machine with the failure event.
  handler.handle(new RMAppAttemptEvent(
      application.getAppAttemptId(), RMAppAttemptEventType.LAUNCH_FAILED, message));
}
// NOTE(review): this closing brace ends an enclosing scope that starts outside this view.
}
/**
 * Test teardown: shuts down the HBase minicluster best-effort (failures are
 * logged, not rethrown), then delegates to the superclass teardown.
 */
@Override
@After
public void tearDown() {
  try {
    shutdown();
  } catch (Exception ex) {
    // A failed minicluster shutdown should not fail the test run.
    LOG.warn("Error shutting down HBase minicluster: " + StringUtils.stringifyException(ex));
  }
  super.tearDown();
}
/**
 * Deletes column statistics for one column of one partition through the
 * metastore client.
 *
 * @param dbName    database holding the table
 * @param tableName table owning the partition
 * @param partName  partition whose statistics are deleted
 * @param colName   column to clear statistics for
 * @return the metastore client's result flag
 * @throws HiveException wrapping any failure from the metastore call
 */
public boolean deletePartitionColumnStatistics(String dbName, String tableName, String partName,
    String colName) throws HiveException {
  boolean deleted;
  try {
    deleted = getMSC().deletePartitionColumnStatistics(dbName, tableName, partName, colName);
  } catch (Exception ex) {
    // Debug-level only: the wrapped exception already carries the details.
    LOG.debug(StringUtils.stringifyException(ex));
    throw new HiveException(ex);
  }
  return deleted;
}
/**
 * Cancels a delegation token via the metastore client.
 *
 * @param tokenStrForm the token in its string form
 * @throws HiveException wrapping any failure from the metastore call
 */
public void cancelDelegationToken(String tokenStrForm) throws HiveException {
  try {
    getMSC().cancelDelegationToken(tokenStrForm);
  } catch (Exception ex) {
    // Log the full trace before surfacing the wrapped exception to the caller.
    LOG.error(StringUtils.stringifyException(ex));
    throw new HiveException(ex);
  }
}
/**
 * Returns a {@link Hive} client obtained via the fast-check path; any
 * {@link HiveException} is logged and rethrown as a {@link RuntimeException}.
 */
protected Hive getHive() {
  Hive client;
  try {
    client = Hive.getWithFastCheck(conf);
  } catch (HiveException cause) {
    LOG.error(StringUtils.stringifyException(cause));
    throw new RuntimeException(cause);
  }
  return client;
}
/**
 * Test setup: runs the superclass setup, then removes the warehouse directory
 * so every test starts from a clean slate. An I/O failure is logged and fails
 * the test immediately.
 * <p>
 * Fix: single-argument {@code FileSystem.delete(Path)} is deprecated in Hadoop;
 * replaced with the explicit recursive form {@code delete(path, true)}, which
 * is what the deprecated overload delegates to.
 */
@Before
public void setUp() {
  super.setUp();
  Path p = new Path(getWarehouseDir());
  try {
    FileSystem fs = FileSystem.get(new Configuration());
    // Recursive delete: the warehouse dir may contain table subdirectories.
    fs.delete(p, true);
  } catch (IOException e) {
    LOG.error("Setup fail with IOException: " + StringUtils.stringifyException(e));
    fail("Setup fail with IOException: " + StringUtils.stringifyException(e));
  }
}
/**
 * Removes the stored column statistics for {@code colName} of the given table.
 *
 * @param dbName    database holding the table
 * @param tableName table whose column statistics are deleted
 * @param colName   column to clear statistics for
 * @return the metastore client's result flag
 * @throws HiveException wrapping any failure from the metastore call
 */
public boolean deleteTableColumnStatistics(String dbName, String tableName, String colName)
    throws HiveException {
  try {
    return getMSC().deleteTableColumnStatistics(dbName, tableName, colName);
  } catch (Exception cause) {
    // Debug-level only: the wrapped exception already carries the details.
    LOG.debug(StringUtils.stringifyException(cause));
    throw new HiveException(cause);
  }
}
/**
 * Fetches a delegation token for {@code owner}, renewable by {@code renewer},
 * from the metastore client.
 *
 * @param owner   user the token is issued for
 * @param renewer user allowed to renew the token
 * @return the token in its string form
 * @throws HiveException wrapping any failure from the metastore call
 */
public String getDelegationToken(String owner, String renewer) throws HiveException {
  try {
    return getMSC().getDelegationToken(owner, renewer);
  } catch (Exception ex) {
    // Log the full trace before surfacing the wrapped exception to the caller.
    LOG.error(StringUtils.stringifyException(ex));
    throw new HiveException(ex);
  }
}
/**
 * Records a failure: unwraps bare {@link RuntimeException} wrappers down to the
 * first non-RuntimeException (or wrapper without a cause), stores it via
 * {@code setException}, and logs its full stack trace.
 */
private void failed(Throwable e) {
  // Peel off plain RuntimeException layers to surface the underlying cause.
  Throwable cause = e;
  while (cause.getClass() == RuntimeException.class && cause.getCause() != null) {
    cause = cause.getCause();
  }
  setException(cause);
  LOG.error(stringifyException(cause));
}