/**
 * Asserts that {@code type} belongs to the expected {@code category}.
 *
 * @param type      the HCat type being checked
 * @param category  the category the type is required to be in
 * @param fieldName field name included in the error message for context
 * @throws HCatException if the type's category differs from the expected one
 */
private static void assertTypeInCategory(Type type, Category category, String fieldName) throws HCatException {
  Category actual = Category.fromType(type);
  if (actual != category) {
    throw new HCatException("Type category mismatch. Expected " + category + " but type " + type
        + " in category " + actual + " (field " + fieldName + ")");
  }
}
/**
 * Asserts that {@code type} does NOT belong to the given {@code category}.
 *
 * @param type     the HCat type being checked
 * @param category the category the type must not be in
 * @throws HCatException if the type's category equals the forbidden one
 */
private static void assertTypeNotInCategory(Type type, Category category) throws HCatException {
  Category actual = Category.fromType(type);
  if (actual == category) {
    throw new HCatException("Type category mismatch. Expected type " + type
        + " not in category " + category + " but was so.");
  }
}
/**
 * Sets the "root" location of this HCatPartitionSpec, delegating to the
 * underlying partition-spec proxy.
 *
 * @param location the new "root" location of the HCatPartitionSpec
 * @throws HCatException on failure to set the new location
 */
public void setRootLocation(String location) throws HCatException {
  try {
    partitionSpecProxy.setRootLocation(location);
  } catch (MetaException e) {
    throw new HCatException("Unable to set root-path!", e);
  }
}
/**
 * Initializes this client: stores the configuration, derives a HiveConf from
 * it and opens a Hive metastore client connection.
 *
 * @param conf the Hadoop configuration to initialize from
 * @throws HCatException if the metastore client cannot be created
 */
@Override
void initialize(Configuration conf) throws HCatException {
  this.config = conf;
  try {
    hiveConfig = HCatUtil.getHiveConf(config);
    hmsClient = HCatUtil.getHiveMetastoreClient(hiveConfig);
  } catch (MetaException e) {
    throw new HCatException("MetaException while creating HMS client", e);
  } catch (IOException e) {
    throw new HCatException("IOException while creating HMS client", e);
  }
}
/**
 * Reads the serialized OutputJobInfo out of the job configuration and
 * deserializes it. If no such entry is present, HCatOutputFormat.setOutput
 * has not been called, which is reported as an error.
 *
 * @param conf the job Configuration object
 * @return the deserialized OutputJobInfo object
 * @throws IOException if the job info is absent or cannot be deserialized
 */
public static OutputJobInfo getJobInfo(Configuration conf) throws IOException {
  String serialized = conf.get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
  if (serialized == null) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED);
  }
  return (OutputJobInfo) HCatUtil.deserialize(serialized);
}
/**
 * Appends a field schema to the end of this HCatSchema, keeping the parallel
 * name list and name-to-position map in sync.
 *
 * @param hfs the field schema to append; must be non-null and have a name not
 *            already present in this schema
 * @throws HCatException if {@code hfs} is null or its name already exists
 */
public void append(final HCatFieldSchema hfs) throws HCatException {
  if (hfs == null) {
    throw new HCatException("Attempt to append null HCatFieldSchema in HCatSchema.");
  }
  final String fieldName = normalizeName(hfs.getName());
  if (fieldPositionMap.containsKey(fieldName)) {
    throw new HCatException("Attempt to append HCatFieldSchema with already "
        + "existing name: " + fieldName + ".");
  }
  fieldSchemas.add(hfs);
  fieldNames.add(fieldName);
  // Position of the newly appended field is the (post-add) last index.
  fieldPositionMap.put(fieldName, this.size() - 1);
}
public void remove(final HCatFieldSchema hcatFieldSchema) throws HCatException { if (!fieldSchemas.contains(hcatFieldSchema)) { throw new HCatException("Attempt to delete a non-existent column from HCat Schema: " + hcatFieldSchema); } fieldSchemas.remove(hcatFieldSchema); // Re-align the positionMap by -1 for the columns appearing after hcatFieldSchema. String fieldName = normalizeName(hcatFieldSchema.getName()); reAlignPositionMap(fieldPositionMap.get(fieldName)+1, -1); fieldPositionMap.remove(fieldName); fieldNames.remove(fieldName); }
/**
 * Lists the names of all databases matching the given pattern.
 *
 * @param pattern the database-name pattern to match
 * @return the matching database names
 * @throws HCatException if the metastore call fails
 */
@Override
public List<String> listDatabaseNamesByPattern(String pattern) throws HCatException {
  try {
    return hmsClient.getDatabases(pattern);
  } catch (MetaException exp) {
    throw new HCatException("MetaException while listing db names. " + exp.getMessage(), exp);
  } catch (TException e) {
    throw new HCatException("Transport Exception while listing db names. " + e.getMessage(), e);
  }
}
/**
 * Looks up the JMS message-bus topic name stored in a table's parameters.
 *
 * @param dbName    database containing the table
 * @param tableName table whose topic-name parameter is requested
 * @return the topic name parameter value (may be null if unset)
 * @throws HCatException if the table/db is missing or the metastore call fails
 */
@Override
public String getMessageBusTopicName(String dbName, String tableName) throws HCatException {
  try {
    // NOTE(review): assumes getParameters() is non-null for existing tables — confirm.
    return hmsClient.getTable(dbName, tableName)
        .getParameters()
        .get(HCatConstants.HCAT_MSGBUS_TOPIC_NAME);
  } catch (MetaException e) {
    throw new HCatException("MetaException while retrieving JMS Topic name.", e);
  } catch (NoSuchObjectException e) {
    throw new HCatException("Could not find DB:" + dbName + " or Table:" + tableName, e);
  } catch (TException e) {
    throw new ConnectionFailureException("TException while retrieving JMS Topic name.", e);
  }
}
/**
 * Cancels a previously obtained metastore delegation token.
 *
 * @param tokenStrForm the string form of the token to cancel
 * @throws HCatException if the cancellation fails
 */
@Override
public void cancelDelegationToken(String tokenStrForm) throws HCatException {
  try {
    hmsClient.cancelDelegationToken(tokenStrForm);
  } catch (MetaException e) {
    throw new HCatException("MetaException while canceling delegation token.", e);
  } catch (TException e) {
    throw new ConnectionFailureException("TException while canceling delegation token.", e);
  }
}
/**
 * Renews a metastore delegation token.
 *
 * @param tokenStrForm the string form of the token to renew
 * @return the new expiration time reported by the metastore
 * @throws HCatException if the renewal fails
 */
@Override
public long renewDelegationToken(String tokenStrForm) throws HCatException {
  try {
    return hmsClient.renewDelegationToken(tokenStrForm);
  } catch (MetaException e) {
    throw new HCatException("MetaException while renewing delegation token.", e);
  } catch (TException e) {
    throw new ConnectionFailureException("TException while renewing delegation token.", e);
  }
}
/**
 * Obtains a metastore delegation token for the given owner/renewer.
 *
 * @param owner                        token owner
 * @param renewerKerberosPrincipalName Kerberos principal allowed to renew it
 * @return the token in string form
 * @throws HCatException if the token cannot be obtained
 */
@Override
public String getDelegationToken(String owner, String renewerKerberosPrincipalName)
    throws HCatException {
  try {
    return hmsClient.getDelegationToken(owner, renewerKerberosPrincipalName);
  } catch (MetaException e) {
    throw new HCatException("MetaException while getting delegation token.", e);
  } catch (TException e) {
    throw new ConnectionFailureException("TException while getting delegation token.", e);
  }
}
/**
 * Reconstructs an HCatTable from its Thrift-JSON string representation
 * (the inverse of serializeTable).
 *
 * @param hcatTableStringRep the serialized table, UTF-8 Thrift-JSON
 * @return the deserialized HCatTable
 * @throws HCatException if deserialization fails
 */
@Override
public HCatTable deserializeTable(String hcatTableStringRep) throws HCatException {
  Table table = new Table();
  try {
    TDeserializer deserializer = new TDeserializer(new TJSONProtocol.Factory());
    deserializer.deserialize(table, hcatTableStringRep, "UTF-8");
    return new HCatTable(table);
  } catch (TException exception) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Could not de-serialize from: " + hcatTableStringRep);
    }
    throw new HCatException("Could not de-serialize HCatTable.", exception);
  }
}
/**
 * Reconstructs an HCatPartition from its Thrift-JSON string representation
 * (the inverse of serializePartition). The resulting partition has no
 * associated HCatTable (passed as null).
 *
 * @param hcatPartitionStringRep the serialized partition, UTF-8 Thrift-JSON
 * @return the deserialized HCatPartition
 * @throws HCatException if deserialization fails
 */
@Override
public HCatPartition deserializePartition(String hcatPartitionStringRep) throws HCatException {
  Partition partition = new Partition();
  try {
    TDeserializer deserializer = new TDeserializer(new TJSONProtocol.Factory());
    deserializer.deserialize(partition, hcatPartitionStringRep, "UTF-8");
    return new HCatPartition(null, partition);
  } catch (TException exception) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Could not de-serialize partition from: " + hcatPartitionStringRep);
    }
    throw new HCatException("Could not de-serialize HCatPartition.", exception);
  }
}
/**
 * Creates and initializes a record reader over this split and returns an
 * iterator of HCatRecords backed by it.
 *
 * Fix: the original caught InterruptedException without restoring the
 * thread's interrupt status; we now re-interrupt before translating the
 * exception so callers up the stack can still observe the interruption.
 *
 * @return an iterator over the records of this split
 * @throws HCatException if the reader cannot be created or initialized
 */
@Override
public Iterator<HCatRecord> read() throws HCatException {
  HCatInputFormat inpFmt = new HCatInputFormat();
  RecordReader<WritableComparable, HCatRecord> rr;
  try {
    TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim()
        .createTaskAttemptContext(conf, new TaskAttemptID());
    rr = inpFmt.createRecordReader(split, cntxt);
    rr.initialize(split, cntxt);
  } catch (IOException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  } catch (InterruptedException e) {
    // Restore the interrupt flag: swallowing it would hide the interruption
    // from callers that poll Thread.interrupted().
    Thread.currentThread().interrupt();
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  }
  return new HCatRecordItr(rr);
}
/**
 * Serializes each PartitionSpec within the given HCatPartitionSpec to a
 * Thrift-JSON string.
 *
 * @param hcatPartitionSpec the partition spec to serialize
 * @return one UTF-8 Thrift-JSON string per contained PartitionSpec
 * @throws HCatException if Thrift serialization fails
 */
@Override
@InterfaceAudience.LimitedPrivate({"Hive"})
@InterfaceStability.Evolving
public List<String> serializePartitionSpec(HCatPartitionSpec hcatPartitionSpec)
    throws HCatException {
  List<String> stringReps = new ArrayList<String>();
  TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
  try {
    for (PartitionSpec partitionSpec : hcatPartitionSpec.partitionSpecProxy.toPartitionSpec()) {
      stringReps.add(serializer.toString(partitionSpec, "UTF-8"));
    }
  } catch (TException serializationException) {
    throw new HCatException("Failed to serialize!", serializationException);
  }
  return stringReps;
}
/**
 * Serializes an HCatTable to a UTF-8 Thrift-JSON string (the inverse of
 * deserializeTable).
 *
 * @param hcatTable the table to serialize
 * @return the serialized representation
 * @throws HCatException if Thrift serialization fails
 */
@Override
public String serializeTable(HCatTable hcatTable) throws HCatException {
  try {
    TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
    return serializer.toString(hcatTable.toHiveTable(), "UTF-8");
  } catch (TException exception) {
    throw new HCatException("Could not serialize HCatTable: " + hcatTable, exception);
  }
}
/**
 * Serializes an HCatPartition to a UTF-8 Thrift-JSON string (the inverse of
 * deserializePartition).
 *
 * @param hcatPartition the partition to serialize
 * @return the serialized representation
 * @throws HCatException if Thrift serialization fails
 */
@Override
public String serializePartition(HCatPartition hcatPartition) throws HCatException {
  try {
    TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
    return serializer.toString(hcatPartition.toHivePartition(), "UTF-8");
  } catch (TException exception) {
    throw new HCatException("Could not serialize HCatPartition: " + hcatPartition, exception);
  }
}
/**
 * Aborts the write job associated with the given writer context by invoking
 * the output committer's abortJob with State.FAILED.
 *
 * Fix: the original caught InterruptedException without restoring the
 * thread's interrupt status; we now re-interrupt before translating the
 * exception so the interruption remains observable to callers.
 *
 * @param context the writer context of the job to abort
 * @throws HCatException if the abort cannot be performed
 */
@Override
public void abort(WriterContext context) throws HCatException {
  WriterContextImpl cntxtImpl = (WriterContextImpl) context;
  try {
    new HCatOutputFormat().getOutputCommitter(
        ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
            cntxtImpl.getConf(),
            ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID()))
        .abortJob(ShimLoader.getHadoopShims().getHCatShim().createJobContext(
            cntxtImpl.getConf(), null), State.FAILED);
  } catch (IOException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  } catch (InterruptedException e) {
    // Restore the interrupt flag rather than swallowing the interruption.
    Thread.currentThread().interrupt();
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  }
}
/**
 * Builds an HCatPartition from a Hive metastore Partition, optionally bound
 * to an owning HCatTable.
 *
 * @param hcatTable the owning table, or null when unknown (e.g. after
 *                  deserialization); when non-null the partition's value
 *                  count is validated against the table's partition columns
 * @param partition the metastore partition to wrap
 * @throws HCatException if the partition's value count does not match the
 *                       table's partition-column count
 */
HCatPartition(HCatTable hcatTable, Partition partition) throws HCatException {
  this.hcatTable = hcatTable;
  this.tableName = partition.getTableName();
  this.dbName = partition.getDbName();
  this.createTime = partition.getCreateTime();
  this.lastAccessTime = partition.getLastAccessTime();
  this.parameters = partition.getParameters();
  this.values = partition.getValues();
  // A bound table lets us sanity-check the number of partition values.
  if (hcatTable != null && partition.getValuesSize() != hcatTable.getPartCols().size()) {
    throw new HCatException("Mismatched number of partition columns between table:"
        + hcatTable.getDbName() + "." + hcatTable.getTableName()
        + " and partition " + partition.getValues());
  }
  this.sd = partition.getSd();
  this.columns = getColumns(this.sd);
}