/**
 * Adds a partition to the metastore. Thin wrapper that delegates to the
 * two-argument overload with a null EnvironmentContext.
 *
 * @param new_part the partition to add
 * @return the added partition
 * @throws TException if the underlying metastore call fails
 * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#add_partition(org.apache.hadoop.hive.metastore.api.Partition)
 */
@Override
public Partition add_partition(Partition new_part) throws TException {
  return add_partition(new_part, null);
}
// NOTE(review): fragment of a larger test method — the enclosing signature and the
// construction of part/part2/part3/part4/part6, tbl, fs and partPath are not visible here.
// Create four partitions and assert each add returned a non-null result.
Partition retp = client.add_partition(part);
assertNotNull("Unable to create partition " + part, retp);
Partition retp2 = client.add_partition(part2);
assertNotNull("Unable to create partition " + part2, retp2);
Partition retp3 = client.add_partition(part3);
assertNotNull("Unable to create partition " + part3, retp3);
Partition retp4 = client.add_partition(part4);
assertNotNull("Unable to create partition " + part4, retp4);
// Exercise adding a partition whose storage descriptor has a null column list.
part6.getSd().setCols(null);
LOG.info("Creating partition will null field schema");
client.add_partition(part6);
// List everything back; (short) -1 requests all partitions with no limit.
LOG.info("Listing all partitions for table " + dbName + "." + tblName);
final List<Partition> partitions = client.listPartitions(dbName, tblName, (short) -1);
// Re-add the first partition (presumably after a drop earlier in the test — confirm against full test).
retp = client.add_partition(part);
assertNotNull("Unable to create partition " + part, retp);
// Recreate the table as EXTERNAL and verify dropPartition(..., true) behavior on it.
tbl.getParameters().put("EXTERNAL", "TRUE");
client.createTable(tbl);
retp = client.add_partition(part);
assertTrue(fs.exists(partPath));
client.dropPartition(dbName, tblName, part.getValues(), true);
/** * Hive.loadPartition() calls this. * @param partition * The partition to add * @return the partition added */ @Override public org.apache.hadoop.hive.metastore.api.Partition add_partition( org.apache.hadoop.hive.metastore.api.Partition partition) throws TException { // First try temp table org.apache.hadoop.hive.metastore.api.Table table = getTempTable(partition.getDbName(), partition.getTableName()); if (table == null) { //(assume) not a temp table - Try underlying client return super.add_partition(partition); } TempTable tt = getTempTable(table); if(tt == null) { throw new IllegalStateException("TempTable not found for " + Warehouse.getQualifiedName(table)); } tt.addPartition(deepCopy(partition)); return partition; }
private static void populatePartitions(HiveMetaStoreClient hmsc, Table table, List<String> blurbs) throws Exception { for (int i=0; i< nDates; ++i) { for (String blurb : blurbs) { StorageDescriptor sd = new StorageDescriptor(table.getSd()); // Add partitions located in the table-directory (i.e. default). List<String> values = Arrays.asList(datePrefix + i, blurb); sd.setLocation(getPartitionPath(table, values)); hmsc.add_partition(new Partition(values, dbName, tableName, 0, 0, sd, null)); } } }
private List<Partition> createPartitions(String dbName, Table tbl, List<List<String>> values) throws Throwable { int i = 1; List<Partition> partitions = new ArrayList<>(); for(List<String> vals : values) { Partition part = makePartitionObject(dbName, tbl.getTableName(), vals, tbl, "/part"+i); i++; // check if the partition exists (it shouldn't) boolean exceptionThrown = false; try { Partition p = client.getPartition(dbName, tbl.getTableName(), vals); } catch(Exception e) { assertEquals("partition should not have existed", NoSuchObjectException.class, e.getClass()); exceptionThrown = true; } assertTrue("getPartition() should have thrown NoSuchObjectException", exceptionThrown); Partition retp = client.add_partition(part); assertNotNull("Unable to create partition " + part, retp); partitions.add(retp); } return partitions; }
// NOTE(review): fragment — part of a listener-notification test; the enclosing method,
// msc, partition, envContext, listSize and notifyList are declared outside this view.
// Adding a partition should append exactly one event to the notification list.
msc.add_partition(partition, envContext);
listSize++;
assertEquals(notifyList.size(), listSize);
/**
 * Test helper: builds a partition from the table's storage descriptor and
 * registers it with the metastore.
 *
 * @param client   metastore client to register through
 * @param table    table supplying db name, table name and storage descriptor
 * @param vals     partition values
 * @param location suffix appended to the table location for this partition
 * @throws TException if the metastore call fails
 */
private void add_partition(HiveMetaStoreClient client, Table table,
    List<String> vals, String location) throws TException {
  Partition partition = new Partition();
  partition.setDbName(table.getDbName());
  partition.setTableName(table.getTableName());
  partition.setValues(vals);
  partition.setParameters(new HashMap<>());
  partition.setSd(table.getSd().deepCopy());
  // NOTE(review): the deep-copied SD's serde is re-pointed at the table's own
  // SerDeInfo instance (shared reference, not a copy) — preserved as-is.
  partition.getSd().setSerdeInfo(table.getSd().getSerdeInfo());
  partition.getSd().setLocation(table.getSd().getLocation() + location);
  client.add_partition(partition);
}
// NOTE(review): fragment — the builder chain begins before this view
// (presumably a PartitionBuilder; confirm against the full test).
    .addValue("2011")
    .build(conf);
msc.add_partition(part);
// Filter key/value used later in the test; the value is quoted for a filter expression.
Map<String,String> kvs = new HashMap<>();
kvs.put("b", "'2011'");
/**
 * Test helper: builds a partition from the table's storage descriptor and
 * registers it with the metastore.
 *
 * @param client   metastore client to register through
 * @param table    table supplying db name, table name and storage descriptor
 * @param vals     partition values
 * @param location suffix appended to the table location for this partition
 * @throws TException if the metastore call fails
 */
private void addPartition(HiveMetaStoreClient client, Table table, List<String> vals,
    String location) throws TException {
  Partition part = new Partition();
  part.setDbName(table.getDbName());
  part.setTableName(table.getTableName());
  part.setValues(vals);
  // Diamond operator: the type arguments are inferred from setParameters(Map<String, String>).
  part.setParameters(new HashMap<>());
  part.setSd(table.getSd().deepCopy());
  // NOTE(review): the deep-copied SD's serde is re-pointed at the table's own
  // SerDeInfo instance (shared reference, not a copy) — preserved as-is.
  part.getSd().setSerdeInfo(table.getSd().getSerdeInfo());
  part.getSd().setLocation(table.getSd().getLocation() + location);
  client.add_partition(part);
}
}
// NOTE(review): fragment — part is built outside this view. Give the partition an
// empty (non-null) parameter map before registering it.
part.setParameters(new HashMap<>());
client.add_partition(part);
// NOTE(review): fragment — part and tbl are built outside this view. Place the
// partition under the table directory at "/part1" and register it.
part.getSd().setLocation(tbl.getSd().getLocation() + "/part1");
client.add_partition(part);
// NOTE(review): fragment — part is built outside this view. Attach a serde
// parameter before registering the partition (presumably to verify it round-trips).
part.getSd().getSerdeInfo().getParameters().put("abc", "1");
client.add_partition(part);
/**
 * Adds a partition to the metastore. Thin wrapper that delegates to the
 * two-argument overload with a null EnvironmentContext.
 *
 * @param new_part the partition to add
 * @return the added partition
 * @throws InvalidObjectException if the partition is malformed
 * @throws AlreadyExistsException if the partition already exists
 * @throws MetaException on a general metastore error
 * @throws TException if the underlying Thrift call fails
 * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#add_partition(org.apache.hadoop.hive.metastore.api.Partition)
 */
public Partition add_partition(Partition new_part)
    throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
  return add_partition(new_part, null);
}
/**
 * Adds a partition to the metastore. Thin wrapper that delegates to the
 * two-argument overload with a null EnvironmentContext.
 *
 * @param new_part the partition to add
 * @return the added partition
 * @throws InvalidObjectException if the partition is malformed
 * @throws AlreadyExistsException if the partition already exists
 * @throws MetaException on a general metastore error
 * @throws TException if the underlying Thrift call fails
 * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#add_partition(org.apache.hadoop.hive.metastore.api.Partition)
 */
@Override
public Partition add_partition(Partition new_part)
    throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
  return add_partition(new_part, null);
}
/**
 * Adds a partition to the metastore. Thin wrapper that delegates to the
 * two-argument overload with a null EnvironmentContext.
 *
 * @param new_part the partition to add
 * @return the added partition
 * @throws InvalidObjectException if the partition is malformed
 * @throws AlreadyExistsException if the partition already exists
 * @throws MetaException on a general metastore error
 * @throws TException if the underlying Thrift call fails
 * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#add_partition(org.apache.hadoop.hive.metastore.api.Partition)
 */
@Override
public Partition add_partition(Partition new_part)
    throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
  return add_partition(new_part, null);
}
// NOTE(review): fragment — partition and client are declared outside this view.
client.add_partition(partition);