/**
 * Creates a lock object for a dummy (in-memory) partition.
 * Delegates to the path-array constructor, using the partition's name as the
 * single lock path component.
 *
 * @param par the dummy partition whose name identifies the lock path
 * @param lockData metadata (query id, mode, etc. — presumably; confirm against HiveLockObjectData) attached to this lock
 */
public HiveLockObject(DummyPartition par, HiveLockObjectData lockData) { this(new String[] {par.getName()}, lockData); }
/**
 * Constructs an Entity backed by a dummy (in-memory) partition.
 * NOTE(review): computeName() is called after p, t and typ are assigned —
 * presumably it reads those fields, so the assignment order matters; confirm
 * before reordering.
 *
 * @param p the dummy partition this entity represents
 * @param complete whether the entity is fully specified
 */
public Entity(DummyPartition p, boolean complete) { d = null; this.p = p; t = p.getTable(); typ = Type.DUMMYPARTITION; name = computeName(); this.complete = complete; }
/**
 * Creates a dummy partition with an explicit name and no partition spec.
 * NOTE(review): setTable is an overridable method invoked from a constructor —
 * safe only if no subclass overrides it; flagged for awareness, not changed.
 *
 * @param tbl the table this dummy partition belongs to
 * @param name the precomputed name for this partition
 * @throws HiveException if setting the table fails
 */
public DummyPartition(Table tbl, String name) throws HiveException { setTable(tbl); this.name = name; }
/**
 * Builds a WriteEntity for a dynamic-partition write on the given table,
 * records it in {@code writeEntities}, and hands it back to the caller.
 *
 * @param t the table receiving the dynamic-partition write
 * @param writeType the kind of write being performed
 * @return the WriteEntity that was registered
 * @throws Exception if the placeholder dummy partition cannot be created
 */
private WriteEntity addDynamicPartitionedOutput(Table t, WriteEntity.WriteType writeType) throws Exception {
  DummyPartition placeholder = new DummyPartition(t, "no clue what I should call this");
  WriteEntity output = new WriteEntity(placeholder, writeType, false);
  writeEntities.add(output);
  return output;
}
}
return new HiveLockObject(new DummyPartition(tab, path, partSpec), data);
return new HiveLockObject(new DummyPartition(tab, null, partSpec), data);
/**
 * Returns the complete name of this dummy partition.
 * For a dummy partition the complete name is simply its plain name.
 */
public String getCompleteName() {
  return this.getName();
}
/**
 * Constructs an Entity backed by a dummy (in-memory) partition.
 * NOTE(review): computeName() is called after p, t and typ are assigned —
 * presumably it reads those fields, so the assignment order matters; confirm
 * before reordering.
 *
 * @param p the dummy partition this entity represents
 * @param complete whether the entity is fully specified
 */
public Entity(DummyPartition p, boolean complete) { d = null; this.p = p; t = p.getTable(); typ = Type.DUMMYPARTITION; name = computeName(); this.complete = complete; }
/**
 * Creates a dummy partition with an explicit name and no partition spec.
 * NOTE(review): setTable is an overridable method invoked from a constructor —
 * safe only if no subclass overrides it; flagged for awareness, not changed.
 *
 * @param tbl the table this dummy partition belongs to
 * @param name the precomputed name for this partition
 * @throws HiveException if setting the table fails
 */
public DummyPartition(Table tbl, String name) throws HiveException { setTable(tbl); this.name = name; }
return new HiveLockObject(new DummyPartition(tab, path, partSpec), data);
/**
 * Returns the complete name of this dummy partition.
 * For a dummy partition the complete name is simply its plain name.
 */
public String getCompleteName() {
  return this.getName();
}
/**
 * Returns this partition's values in partition-column order.
 * Any partition column missing from {@code partSpec} contributes a null
 * entry (Map.get returns null for absent keys and the null is added as-is).
 */
@Override public List<String> getValues() {
  List<FieldSchema> partCols = this.getTable().getPartCols();
  List<String> partValues = new ArrayList<String>(partCols.size());
  for (FieldSchema col : partCols) {
    partValues.add(partSpec.get(col.getName()));
  }
  return partValues;
}
/**
 * Creates a dummy partition with an explicit name and a partition spec.
 * The spec is defensively copied into a LinkedHashMap, preserving the
 * caller's key insertion order.
 * NOTE(review): setTable is an overridable method invoked from a constructor —
 * safe only if no subclass overrides it; flagged for awareness, not changed.
 *
 * @param tbl the table this dummy partition belongs to
 * @param name the precomputed name for this partition
 * @param partSpec partition column name to value mapping; copied, not retained
 * @throws HiveException if setting the table fails
 */
public DummyPartition(Table tbl, String name, Map<String, String> partSpec) throws HiveException { setTable(tbl); this.name = name; this.partSpec = new LinkedHashMap<String, String>(partSpec); }
return new HiveLockObject(new DummyPartition(tab, null, partSpec), data);
/**
 * Creates a lock object for a dummy (in-memory) partition.
 * Delegates to the path-array constructor, using the partition's name as the
 * single lock path component.
 *
 * @param par the dummy partition whose name identifies the lock path
 * @param lockData metadata (query id, mode, etc. — presumably; confirm against HiveLockObjectData) attached to this lock
 */
public HiveLockObject(DummyPartition par, HiveLockObjectData lockData) { this(new String[] {par.getName()}, lockData); }
/**
 * Returns this partition's values in partition-column order.
 * Any partition column missing from {@code partSpec} contributes a null
 * entry (Map.get returns null for absent keys and the null is added as-is).
 */
@Override public List<String> getValues() {
  List<FieldSchema> partCols = this.getTable().getPartCols();
  List<String> partValues = new ArrayList<String>(partCols.size());
  for (FieldSchema col : partCols) {
    partValues.add(partSpec.get(col.getName()));
  }
  return partValues;
}
/**
 * Creates a dummy partition with an explicit name and a partition spec.
 * The spec is defensively copied into a LinkedHashMap, preserving the
 * caller's key insertion order.
 * NOTE(review): setTable is an overridable method invoked from a constructor —
 * safe only if no subclass overrides it; flagged for awareness, not changed.
 *
 * @param tbl the table this dummy partition belongs to
 * @param name the precomputed name for this partition
 * @param partSpec partition column name to value mapping; copied, not retained
 * @throws HiveException if setting the table fails
 */
public DummyPartition(Table tbl, String name, Map<String, String> partSpec) throws HiveException { setTable(tbl); this.name = name; this.partSpec = new LinkedHashMap<String, String>(partSpec); }
try { locks.add(new HiveLockObj( new HiveLockObject(new DummyPartition(p.getTable(), p.getTable().getDbName() + "/" + org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.encodeTableName(p.getTable().getTableName()) + "/" + partialName,
/**
 * Returns the complete name of this dummy partition.
 * For a dummy partition the complete name is simply its plain name.
 */
public String getCompleteName() {
  return this.getName();
}
}
/**
 * Constructs a WriteEntity backed by a dummy (in-memory) partition.
 * NOTE(review): computeName() is called after p, t and typ are assigned —
 * presumably it reads those fields, so the assignment order matters; confirm
 * before reordering.
 *
 * @param p the dummy partition this write entity represents
 * @param complete whether the entity is fully specified
 */
public WriteEntity(DummyPartition p, boolean complete) { d = null; this.p = p; t = p.getTable(); typ = Type.DUMMYPARTITION; name = computeName(); this.complete = complete; }