/**
 * Constructor for a partition.
 *
 * @param p
 *          Partition that is read or written to.
 * @param complete
 *          whether this entity represents the complete partition; stored
 *          as-is (semantics depend on callers — TODO confirm).
 */
public Entity(Partition p, boolean complete) {
  d = null;                  // NOTE(review): 'd' is declared outside this view — presumably the database field; confirm
  this.p = p;
  t = p.getTable();          // cache the owning table
  typ = Type.PARTITION;
  name = computeName();      // derived from the fields assigned above, so must stay after them
  this.complete = complete;
}
/**
 * Builds a {@link Partish} view over the given partition, pairing it with
 * its owning table.
 *
 * @param part partition to wrap; must already be associated with a table
 * @return a {@code PPart} wrapping the partition and its table
 */
public static Partish buildFor(Partition part) {
  return new PPart(part.getTable(), part);
}
/**
 * Returns the owner of the hive table this partition belongs to.
 *
 * @return the table owner, or absent when no owner is recorded
 */
public Optional<String> getOwner() {
  String tableOwner = this.hivePartition.getTable().getOwner();
  return Optional.fromNullable(tableOwner);
}
}
/**
 * Creates a descriptor for the given partition, deriving the table
 * descriptor from the partition's owning table.
 *
 * @param part partition to describe
 * @throws HiveException if the table descriptor cannot be built
 */
public PartitionDesc(final Partition part) throws HiveException {
  this(part, getTableDesc(part.getTable()));
}
/**
 * Returns the previous high watermark for a partition.
 *
 * <p>Partitions carry no separate previous watermark here; the lookup is
 * delegated to the table-level overload using the owning table.</p>
 *
 * @param partition partition whose watermark is requested
 * @return the previous high watermark of the partition's table
 */
@Override
public LongWatermark getPreviousHighWatermark(Partition partition) {
  return getPreviousHighWatermark(partition.getTable());
}
/**
 * Returns the expected high watermark for a partition by delegating to the
 * table-level overload.
 *
 * <p>{@code partitionProcessTime} is unused in this implementation — only
 * the table process time participates in the delegation.
 * NOTE(review): confirm ignoring the partition process time is intended.</p>
 */
@Override
public LongWatermark getExpectedHighWatermark(Partition partition, long tableProcessTime, long partitionProcessTime) {
  return getExpectedHighWatermark(partition.getTable(), tableProcessTime);
}
public static String getPartitionInformation(Partition part) { StringBuilder tableInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE); // Table Metadata tableInfo.append(LINE_DELIM).append("# Detailed Partition Information").append(LINE_DELIM); getPartitionMetaDataInformation(tableInfo, part); // Storage information. if (part.getTable().getTableType() != TableType.VIRTUAL_VIEW) { tableInfo.append(LINE_DELIM).append("# Storage Information").append(LINE_DELIM); getStorageDescriptorInfo(tableInfo, part.getTPartition().getSd()); } return tableInfo.toString(); }
/**
 * Returns this partition's name qualified by its table's complete name
 * (which includes the db name), joined with {@code "@"}.
 *
 * @return qualified name of the form {@code <tableCompleteName>@<partitionName>}
 */
public String getCompleteName() {
  String qualifiedTable = getTable().getCompleteName();
  String partitionName = getName();
  return qualifiedTable + "@" + partitionName;
}
public static String getPartitionInformation(Partition part) { StringBuilder tableInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE); // Table Metadata tableInfo.append(LINE_DELIM).append("# Detailed Partition Information").append(LINE_DELIM); getPartitionMetaDataInformation(tableInfo, part); // Storage information. if (part.getTable().getTableType() != TableType.VIRTUAL_VIEW) { tableInfo.append(LINE_DELIM).append("# Storage Information").append(LINE_DELIM); getStorageDescriptorInfo(tableInfo, part.getTPartition().getSd()); } return tableInfo.toString(); }
/**
 * Returns the fully qualified partition name, i.e. the table's complete
 * name (db-qualified) and this partition's name separated by {@code "@"}.
 *
 * @return qualified name of the form {@code <tableCompleteName>@<partitionName>}
 */
public String getCompleteName() {
  String tablePart = getTable().getCompleteName();
  return tablePart + "@" + getName();
}
/**
 * Creates a Mockito stub of a {@link Partition} bound to the given table
 * and partition values. {@code RETURNS_SMART_NULLS} makes any unstubbed
 * call fail with a descriptive error instead of yielding a plain null.
 */
private static Partition mockPartition(Table table, List<String> values) {
  Partition mockPart = Mockito.mock(Partition.class, Mockito.RETURNS_SMART_NULLS);
  Mockito.when(mockPart.getValues()).thenReturn(values);
  Mockito.when(mockPart.getTable()).thenReturn(table);
  return mockPart;
}
}
/**
 * Builds a Mockito {@link Partition} stub whose table and values are fixed
 * to the supplied arguments; unstubbed calls return smart nulls so that
 * accidental use fails loudly.
 */
private static Partition mockPartition(Table table, List<String> values) {
  Partition stub = Mockito.mock(Partition.class, Mockito.RETURNS_SMART_NULLS);
  Mockito.when(stub.getTable()).thenReturn(table);
  Mockito.when(stub.getValues()).thenReturn(values);
  return stub;
}
/**
 * Get the expected high watermark for this partition.
 *
 * <p>Looks up the watermark pre-computed for this specific partition in
 * {@code expectedHighWatermarks}, keyed by the owning table and the
 * partition itself. Neither {@code tableProcessTime} nor
 * {@code partitionProcessTime} participates in the lookup.</p>
 *
 * {@inheritDoc}
 * @see org.apache.gobblin.data.management.conversion.hive.watermarker.HiveSourceWatermarker#getExpectedHighWatermark(org.apache.hadoop.hive.ql.metadata.Partition, long, long)
 */
@Override
public LongWatermark getExpectedHighWatermark(Partition partition, long tableProcessTime, long partitionProcessTime) {
  return new LongWatermark(
      this.expectedHighWatermarks.getPartitionWatermark(tableKey(partition.getTable()), partitionKey(partition)));
}
/**
 * Decides whether the given entity requires a lock. Table and partition
 * entities defer to {@code isLockableTable} on the relevant table; every
 * other entity type is conservatively locked.
 */
private static boolean needsLock(Entity entity) {
  switch (entity.getType()) {
    case PARTITION:
      return isLockableTable(entity.getPartition().getTable());
    case TABLE:
      return isLockableTable(entity.getTable());
    default:
      // Unknown entity kinds always take a lock.
      return true;
  }
}
/**
 * Creates a work unit for a single hive partition.
 *
 * <p>Copies the partition's name and location, plus the owning table's
 * partition keys, into the work unit on top of the dataset-level state set
 * by the delegated constructor.</p>
 *
 * @param hiveDataset dataset the partition belongs to
 * @param partition hive partition backing this work unit
 */
public HiveWorkUnit(HiveDataset hiveDataset, Partition partition) {
  this(hiveDataset);
  setPartitionName(partition.getName());
  setPartitionLocation(partition.getLocation());
  setPartitionKeys(partition.getTable().getPartitionKeys());
}
/**
 * Creates a descriptor for the given partition, deriving the table
 * descriptor from the partition's owning table.
 *
 * <p>For self-describing input formats the per-column schema is omitted
 * from the properties, since readers recover it from the data files.</p>
 *
 * @param part partition to describe
 * @throws HiveException if the descriptor cannot be built
 */
public PartitionDesc(final Partition part) throws HiveException {
  PartitionDescConstructorHelper(part, getTableDesc(part.getTable()), true);
  if (Utilities.isInputFileFormatSelfDescribing(this)) {
    // if IF is self describing no need to send column info per partition, since its not used anyway.
    Table tbl = part.getTable();
    // NOTE(review): the partition's storage descriptor is passed for BOTH SD
    // arguments here, while the TableDesc overload passes only one SD —
    // confirm against the MetaStoreUtils.getSchemaWithoutCols signature
    // (partition SD vs. table SD) that this is intended.
    setProperties(MetaStoreUtils.getSchemaWithoutCols(part.getTPartition().getSd(),
        part.getTPartition().getSd(), part.getParameters(),
        tbl.getDbName(), tbl.getTableName(), tbl.getPartitionKeys()));
  } else {
    setProperties(part.getMetadataFromPartitionSchema());
  }
}
/**
 * Appends the partition-level metadata (values, database, table, creation
 * and access times, location, and — when present — the partition
 * parameters) to the supplied builder.
 *
 * @param tableInfo builder the formatted output is appended to
 * @param part partition whose metadata is rendered
 */
private static void getPartitionMetaDataInformation(StringBuilder tableInfo, Partition part) {
  formatOutput("Partition Value:", part.getValues().toString(), tableInfo);
  formatOutput("Database:", part.getTPartition().getDbName(), tableInfo);
  formatOutput("Table:", part.getTable().getTableName(), tableInfo);
  formatOutput("CreateTime:", formatDate(part.getTPartition().getCreateTime()), tableInfo);
  formatOutput("LastAccessTime:", formatDate(part.getTPartition().getLastAccessTime()), tableInfo);
  formatOutput("Location:", part.getLocation(), tableInfo);
  // Only emit the parameters section when there is something to show
  // (isEmpty() is the idiomatic form of size() > 0 negated).
  if (!part.getTPartition().getParameters().isEmpty()) {
    tableInfo.append("Partition Parameters:").append(LINE_DELIM);
    displayAllParameters(part.getTPartition().getParameters(), tableInfo);
  }
}
/**
 * Creates a descriptor for the given partition using an already-built
 * table descriptor.
 *
 * <p>For self-describing input formats the per-column schema is omitted
 * from the properties, since readers recover it from the data files.</p>
 *
 * @param part partition to describe
 * @param tableDesc descriptor of the partition's owning table
 * @throws HiveException if the descriptor cannot be built
 */
public PartitionDesc(final Partition part, final TableDesc tableDesc) throws HiveException {
  PartitionDescConstructorHelper(part, tableDesc, true);
  if (Utilities.isInputFileFormatSelfDescribing(this)) {
    // if IF is self describing no need to send column info per partition, since its not used anyway.
    Table tbl = part.getTable();
    setProperties(MetaStoreUtils.getSchemaWithoutCols(part.getTPartition().getSd(),
        part.getParameters(), tbl.getDbName(), tbl.getTableName(), tbl.getPartitionKeys()));
  } else {
    setProperties(part.getMetadataFromPartitionSchema());
  }
}