@InterfaceAudience.Developer("Test") public class TestUtil {
@InterfaceAudience.Developer("stats") @InterfaceStability.Evolving public class TaskStatistics implements Serializable {
@InterfaceAudience.Developer("DataMap") @InterfaceStability.Evolving public abstract class DataMapWriter {
@InterfaceAudience.Developer("DataMap") @InterfaceStability.Evolving public class FineGrainBlocklet extends Blocklet implements Serializable {
@InterfaceAudience.Developer("DataMap") @InterfaceStability.Evolving public class DataMapRegistry {
@InterfaceAudience.Developer("DataMap") @InterfaceStability.Evolving public class DataMapMeta {
/**
 * Factory for {@link FineGrainDataMap}.
 *
 * 1. Any filter query which hits the table with datamap will call the prune method of FGdatamap.
 * 2. The prune method of FGDatamap returns a list of FineGrainBlocklet; these blocklets contain
 *    the information of block, blocklet, page and rowids as well.
 * 3. The pruned blocklets are internally written to file, and only the block, blocklet and
 *    filepath information is returned as part of Splits.
 * 4. Based on the splits, scanrdd schedules the tasks.
 * 5. In filterscanner we check the datamapwriterpath from the split and read the
 *    bitset if it exists, and pass this bitset as input to it.
 */
@InterfaceAudience.Developer("DataMap")
@InterfaceStability.Evolving
public abstract class FineGrainDataMapFactory extends DataMapFactory<FineGrainDataMap> {

  /**
   * Creates a factory bound to the given table and datamap schema;
   * delegates all state handling to the parent {@link DataMapFactory}.
   */
  public FineGrainDataMapFactory(CarbonTable carbonTable, DataMapSchema dataMapSchema) {
    super(carbonTable, dataMapSchema);
  }

  /** This factory always produces fine-grain ({@code FG}) level datamaps. */
  @Override
  public DataMapLevel getDataMapLevel() {
    return DataMapLevel.FG;
  }
}
/**
 * Factory for {@link CoarseGrainDataMap}.
 *
 * 1. Any filter query which hits the table with datamap will call the prune method of CGdatamap.
 * 2. The prune method of CGDatamap returns a list of Blocklet; these blocklets contain the
 *    information of block and blocklet.
 * 3. Based on the splits, scanrdd schedules the tasks.
 */
@InterfaceAudience.Developer("DataMap")
@InterfaceStability.Evolving
public abstract class CoarseGrainDataMapFactory extends DataMapFactory<CoarseGrainDataMap> {

  /**
   * Creates a factory bound to the given table and datamap schema;
   * delegates all state handling to the parent {@link DataMapFactory}.
   */
  public CoarseGrainDataMapFactory(CarbonTable carbonTable, DataMapSchema dataMapSchema) {
    super(carbonTable, dataMapSchema);
  }

  /** This factory always produces coarse-grain ({@code CG}) level datamaps. */
  @Override
  public DataMapLevel getDataMapLevel() {
    return DataMapLevel.CG;
  }
}
/**
 * DataMapBuilder is used to implement the REBUILD DATAMAP command: it reads all existing
 * data in the main table and loads it into the DataMap. All existing index data will be
 * deleted if there is existing data in the datamap.
 *
 * NOTE(review): presumably invoked as initialize() -> addRow() per row -> finish() ->
 * close() — confirm the exact lifecycle against the caller.
 */
@InterfaceAudience.Developer("DataMap")
public interface DataMapBuilder {

  /**
   * Prepares the builder before any rows are added.
   *
   * @throws IOException if preparation fails
   */
  void initialize() throws IOException;

  /**
   * Adds one row of the main table to the index being built.
   *
   * @param blockletId id of the blocklet that contains the row
   * @param pageId     id of the page within the blocklet
   * @param rowId      id of the row within the page
   * @param values     column values of the row to index
   * @throws IOException if the row cannot be added
   */
  void addRow(int blockletId, int pageId, int rowId, Object[] values) throws IOException;

  /**
   * Signals that all rows have been added.
   *
   * @throws IOException if finalizing the index fails
   */
  void finish() throws IOException;

  /**
   * Releases any resources held by this builder.
   *
   * @throws IOException if closing fails
   */
  void close() throws IOException;

  /**
   * Whether to create the index on internal carbon bytes (such as dictionary encoded)
   * or on the original value.
   */
  boolean isIndexForCarbonRawBytes();
}
/**
 * DataMap for Fine Grain level, see {@link org.apache.carbondata.core.datamap.DataMapLevel#FG}.
 */
@InterfaceAudience.Developer("DataMap")
@InterfaceStability.Evolving
public abstract class FineGrainDataMap implements DataMap<FineGrainBlocklet> {

  /**
   * Default prune implementation: expression-based pruning is not supported unless a
   * subclass overrides this method.
   *
   * @param filter            filter expression to prune by (unused here)
   * @param segmentProperties properties of the segment being pruned (unused here)
   * @param partitions        partition specs to restrict the prune to (unused here)
   * @param identifier        identifier of the table being queried (unused here)
   * @throws UnsupportedOperationException always, in this base implementation
   */
  @Override
  public List<FineGrainBlocklet> prune(Expression filter, SegmentProperties segmentProperties,
      List<PartitionSpec> partitions, AbsoluteTableIdentifier identifier) throws IOException {
    throw new UnsupportedOperationException("Filter expression not supported");
  }
}
/**
 * DataMap for Coarse Grain level, see {@link org.apache.carbondata.core.datamap.DataMapLevel#CG}.
 */
@InterfaceAudience.Developer("DataMap")
@InterfaceStability.Evolving
public abstract class CoarseGrainDataMap implements DataMap<Blocklet> {

  /**
   * Default prune implementation: expression-based pruning is not supported unless a
   * subclass overrides this method.
   *
   * @param expression        filter expression to prune by (unused here)
   * @param segmentProperties properties of the segment being pruned (unused here)
   * @param partitions        partition specs to restrict the prune to (unused here)
   * @param identifier        identifier of the table being queried (unused here)
   * @throws UnsupportedOperationException always, in this base implementation
   */
  @Override
  public List<Blocklet> prune(Expression expression, SegmentProperties segmentProperties,
      List<PartitionSpec> partitions, AbsoluteTableIdentifier identifier) throws IOException {
    throw new UnsupportedOperationException("Filter expression not supported");
  }
}