/**
 * Restores this object's fields from the given input stream.
 * All state is handled by the parent class, so this simply delegates.
 *
 * @param in input to deserialize from
 * @throws IOException if reading from the input fails
 */
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
}
}
/**
 * Serializes this object's fields to the given output stream.
 * All state is handled by the parent class, so this simply delegates.
 *
 * @param out output to serialize into
 * @throws IOException if writing to the output fails
 */
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
}
/**
 * Static factory that builds a {@link ColumnSpec} from its three components.
 *
 * @param fieldName      name of the column
 * @param schemaDataType data type declared for the column in the table schema
 * @param columnType     category of the column
 * @return a new {@code ColumnSpec} instance
 */
public static ColumnSpec newInstance(String fieldName, DataType schemaDataType,
    ColumnType columnType) {
  return new ColumnSpec(fieldName, schemaDataType, columnType);
}
@Override public byte[] getChunkData(int rowId) { byte[] nullBitSet = getNullBitSet(rowId, columnPage.getColumnSpec().getColumnType()); if (nullBitSet != null) { // if this row is null, return default null represent in byte array return nullBitSet; } else { if (isExplicitSorted()) { rowId = getInvertedReverseIndex(rowId); } return getChunkDataInBytes(rowId); } }
/**
 * Builds an RLE decoder for one column page.
 *
 * @param columnSpec     specification of the column being decoded
 * @param pageSize       number of rows in the page
 * @param compressorName name of the compressor used on the encoded data
 */
private RLEDecoder(TableSpec.ColumnSpec columnSpec, int pageSize, String compressorName) {
  // Fail fast on unsupported data types before retaining any state.
  validateDataType(columnSpec.getSchemaDataType());
  this.compressorName = compressorName;
  this.pageSize = pageSize;
  this.columnSpec = columnSpec;
}
/**
 * Applies this filter by producing a uniform result for every page: each row is marked
 * with the negation of whether the default value is present in the filter values.
 *
 * @param rawBlockletColumnChunks chunks of the blocklet being filtered
 * @param useBitsetPipeLine       unused here; result does not depend on the pipeline flag
 * @return a bitset group covering all pages with the same default decision per row
 * @throws IOException if reading block metadata fails
 */
@Override
public BitSetGroup applyFilter(RawBlockletColumnChunks rawBlockletColumnChunks,
    boolean useBitsetPipeLine) throws IOException {
  int pageCount = rawBlockletColumnChunks.getDataBlock().numberOfPages();
  int rowCount = rawBlockletColumnChunks.getDataBlock().numRows();
  return FilterUtil.createBitSetGroupWithDefaultValue(
      pageCount, rowCount, !isDefaultValuePresentInFilterValues);
}
/**
 * Checks whether the column qualifies for an inverted index: it must be a dimension,
 * must not be a complex primitive, must be one of the sort columns, and must have the
 * inverted index option enabled.
 *
 * @param isComplexPrimitive whether the column is a primitive inside a complex type
 * @param columnSpec         specification of the column to inspect
 * @return true only when all inverted-index conditions hold
 */
private static boolean isInvertedIndex(boolean isComplexPrimitive,
    TableSpec.ColumnSpec columnSpec) {
  // Only non-complex-primitive dimensions can carry an inverted index.
  if (isComplexPrimitive || !(columnSpec instanceof TableSpec.DimensionSpec)) {
    return false;
  }
  TableSpec.DimensionSpec dimensionSpec = (TableSpec.DimensionSpec) columnSpec;
  return dimensionSpec.isInSortColumns() && dimensionSpec.isDoInvertedIndex();
}
/**
 * Releases resources held by this reader: closes the data block iterator if one was
 * created, then finishes the file reader. Failures while finishing are logged rather
 * than propagated, so {@code close()} never throws.
 */
@Override
public void close() {
  if (dataBlockIterator != null) {
    dataBlockIterator.close();
  }
  try {
    fileReader.finish();
  } catch (IOException e) {
    // Best effort on shutdown: log the failure instead of propagating it from close().
    LOGGER.error(e);
  }
}
/**
 * Selects every page of the blocklet: this filter performs no page-level pruning, so
 * all pages are marked as potentially containing matches.
 *
 * @param rawBlockletColumnChunks chunks of the blocklet under consideration
 * @return a bitset with one set bit per page
 * @throws FilterUnsupportedException declared for interface compatibility
 * @throws IOException if reading block metadata fails
 */
@Override
public BitSet prunePages(RawBlockletColumnChunks rawBlockletColumnChunks)
    throws FilterUnsupportedException, IOException {
  int pageCount = rawBlockletColumnChunks.getDataBlock().numberOfPages();
  BitSet selectedPages = new BitSet(pageCount);
  selectedPages.set(0, pageCount);
  return selectedPages;
}
/**
 * Creates a data writer step chained after the given child step, and prepares the
 * local dictionary generator map from the target table.
 *
 * @param configuration load configuration carrying the table spec
 * @param child         preceding step in the load pipeline
 */
public DataWriterProcessorStepImpl(CarbonDataLoadConfiguration configuration,
    AbstractDataLoadProcessorStep child) {
  super(configuration, child);
  this.localDictionaryGeneratorMap =
      CarbonUtil.getLocalDictionaryModel(configuration.getTableSpec().getCarbonTable());
}
/** * To get the next block * @return next data block * */ @Override public DataRefNode next() { // get the current blocks DataRefNode datablockTemp = datablock; // store the next data block datablock = datablock.getNextDataRefNode(); // increment the counter blockCounter++; // if all the data block is processed then // set the has next flag to false // or if number of blocks assigned to this iterator is processed // then also set the hasnext flag to false if (null == datablock || blockCounter >= this.totalNumberOfBlocksToScan) { hasNext = false; } return datablockTemp; } }
/**
 * Fills {@code measureSpec} with one spec per measure, preserving the order of the
 * given list.
 *
 * @param measures measures of the table, in schema order
 */
private void addMeasures(List<CarbonMeasure> measures) {
  int index = 0;
  for (CarbonMeasure measure : measures) {
    measureSpec[index++] = new MeasureSpec(measure.getColName(), measure.getDataType());
  }
}
/**
 * Lazily loads the dimension raw column chunk for this filter's column index.
 * If the chunk is already cached in the holder, nothing is read again.
 *
 * @param blockChunkHolder holder caching raw column chunks for the blocklet
 * @throws IOException if reading the dimension chunk from the file fails
 */
protected void readBlockDataChunk(RawBlockletColumnChunks blockChunkHolder) throws IOException {
  if (blockChunkHolder.getDimensionRawColumnChunks()[blockIndex] == null) {
    // Not cached yet: read the chunk for this column and store it in the holder.
    blockChunkHolder.getDimensionRawColumnChunks()[blockIndex] =
        blockChunkHolder.getDataBlock()
            .readDimensionChunk(blockChunkHolder.getFileReader(), blockIndex);
  }
}
}
/**
 * Returns the data type declared for this column in the table schema,
 * as recorded in the underlying column spec.
 *
 * @return the schema-declared data type of the column
 */
public DataType getSchemaDataType() {
  return columnSpec.getSchemaDataType();
}
/**
 * Applies this filter by producing a uniform result for every page: each row is marked
 * according to whether the default value is present in the filter values.
 *
 * @param rawBlockletColumnChunks chunks of the blocklet being filtered
 * @param useBitsetPipeLine       unused here; result does not depend on the pipeline flag
 * @return a bitset group covering all pages with the same default decision per row
 * @throws IOException if reading block metadata fails
 */
@Override
public BitSetGroup applyFilter(RawBlockletColumnChunks rawBlockletColumnChunks,
    boolean useBitsetPipeLine) throws IOException {
  int pageCount = rawBlockletColumnChunks.getDataBlock().numberOfPages();
  int rowCount = rawBlockletColumnChunks.getDataBlock().numRows();
  return FilterUtil.createBitSetGroupWithDefaultValue(
      pageCount, rowCount, isDefaultValuePresentInFilterValues);
}
/**
 * Reads this object's serialized fields from the input; deserialization is fully
 * delegated to the superclass.
 *
 * @param in input to deserialize from
 * @throws IOException if reading from the input fails
 */
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
}
}
/**
 * Writes this object's fields to the output; serialization is fully delegated to the
 * superclass.
 *
 * @param out output to serialize into
 * @throws IOException if writing to the output fails
 */
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
}
public static ColumnSpec newInstanceLegacy(String fieldName, DataType schemaDataType, ColumnType columnType) { // for backward compatibility as the precision and scale is not stored, the values should be // initialized with -1 for both precision and scale if (schemaDataType instanceof DecimalType) { ((DecimalType) schemaDataType).setPrecision(-1); ((DecimalType) schemaDataType).setScale(-1); } return new ColumnSpec(fieldName, schemaDataType, columnType); }
/**
 * Marks all pages as candidates: no page-level pruning is performed by this filter,
 * so every page bit is set.
 *
 * @param rawBlockletColumnChunks chunks of the blocklet under consideration
 * @return a bitset with every page bit set
 * @throws FilterUnsupportedException declared for interface compatibility
 * @throws IOException if reading block metadata fails
 */
@Override
public BitSet prunePages(RawBlockletColumnChunks rawBlockletColumnChunks)
    throws FilterUnsupportedException, IOException {
  int pageCount = rawBlockletColumnChunks.getDataBlock().numberOfPages();
  BitSet allPages = new BitSet(pageCount);
  allPages.set(0, pageCount);
  return allPages;
}
/**
 * Creates a data writer step with no preceding child step, and prepares the local
 * dictionary generator map from the target table.
 *
 * @param configuration load configuration carrying the table spec
 */
public DataWriterProcessorStepImpl(CarbonDataLoadConfiguration configuration) {
  super(configuration, null);
  this.localDictionaryGeneratorMap =
      CarbonUtil.getLocalDictionaryModel(configuration.getTableSpec().getCarbonTable());
}