public BlockletDataMapFactory(CarbonTable carbonTable, DataMapSchema dataMapSchema) { super(carbonTable, dataMapSchema); this.identifier = carbonTable.getAbsoluteTableIdentifier(); cache = CacheProvider.getInstance() .createCache(CacheType.DRIVER_BLOCKLET_DATAMAP); }
/**
 * Fetches (loading on a cache miss) the reverse dictionary for the given column.
 *
 * @param columnIdentifier unique identifier of the dictionary column
 * @return the reverse dictionary for the column
 * @throws IOException if the dictionary cannot be loaded into the cache
 */
public static Dictionary getDictionary(DictionaryColumnUniqueIdentifier columnIdentifier)
    throws IOException {
  CacheProvider provider = CacheProvider.getInstance();
  Cache<DictionaryColumnUniqueIdentifier, Dictionary> reverseDictionaryCache =
      provider.createCache(CacheType.REVERSE_DICTIONARY);
  return reverseDictionaryCache.get(columnIdentifier);
}
// NOTE(review): fragment of a larger method — the enclosing signature is outside
// this view. Obtains the shared forward dictionary cache from the singleton
// provider and bulk-loads dictionaries for all requested identifiers.
CacheProvider cacheProvider = CacheProvider.getInstance();
Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardDictionaryCache = cacheProvider
    .createCache(CacheType.FORWARD_DICTIONARY);
// getAll loads every requested dictionary in one call; presumably the result list
// order matches dictionaryColumnUniqueIdentifiers — TODO confirm Cache.getAll contract.
List<Dictionary> columnDictionaryList =
    forwardDictionaryCache.getAll(dictionaryColumnUniqueIdentifiers);
public BloomCoarseGrainDataMapFactory(CarbonTable carbonTable, DataMapSchema dataMapSchema) throws MalformedDataMapCommandException { super(carbonTable, dataMapSchema); Objects.requireNonNull(carbonTable); Objects.requireNonNull(dataMapSchema); this.dataMapName = dataMapSchema.getDataMapName(); List<CarbonColumn> indexedColumns = carbonTable.getIndexedColumns(dataMapSchema); this.bloomFilterSize = validateAndGetBloomFilterSize(dataMapSchema); this.bloomFilterFpp = validateAndGetBloomFilterFpp(dataMapSchema); this.bloomCompress = validateAndGetBloomCompress(dataMapSchema); List<ExpressionType> optimizedOperations = new ArrayList<ExpressionType>(); // todo: support more optimize operations optimizedOperations.add(ExpressionType.EQUALS); optimizedOperations.add(ExpressionType.IN); this.dataMapMeta = new DataMapMeta(this.dataMapName, indexedColumns, optimizedOperations); LOGGER.info(String.format("DataMap %s works for %s with bloom size %d", this.dataMapName, this.dataMapMeta, this.bloomFilterSize)); try { this.cache = CacheProvider.getInstance() .createCache(new CacheType("bloom_cache"), BloomDataMapCache.class.getName()); } catch (Exception e) { LOGGER.error(e); throw new MalformedDataMapCommandException(e.getMessage()); } }
/**
 * Evicts a column's dictionary entries from the driver cache, covering both the
 * reverse and the forward dictionary.
 *
 * @param carbonTableIdentifier table the dictionary column belongs to
 * @param columnId unique id of the dictionary column to evict
 */
public static void removeDictionaryColumnFromCache(AbsoluteTableIdentifier carbonTableIdentifier,
    String columnId) {
  DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier =
      new DictionaryColumnUniqueIdentifier(carbonTableIdentifier,
          new ColumnIdentifier(columnId, null, null));
  CacheProvider cacheProvider = CacheProvider.getInstance();
  Cache<DictionaryColumnUniqueIdentifier, Dictionary> reverseCache =
      cacheProvider.createCache(CacheType.REVERSE_DICTIONARY);
  reverseCache.invalidate(dictionaryColumnUniqueIdentifier);
  Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardCache =
      cacheProvider.createCache(CacheType.FORWARD_DICTIONARY);
  forwardCache.invalidate(dictionaryColumnUniqueIdentifier);
}
}
/**
 * Lazily initializes per-split state on the first row read: filter/output row
 * buffers, the stream blocklet reader positioned at this split, the forward
 * dictionary cache, complex-dimension query types, and the Spark output schema.
 *
 * @throws IOException if the split file cannot be opened or read
 */
private void initializeAtFirstRow() throws IOException {
  // Row buffer used while evaluating filters; sized for all dimensions + measures.
  filterValues = new Object[carbonTable.getDimensionOrdinalMax() + measureCount];
  filterRow = new RowImpl();
  filterRow.setValues(filterValues);

  // Row buffer for the projected output columns.
  outputValues = new Object[projection.length];
  outputRow = new GenericInternalRow(outputValues);

  // Open the split's file and position at the split start; the sync marker lets
  // the reader find blocklet boundaries.
  Path splitPath = fileSplit.getPath();
  byte[] marker = getSyncMarker(splitPath.toString());
  FileSystem fileSystem = splitPath.getFileSystem(hadoopConf);
  int readBufferSize = Integer.parseInt(hadoopConf.get(
      CarbonStreamInputFormat.READ_BUFFER_SIZE,
      CarbonStreamInputFormat.READ_BUFFER_SIZE_DEFAULT));
  FSDataInputStream inputStream = fileSystem.open(splitPath, readBufferSize);
  inputStream.seek(fileSplit.getStart());
  boolean isFirstSplit = fileSplit.getStart() == 0;
  input = new StreamBlockletReader(marker, inputStream, fileSplit.getLength(),
      isFirstSplit, compressorName);

  // Forward dictionary cache is needed to decode complex dictionary dimensions.
  cacheProvider = CacheProvider.getInstance();
  cache = cacheProvider.createCache(CacheType.FORWARD_DICTIONARY);
  queryTypes = CarbonStreamInputFormat.getComplexDimensions(carbonTable, storageColumns, cache);

  outputSchema = new StructType((StructField[])
      DataTypeUtil.getDataTypeConverter().convertCarbonSchemaToSparkSchema(projection));
}
// NOTE(review): fragment — the 'if' opened here is closed outside this view, and
// 'dictionary'/'identifier'/'isDictExists' are declared elsewhere. When the column
// has a dictionary, resolve it via the driver-side reverse dictionary cache.
if (isDictExists) {
  Cache<DictionaryColumnUniqueIdentifier, Dictionary> dictCache = CacheProvider.getInstance()
      .createCache(CacheType.REVERSE_DICTIONARY);
  dictionary = dictCache.get(identifier);
// NOTE(review): fragment — continuation of a chained call whose receiver
// (presumably a CacheProvider) is outside this view; creates the reverse
// dictionary cache.
.createCache(CacheType.REVERSE_DICTIONARY);
// Iterates the 'set' array declared elsewhere; the loop body is truncated
// mid-statement at the end of this fragment.
for (int i = 0; i < set.length; i++) {
  ColumnIdentifier columnIdentifier =
// NOTE(review): fragment — acquires the singleton cache provider and the shared
// forward dictionary cache; the enclosing method is outside this view.
CacheProvider cacheProvider = CacheProvider.getInstance();
Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardDictionaryCache =
    cacheProvider.createCache(CacheType.FORWARD_DICTIONARY);
/**
 * This initialization is done inside executor task
 * for column dictionary involved in decoding.
 *
 * <p>For every plain-dictionary column (dictionary-encoded, not direct-dictionary,
 * not complex) the forward dictionary is loaded from the shared cache; all other
 * columns only have their data type recorded.
 *
 * @param carbonColumns column list
 * @param carbonTable table identifier
 * @throws IOException if a dictionary cannot be loaded from the cache
 */
@Override
public void initialize(CarbonColumn[] carbonColumns, CarbonTable carbonTable) throws IOException {
  this.carbonColumns = carbonColumns;
  dictionaries = new Dictionary[carbonColumns.length];
  dataTypes = new DataType[carbonColumns.length];
  // Hoisted loop invariants: the provider is a singleton and the dictionary path
  // is a table-level property, identical for every column.
  CacheProvider cacheProvider = CacheProvider.getInstance();
  String dictionaryPath = carbonTable.getTableInfo().getFactTable().getTableProperties()
      .get(CarbonCommonConstants.DICTIONARY_PATH);
  // Created lazily so the cache is only instantiated when at least one
  // dictionary column is present, matching the original behavior.
  Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardDictionaryCache = null;
  for (int i = 0; i < carbonColumns.length; i++) {
    dataTypes[i] = carbonColumns[i].getDataType();
    if (carbonColumns[i].hasEncoding(Encoding.DICTIONARY)
        && !carbonColumns[i].hasEncoding(Encoding.DIRECT_DICTIONARY)
        && !carbonColumns[i].isComplex()) {
      if (forwardDictionaryCache == null) {
        forwardDictionaryCache = cacheProvider.createCache(CacheType.FORWARD_DICTIONARY);
      }
      dictionaries[i] = forwardDictionaryCache.get(new DictionaryColumnUniqueIdentifier(
          carbonTable.getAbsoluteTableIdentifier(), carbonColumns[i].getColumnIdentifier(),
          dataTypes[i], dictionaryPath));
    }
  }
}
// NOTE(review): fragment — appears to be a lone statement creating the reverse
// dictionary cache with the return value discarded, but it could also be the
// tail of a multi-line assignment; TODO confirm against the surrounding code.
cacheProvider.createCache(CacheType.REVERSE_DICTIONARY);
// NOTE(review): fragment — sets up the reverse dictionary cache ahead of a
// branch on 'useOnePass' (declared elsewhere); truncated at the opening brace
// of that branch.
CacheProvider cacheProvider = CacheProvider.getInstance();
Cache<DictionaryColumnUniqueIdentifier, Dictionary> cache =
    cacheProvider.createCache(CacheType.REVERSE_DICTIONARY);
Dictionary dictionary = null;
if (useOnePass) {