/**
 * Creates a blocklet data map factory for the given table and initializes the
 * driver-side blocklet data map cache.
 *
 * @param carbonTable table this factory serves
 * @param dataMapSchema schema describing the data map
 */
public BlockletDataMapFactory(CarbonTable carbonTable, DataMapSchema dataMapSchema) {
  super(carbonTable, dataMapSchema);
  // Remember the table identifier; used to key cache lookups for this table.
  this.identifier = carbonTable.getAbsoluteTableIdentifier();
  cache = CacheProvider.getInstance()
      .createCache(CacheType.DRIVER_BLOCKLET_DATAMAP);
}
/** * This method will check if a cache already exists for given cache type and create in case * it is not present in the map * * @param cacheType type of cache * @param <K> * @param <V> * @return */ public <K, V> Cache<K, V> createCache(CacheType cacheType) { //check if lru cache is null, if null create one //check if cache is null for given cache type, if null create one if (!dictionaryCacheAlreadyExists(cacheType)) { synchronized (lock) { if (!dictionaryCacheAlreadyExists(cacheType)) { if (null == carbonLRUCache) { createLRULevelCacheInstance(); } createDictionaryCacheForGivenType(cacheType); } } } return cacheTypeToCacheMap.get(cacheType); }
/** * This method will check if a cache already exists for given cache type and store * if it is not present in the map */ public <K, V> Cache<K, V> createCache(CacheType cacheType, String cacheClassName) throws Exception { //check if lru cache is null, if null create one //check if cache is null for given cache type, if null create one if (!dictionaryCacheAlreadyExists(cacheType)) { synchronized (lock) { if (!dictionaryCacheAlreadyExists(cacheType)) { if (null == carbonLRUCache) { createLRULevelCacheInstance(); } Class<?> clazz = Class.forName(cacheClassName); Constructor<?> constructor = clazz.getConstructors()[0]; constructor.setAccessible(true); Cache cacheObject = (Cache) constructor.newInstance(carbonLRUCache); cacheTypeToCacheMap.put(cacheType, cacheObject); } } } return cacheTypeToCacheMap.get(cacheType); }
/**
 * Fetches the dictionary for the given column from the driver-side reverse
 * dictionary cache.
 *
 * @param columnIdentifier unique identifier of the dictionary column
 * @return the reverse dictionary loaded for the column
 * @throws IOException if the dictionary cannot be read
 */
public static Dictionary getDictionary(DictionaryColumnUniqueIdentifier columnIdentifier)
    throws IOException {
  return CacheProvider.getInstance()
      .<DictionaryColumnUniqueIdentifier, Dictionary>createCache(CacheType.REVERSE_DICTIONARY)
      .get(columnIdentifier);
}
/**
 * Removes the cached dictionary entries for one column from the driver, for both
 * the reverse and the forward dictionary caches.
 *
 * @param carbonTableIdentifier absolute identifier of the table owning the column
 * @param columnId id of the dictionary column to evict
 */
public static void removeDictionaryColumnFromCache(AbsoluteTableIdentifier carbonTableIdentifier,
    String columnId) {
  // Build the cache key once; the same key addresses both cache types.
  DictionaryColumnUniqueIdentifier dictionaryIdentifier =
      new DictionaryColumnUniqueIdentifier(carbonTableIdentifier,
          new ColumnIdentifier(columnId, null, null));
  CacheProvider cacheProvider = CacheProvider.getInstance();
  Cache<DictionaryColumnUniqueIdentifier, Dictionary> reverseCache =
      cacheProvider.createCache(CacheType.REVERSE_DICTIONARY);
  reverseCache.invalidate(dictionaryIdentifier);
  Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardCache =
      cacheProvider.createCache(CacheType.FORWARD_DICTIONARY);
  forwardCache.invalidate(dictionaryIdentifier);
}
}
CacheProvider cacheProvider = CacheProvider.getInstance(); Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardDictionaryCache = cacheProvider .createCache(CacheType.FORWARD_DICTIONARY); List<Dictionary> columnDictionaryList = forwardDictionaryCache.getAll(dictionaryColumnUniqueIdentifiers);
public BloomCoarseGrainDataMapFactory(CarbonTable carbonTable, DataMapSchema dataMapSchema) throws MalformedDataMapCommandException { super(carbonTable, dataMapSchema); Objects.requireNonNull(carbonTable); Objects.requireNonNull(dataMapSchema); this.dataMapName = dataMapSchema.getDataMapName(); List<CarbonColumn> indexedColumns = carbonTable.getIndexedColumns(dataMapSchema); this.bloomFilterSize = validateAndGetBloomFilterSize(dataMapSchema); this.bloomFilterFpp = validateAndGetBloomFilterFpp(dataMapSchema); this.bloomCompress = validateAndGetBloomCompress(dataMapSchema); List<ExpressionType> optimizedOperations = new ArrayList<ExpressionType>(); // todo: support more optimize operations optimizedOperations.add(ExpressionType.EQUALS); optimizedOperations.add(ExpressionType.IN); this.dataMapMeta = new DataMapMeta(this.dataMapName, indexedColumns, optimizedOperations); LOGGER.info(String.format("DataMap %s works for %s with bloom size %d", this.dataMapName, this.dataMapMeta, this.bloomFilterSize)); try { this.cache = CacheProvider.getInstance() .createCache(new CacheType("bloom_cache"), BloomDataMapCache.class.getName()); } catch (Exception e) { LOGGER.error(e); throw new MalformedDataMapCommandException(e.getMessage()); } }
/**
 * Lazily initializes reader state on the first row: allocates the filter and
 * output row buffers, opens the stream file positioned at this split's offset,
 * and prepares the blocklet reader plus the forward dictionary cache used to
 * decode complex dimensions.
 *
 * @throws IOException if the stream file cannot be opened or positioned
 */
private void initializeAtFirstRow() throws IOException {
  // Value buffer covers all dimensions plus measures; filterRow shares this array.
  filterValues = new Object[carbonTable.getDimensionOrdinalMax() + measureCount];
  filterRow = new RowImpl();
  filterRow.setValues(filterValues);
  outputValues = new Object[projection.length];
  outputRow = new GenericInternalRow(outputValues);
  Path file = fileSplit.getPath();
  byte[] syncMarker = getSyncMarker(file.toString());
  FileSystem fs = file.getFileSystem(hadoopConf);
  int bufferSize = Integer.parseInt(hadoopConf.get(CarbonStreamInputFormat.READ_BUFFER_SIZE,
      CarbonStreamInputFormat.READ_BUFFER_SIZE_DEFAULT));
  FSDataInputStream fileIn = fs.open(file, bufferSize);
  // Position the stream at this split before handing it to the reader.
  fileIn.seek(fileSplit.getStart());
  // NOTE(review): the boolean appears to flag whether this split starts at the
  // head of the file — confirm against StreamBlockletReader's constructor contract.
  input = new StreamBlockletReader(syncMarker, fileIn, fileSplit.getLength(),
      fileSplit.getStart() == 0, compressorName);
  cacheProvider = CacheProvider.getInstance();
  cache = cacheProvider.createCache(CacheType.FORWARD_DICTIONARY);
  // Complex-dimension query types need the forward dictionary cache created above.
  queryTypes = CarbonStreamInputFormat.getComplexDimensions(carbonTable, storageColumns, cache);
  outputSchema = new StructType((StructField[])
      DataTypeUtil.getDataTypeConverter().convertCarbonSchemaToSparkSchema(projection));
}
long t1 = System.currentTimeMillis(); if (isDictExists) { Cache<DictionaryColumnUniqueIdentifier, Dictionary> dictCache = CacheProvider.getInstance() .createCache(CacheType.REVERSE_DICTIONARY); dictionary = dictCache.get(identifier);
Cache dictCache = CacheProvider.getInstance() .createCache(CacheType.REVERSE_DICTIONARY); for (int i = 0; i < set.length; i++) { ColumnIdentifier columnIdentifier =
new DictionaryColumnUniqueIdentifier(dictionarySourceAbsoluteTableIdentifier, columnIdentifier, carbonDimension.getDataType(), dictionaryPath); CacheProvider cacheProvider = CacheProvider.getInstance(); Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardDictionaryCache = cacheProvider.createCache(CacheType.FORWARD_DICTIONARY);
/**
 * This initialization is done inside executor task
 * for column dictionary involved in decoding.
 *
 * @param carbonColumns column list
 * @param carbonTable table identifier
 * @throws IOException if a dictionary cannot be loaded from the cache
 */
@Override
public void initialize(CarbonColumn[] carbonColumns, CarbonTable carbonTable) throws IOException {
  this.carbonColumns = carbonColumns;
  dictionaries = new Dictionary[carbonColumns.length];
  dataTypes = new DataType[carbonColumns.length];
  // Hoisted out of the loop: the dictionary path is a table-level property and
  // does not change per column.
  String dictionaryPath = carbonTable.getTableInfo().getFactTable().getTableProperties()
      .get(CarbonCommonConstants.DICTIONARY_PATH);
  // Created lazily so the forward dictionary cache is only built when at least one
  // column actually needs dictionary decoding (matches the original behavior of
  // never touching the cache for dictionary-free schemas).
  Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardDictionaryCache = null;
  for (int i = 0; i < carbonColumns.length; i++) {
    // Both branches of the original recorded the data type; do it once.
    dataTypes[i] = carbonColumns[i].getDataType();
    // Only plain dictionary columns need a dictionary lookup: direct-dictionary
    // and complex columns are decoded elsewhere.
    if (carbonColumns[i].hasEncoding(Encoding.DICTIONARY)
        && !carbonColumns[i].hasEncoding(Encoding.DIRECT_DICTIONARY)
        && !carbonColumns[i].isComplex()) {
      if (forwardDictionaryCache == null) {
        forwardDictionaryCache = CacheProvider.getInstance()
            .createCache(CacheType.FORWARD_DICTIONARY);
      }
      dictionaries[i] = forwardDictionaryCache.get(new DictionaryColumnUniqueIdentifier(
          carbonTable.getAbsoluteTableIdentifier(), carbonColumns[i].getColumnIdentifier(),
          dataTypes[i], dictionaryPath));
    }
  }
}
this.isEmptyBadRecord = isEmptyBadRecord; CacheProvider cacheProvider = CacheProvider.getInstance(); Cache<DictionaryColumnUniqueIdentifier, Dictionary> cache = cacheProvider.createCache(CacheType.REVERSE_DICTIONARY);
isDirectDictionary = true; } else if (carbonDimension.hasEncoding(Encoding.DICTIONARY)) { CacheProvider cacheProvider = CacheProvider.getInstance(); Cache<DictionaryColumnUniqueIdentifier, Dictionary> cache = cacheProvider.createCache(CacheType.REVERSE_DICTIONARY); Dictionary dictionary = null; if (useOnePass) {