congrats Icon
New! Tabnine Pro 14-day free trial
Start a free trial
Tabnine Logo
Cache
Code IndexAdd Tabnine to your IDE (free)

How to use
Cache
in
org.apache.carbondata.core.cache

Best Java code snippets using org.apache.carbondata.core.cache.Cache (Showing top 20 results out of 315)

origin: org.apache.carbondata/carbondata-hadoop

/**
 * This method will get the value for the given key. If value does not exist
 * for the given key, it will check and load the value.
 *
 * @param key
 * @return
 * @throws IOException in case memory is not sufficient to load data into memory
 */
/**
 * Returns the cached value for {@code key}, loading it on a miss.
 * Keys that resolve to a non-null value are remembered in {@code segmentSet}
 * so their access counts can be cleared on close.
 *
 * @param key the cache key to resolve
 * @return the cached or freshly loaded value
 * @throws IOException if memory is insufficient to load the data
 */
public V get(K key) throws IOException {
  V result = cache.get(key);
  if (null != result) {
    segmentSet.add(key);
  }
  return result;
}
origin: org.apache.carbondata/carbondata-hadoop

/**
 * This method will remove the cache for a given key
 *
 * @param keys
 */
/**
 * Evicts the cache entry for every key in the supplied list.
 *
 * @param keys keys whose cached entries should be removed
 */
public void invalidateAll(List<K> keys) {
  for (K currentKey : keys) {
    cache.invalidate(currentKey);
  }
}
origin: org.apache.carbondata/carbondata-hadoop

/**
 * This method will return the value for the given key. It will not check and load
 * the data for the given key
 *
 * @param key
 * @return
 */
/**
 * Returns the value already cached for {@code key}, without attempting to
 * load it on a miss. A hit is tracked in {@code segmentSet} so its access
 * count can be cleared later.
 *
 * @param key the cache key to look up
 * @return the cached value, or {@code null} if absent
 */
public V getIfPresent(K key) {
  V result = cache.getIfPresent(key);
  if (null != result) {
    segmentSet.add(key);
  }
  return result;
}
origin: org.apache.carbondata/carbondata-core

@Override
public void clear(Segment segment) {
  // Drop the segment's tracked block indexes; nothing to do if it was never mapped.
  Set<TableBlockIndexUniqueIdentifier> blockIndexes = segmentMap.remove(segment.getSegmentNo());
  if (blockIndexes == null) {
    return;
  }
  for (TableBlockIndexUniqueIdentifier blockIndex : blockIndexes) {
    TableBlockIndexUniqueIdentifierWrapper blockIndexWrapper =
      new TableBlockIndexUniqueIdentifierWrapper(blockIndex, this.getCarbonTable());
    BlockletDataMapIndexWrapper wrapper = cache.getIfPresent(blockIndexWrapper);
    if (null != wrapper) {
      // Invalidate the cache entry once per wrapper. The previous code invoked
      // cache.invalidate(blockIndexWrapper) inside the data-map loop, repeating
      // the same eviction for every non-null data map.
      cache.invalidate(blockIndexWrapper);
      for (DataMap dataMap : wrapper.getDataMaps()) {
        if (dataMap != null) {
          dataMap.clear();
        }
      }
    }
  }
}
origin: org.apache.carbondata/carbondata-core

/**
 * Collects all coarse-grain data maps for the given segment by wrapping each
 * of its block-index identifiers with the owning table and resolving them
 * through the cache in one batch.
 *
 * @param segment the segment whose data maps are requested
 * @return the data maps gathered from every cached index wrapper
 * @throws IOException if loading the index data fails
 */
@Override public List<CoarseGrainDataMap> getDataMaps(Segment segment) throws IOException {
  Set<TableBlockIndexUniqueIdentifier> identifiers =
    getTableBlockIndexUniqueIdentifiers(segment);
  List<TableBlockIndexUniqueIdentifierWrapper> wrappedIdentifiers =
    new ArrayList<>(identifiers.size());
  for (TableBlockIndexUniqueIdentifier identifier : identifiers) {
    wrappedIdentifiers.add(
      new TableBlockIndexUniqueIdentifierWrapper(identifier, this.getCarbonTable()));
  }
  List<CoarseGrainDataMap> result = new ArrayList<>();
  for (BlockletDataMapIndexWrapper indexWrapper : cache.getAll(wrappedIdentifiers)) {
    result.addAll(indexWrapper.getDataMaps());
  }
  return result;
}
origin: org.apache.carbondata/carbondata-hadoop

/**
 * This method will clear the access count for a given list of segments
 *
 * @param segmentList
 */
/**
 * Clears the access count for each segment key in the given list, and drops
 * those keys from {@code segmentSet} so close() does not decrement them a
 * second time.
 *
 * @param segmentList segment keys whose access counts should be cleared
 */
public void clearAccessCount(List<K> segmentList) {
  cache.clearAccessCount(segmentList);
  // Forget these keys; otherwise close() would decrement their counts again.
  segmentSet.removeAll(segmentList);
}
origin: org.apache.carbondata/carbondata-core

@Override
// Stores the given blocklet data-map index wrapper in the cache under the
// supplied identifier wrapper. Pure delegation to Cache#put.
public void cache(TableBlockIndexUniqueIdentifierWrapper tableBlockIndexUniqueIdentifierWrapper,
  BlockletDataMapIndexWrapper blockletDataMapIndexWrapper) throws IOException, MemoryException {
 cache.put(tableBlockIndexUniqueIdentifierWrapper, blockletDataMapIndexWrapper);
}
origin: org.apache.carbondata/carbondata-core

  .createCache(CacheType.FORWARD_DICTIONARY);
List<Dictionary> columnDictionaryList =
  forwardDictionaryCache.getAll(dictionaryColumnUniqueIdentifiers);
Map<String, Dictionary> columnDictionaryMap = new HashMap<>(columnDictionaryList.size());
for (int i = 0; i < dictionaryColumnUniqueIdentifiers.size(); i++) {
origin: org.apache.carbondata/carbondata-hadoop

/**
 * the method is used to clear access count of the unused segments cacheable object
 */
/**
 * Clears the access counts of every segment still tracked in
 * {@code segmentSet}, then releases the cache reference.
 */
public void close() {
  cache.clearAccessCount(new ArrayList<>(segmentSet));
  cache = null;
}
origin: org.apache.carbondata/carbondata-processing

/**
 * Resolves the reverse dictionary for the given column identifier via the
 * shared {@link CacheProvider}.
 *
 * @param columnIdentifier identifies the dictionary column to load
 * @return the dictionary fetched from the reverse-dictionary cache
 * @throws IOException if the dictionary cannot be loaded
 */
public static Dictionary getDictionary(DictionaryColumnUniqueIdentifier columnIdentifier)
  throws IOException {
 Cache<DictionaryColumnUniqueIdentifier, Dictionary> reverseDictionaryCache =
   CacheProvider.getInstance().createCache(CacheType.REVERSE_DICTIONARY);
 return reverseDictionaryCache.get(columnIdentifier);
}
origin: org.apache.carbondata/carbondata-bloom

/**
 * Removes every bloom cache entry recorded for the given segment: for each
 * shard of the segment, each indexed column's key is invalidated.
 */
@Override
public void clear(Segment segment) {
  Set<String> removedShards = segmentMap.remove(segment.getSegmentNo());
  if (removedShards == null) {
    return;
  }
  for (String shard : removedShards) {
    for (CarbonColumn indexedColumn : dataMapMeta.getIndexedColumns()) {
      cache.invalidate(new BloomCacheKeyValue.CacheKey(shard, indexedColumn.getColName()));
    }
  }
}
origin: org.apache.carbondata/carbondata-core

 /**
  * Filters the given distributables down to those whose block index is not
  * yet present in the cache, attaching the resolved identifier to each one
  * that must still be loaded.
  *
  * @param distributables candidate distributables to check against the cache
  * @return the subset whose index data is not cached yet
  * @throws IOException if the block index identifiers cannot be read
  */
 @Override
 public List<DataMapDistributable> getAllUncachedDistributables(
   List<DataMapDistributable> distributables) throws IOException {
  List<DataMapDistributable> uncached = new ArrayList<>(distributables.size());
  for (DataMapDistributable distributable : distributables) {
   Set<TableBlockIndexUniqueIdentifier> candidates =
     getTableBlockIndexUniqueIdentifiers(distributable.getSegment());
   // Narrow the segment's identifiers to the one matching this distributable.
   TableBlockIndexUniqueIdentifier validIdentifier = BlockletDataMapUtil
     .filterIdentifiersBasedOnDistributable(candidates,
       (BlockletDataMapDistributable) distributable);
   TableBlockIndexUniqueIdentifierWrapper wrappedIdentifier =
     new TableBlockIndexUniqueIdentifierWrapper(validIdentifier, this.getCarbonTable());
   if (null == cache.getIfPresent(wrappedIdentifier)) {
    ((BlockletDataMapDistributable) distributable)
      .setTableBlockIndexUniqueIdentifier(validIdentifier);
    uncached.add(distributable);
   }
  }
  return uncached;
 }
}
origin: org.apache.carbondata/carbondata-core

  cache.getAll(tableBlockIndexUniqueIdentifierWrappers);
for (BlockletDataMapIndexWrapper wrapper : blockletDataMapIndexWrappers) {
 Segment segment = segmentMap.get(wrapper.getSegmentId());
origin: org.apache.carbondata/carbondata-core

/**
 * Locates the detailed (extended) blocklet for the given blocklet by scanning
 * the data maps of every supplied identifier wrapper and matching on the
 * blocklet's file path prefix.
 *
 * @param identifiersWrapper candidate index identifiers to search
 * @param blocklet the blocklet whose extended form is wanted
 * @return the extended blocklet resolved from the matching data map
 * @throws IOException if no data map matches the blocklet's file path
 */
private ExtendedBlocklet getExtendedBlocklet(
  Set<TableBlockIndexUniqueIdentifierWrapper> identifiersWrapper, Blocklet blocklet)
  throws IOException {
 for (TableBlockIndexUniqueIdentifierWrapper candidate : identifiersWrapper) {
  for (DataMap dataMap : cache.get(candidate).getDataMaps()) {
   BlockDataMap blockDataMap = (BlockDataMap) dataMap;
   String indexFileName =
     blockDataMap.getTableTaskInfo(BlockletDataMapRowIndexes.SUMMARY_INDEX_FILE_NAME);
   if (indexFileName.startsWith(blocklet.getFilePath())) {
    return blockDataMap.getDetailedBlocklet(blocklet.getBlockletId());
   }
  }
 }
 throw new IOException("Blocklet not found: " + blocklet.toString());
}
origin: org.apache.carbondata/carbondata-core

 /**
  * Evicts the dictionary entries for one column from the driver-side caches,
  * covering both the reverse and the forward dictionary.
  *
  * @param carbonTableIdentifier identifies the table owning the column
  * @param columnId the unique id of the dictionary column to evict
  */
 public static void removeDictionaryColumnFromCache(AbsoluteTableIdentifier carbonTableIdentifier,
   String columnId) {
  DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier =
    new DictionaryColumnUniqueIdentifier(carbonTableIdentifier,
      new ColumnIdentifier(columnId, null, null));
  Cache<DictionaryColumnUniqueIdentifier, Dictionary> reverseCache =
    CacheProvider.getInstance().createCache(CacheType.REVERSE_DICTIONARY);
  reverseCache.invalidate(dictionaryColumnUniqueIdentifier);
  Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardCache =
    CacheProvider.getInstance().createCache(CacheType.FORWARD_DICTIONARY);
  forwardCache.invalidate(dictionaryColumnUniqueIdentifier);
 }
}
origin: org.apache.carbondata/carbondata-core

List<BlockletDataMapIndexWrapper> wrappers = cache.getAll(identifiersWrapper);
for (BlockletDataMapIndexWrapper wrapper : wrappers) {
 dataMaps.addAll(wrapper.getDataMaps());
origin: org.apache.carbondata/carbondata-bloom

BloomCacheKeyValue.CacheValue cacheValue = cache.get(cacheKey);
List<CarbonBloomFilter> bloomIndexList = cacheValue.getBloomFilters();
for (CarbonBloomFilter bloomFilter : bloomIndexList) {
origin: org.apache.carbondata/carbondata-hadoop

Dictionary dict = (Dictionary) dictCache.get(
  new DictionaryColumnUniqueIdentifier(absoluteTableIdentifier,
    columnIdentifier, dims.get(i).getDataType()));
origin: org.apache.carbondata/carbondata-core

Cache<DictionaryColumnUniqueIdentifier, Dictionary> dictCache = CacheProvider.getInstance()
    .createCache(CacheType.REVERSE_DICTIONARY);
dictionary = dictCache.get(identifier);
origin: org.apache.carbondata/carbondata-processing

  dictionary = cache.get(identifier);
   dictionaryMessage, localCache);
} else {
 dictionary = cache.get(identifier);
 dictionaryGenerator = new PreCreatedDictionary(dictionary);
org.apache.carbondata.core.cache.Cache

Javadoc

A semi-persistent mapping from keys to values. Cache entries are manually added using #get(Key) and #getAll(List), and are stored in the cache until either evicted or manually invalidated. Implementations of this interface are expected to be thread-safe, and can be safely accessed by multiple concurrent threads. This class is also responsible for incrementing and decrementing the access count during get operations.

Most used methods

  • get
    This method will get the value for the given key. If value does not exist for the given key, it will check and load the value.
  • invalidate
    This method will remove the cache for a given key
  • getIfPresent
    This method will return the value for the given key. It will not check and load the data for the given key.
  • clearAccessCount
    Access count of Cacheable entry will be decremented
  • getAll
    This method will return a list of values for the given list of keys. For each key, this method will
  • put
    This method will add the value to the cache for the given key

Popular in Java

  • Running tasks concurrently on multiple threads
  • getOriginalFilename (MultipartFile)
    Return the original filename in the client's filesystem.This may contain path information depending
  • putExtra (Intent)
  • setScale (BigDecimal)
  • ObjectMapper (com.fasterxml.jackson.databind)
    ObjectMapper provides functionality for reading and writing JSON, either to and from basic POJOs (Pl
  • LinkedHashMap (java.util)
    LinkedHashMap is an implementation of Map that guarantees iteration order. All optional operations a
  • TimerTask (java.util)
    The TimerTask class represents a task to run at a specified time. The task may be run once or repeat
  • Filter (javax.servlet)
    A filter is an object that performs filtering tasks on either the request to a resource (a servlet o
  • JTextField (javax.swing)
  • Base64 (org.apache.commons.codec.binary)
    Provides Base64 encoding and decoding as defined by RFC 2045.This class implements section 6.8. Base
  • Top 17 Free Sublime Text Plugins
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimAtomGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyStudentsTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now