FilterResolverIntf

How to use FilterResolverIntf in org.apache.carbondata.core.scan.filter.resolver

Best Java code snippets using org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf (Showing top 14 results out of 315)

origin: org.apache.carbondata/carbondata-core

private DataMapExprWrapper chooseDataMap(DataMapLevel level, FilterResolverIntf resolverIntf) {
 if (resolverIntf != null) {
  Expression expression = resolverIntf.getFilterExpression();
  List<TableDataMap> datamaps = level == DataMapLevel.CG ? cgDataMaps : fgDataMaps;
  if (datamaps.size() > 0) {
   ExpressionTuple tuple = selectDataMap(expression, datamaps, resolverIntf);
   if (tuple.dataMapExprWrapper != null) {
    return tuple.dataMapExprWrapper;
   }
  }
 }
 return null;
}
origin: org.apache.carbondata/carbondata-core

/**
 * Check if the current node needs to be replaced with a TrueFilter expression. This will happen
 * in case the filter column's min/max is not cached in the driver.
 *
 * @param filterExpressionResolverTree
 * @param segmentProperties
 * @param minMaxCacheColumns
 * @return
 */
private static boolean checkIfCurrentNodeToBeReplacedWithTrueFilterExpression(
  FilterResolverIntf filterExpressionResolverTree, SegmentProperties segmentProperties,
  List<CarbonColumn> minMaxCacheColumns) {
 boolean replaceCurrentNodeWithTrueFilter = false;
 ColumnResolvedFilterInfo columnResolvedFilterInfo = null;
 if (null != filterExpressionResolverTree.getMsrColResolvedFilterInfo()) {
  columnResolvedFilterInfo = filterExpressionResolverTree.getMsrColResolvedFilterInfo();
  replaceCurrentNodeWithTrueFilter =
    checkIfFilterColumnIsCachedInDriver(columnResolvedFilterInfo, segmentProperties,
      minMaxCacheColumns, true);
 } else {
  columnResolvedFilterInfo = filterExpressionResolverTree.getDimColResolvedFilterInfo();
  if (!columnResolvedFilterInfo.getDimension().hasEncoding(Encoding.IMPLICIT)) {
   replaceCurrentNodeWithTrueFilter =
     checkIfFilterColumnIsCachedInDriver(columnResolvedFilterInfo, segmentProperties,
       minMaxCacheColumns, false);
  }
 }
 return replaceCurrentNodeWithTrueFilter;
}
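
For orientation, here is a minimal sketch (a hypothetical helper, not part of the CarbonData sources quoted on this page) of how the boolean returned above could drive swapping a node for an always-true resolver; the TrueConditionalResolverImpl constructor arguments are assumed from its usage in the selectDataMap snippet further down.

// Hypothetical illustration: replace a resolver node with a true-filter resolver when the
// filter column's min/max values are not cached in the driver.
private static FilterResolverIntf replaceWithTrueFilterIfNeeded(FilterResolverIntf node,
    SegmentProperties segmentProperties, List<CarbonColumn> minMaxCacheColumns) {
  if (checkIfCurrentNodeToBeReplacedWithTrueFilterExpression(node, segmentProperties,
      minMaxCacheColumns)) {
    // Constructor arguments (expression, isExpressionResolve, isIncludeFilter) are assumed
    // from the TrueConditionalResolverImpl usage shown later on this page.
    return new TrueConditionalResolverImpl(node.getFilterExpression(), false, true);
  }
  return node;
}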
origin: org.apache.carbondata/carbondata-core

private static void traverseResolverTreeAndPopulateStartAndEndKeys(
  FilterResolverIntf filterResolverTree, SegmentProperties segmentProperties, long[] startKeys,
  SortedMap<Integer, byte[]> setOfStartKeyByteArray, long[] endKeys,
  SortedMap<Integer, byte[]> setOfEndKeyByteArray, List<long[]> startKeyList,
  List<long[]> endKeyList) {
 if (null == filterResolverTree) {
  return;
 }
 traverseResolverTreeAndPopulateStartAndEndKeys(filterResolverTree.getLeft(),
   segmentProperties, startKeys, setOfStartKeyByteArray, endKeys, setOfEndKeyByteArray,
   startKeyList, endKeyList);
 filterResolverTree
   .getStartKey(segmentProperties, startKeys, setOfStartKeyByteArray, startKeyList);
 filterResolverTree.getEndKey(segmentProperties, endKeys, setOfEndKeyByteArray,
   endKeyList);
 traverseResolverTreeAndPopulateStartAndEndKeys(filterResolverTree.getRight(),
   segmentProperties, startKeys, setOfStartKeyByteArray, endKeys, setOfEndKeyByteArray,
   startKeyList, endKeyList);
}
origin: org.apache.carbondata/carbondata-core

/**
 * Constructs the filter resolver tree based on the filter expression.
 * This method will visit each node of the filter resolver tree and prepare
 * the surrogates of the filter members which are involved in the filter
 * expression.
 *
 * @param filterResolverTree
 * @param tableIdentifier
 */
private void traverseAndResolveTree(FilterResolverIntf filterResolverTree,
  AbsoluteTableIdentifier tableIdentifier)
  throws FilterUnsupportedException, IOException {
 if (null == filterResolverTree) {
  return;
 }
 traverseAndResolveTree(filterResolverTree.getLeft(), tableIdentifier);
 filterResolverTree.resolve(tableIdentifier);
 traverseAndResolveTree(filterResolverTree.getRight(), tableIdentifier);
}
origin: org.apache.carbondata/carbondata-core

 Map<Integer, GenericQueryType> complexDimensionInfoMap,
 List<CarbonColumn> minMaxCacheColumns) {
FilterExecuterType filterExecuterType = filterExpressionResolverTree.getFilterExecuterType();
if (null != filterExecuterType) {
 switch (filterExecuterType) {
  case INCLUDE:
   if (null != filterExpressionResolverTree.getDimColResolvedFilterInfo()
     && null != filterExpressionResolverTree.getDimColResolvedFilterInfo()
     .getFilterValues() && filterExpressionResolverTree.getDimColResolvedFilterInfo()
     .getFilterValues().isOptimized()) {
     return getExcludeFilterExecuter(
       filterExpressionResolverTree.getDimColResolvedFilterInfo(),
       filterExpressionResolverTree.getMsrColResolvedFilterInfo(), segmentProperties);
    }
    return getIncludeFilterExecuter(
      filterExpressionResolverTree.getDimColResolvedFilterInfo(),
      filterExpressionResolverTree.getMsrColResolvedFilterInfo(), segmentProperties);
  case EXCLUDE:
   return getExcludeFilterExecuter(
     filterExpressionResolverTree.getDimColResolvedFilterInfo(),
     filterExpressionResolverTree.getMsrColResolvedFilterInfo(), segmentProperties);
  case OR:
   return new OrFilterExecuterImpl(
     createFilterExecuterTree(filterExpressionResolverTree.getLeft(), segmentProperties,
       complexDimensionInfoMap, minMaxCacheColumns),
     createFilterExecuterTree(filterExpressionResolverTree.getRight(), segmentProperties,
       complexDimensionInfoMap, minMaxCacheColumns));
  case AND:
   return new AndFilterExecuterImpl(
     createFilterExecuterTree(filterExpressionResolverTree.getLeft(), segmentProperties,
origin: org.apache.carbondata/carbondata-core

 // AND branch: recurse into the left and right children of both the expression and the resolver tree
 AndExpression andExpression = (AndExpression) expression;
 ExpressionTuple left = selectDataMap(andExpression.getLeft(), allDataMap,
   filterResolverIntf.getLeft());
 ExpressionTuple right = selectDataMap(andExpression.getRight(), allDataMap,
   filterResolverIntf.getRight());
 Set<ExpressionType> filterExpressionTypes = new HashSet<>();
 // OR branch: same recursion pattern on the OR expression's children
 OrExpression orExpression = (OrExpression) expression;
 ExpressionTuple left = selectDataMap(orExpression.getLeft(), allDataMap,
   filterResolverIntf.getLeft());
 ExpressionTuple right = selectDataMap(orExpression.getRight(), allDataMap,
   filterResolverIntf.getRight());
 // Leaf: record the expression type, wrap the node in a true-conditional resolver and
 // attach the chosen datamap to the resulting ExpressionTuple
filterExpressionTypes.add(expression.getFilterExpressionType());
TrueConditionalResolverImpl resolver = new TrueConditionalResolverImpl(
  filterResolverIntf.getFilterExpression(), false,
  true);
TableDataMap dataMap =
 tuple.dataMapExprWrapper = new DataMapExprWrapperImpl(dataMap, resolver);
 tuple.filterExpressionTypes.addAll(filterExpressionTypes);
 tuple.expression = filterResolverIntf.getFilterExpression();
origin: org.apache.carbondata/carbondata-core

@Override
public String toString() {
 return String.format("scan on table %s.%s, %d projection columns with filter (%s)",
   table.getDatabaseName(), table.getTableName(),
   projection.getDimensions().size() + projection.getMeasures().size(),
   filterExpressionResolverTree.getFilterExpression().toString());
}
origin: org.apache.carbondata/carbondata-core

public static void getAllFilterDimensionsAndMeasures(FilterResolverIntf filterResolverTree,
  Set<CarbonDimension> filterDimensions, Set<CarbonMeasure> filterMeasure) {
 if (null == filterResolverTree) {
  return;
 }
 List<ColumnExpression> dimensionResolvedInfos = new ArrayList<ColumnExpression>();
 Expression filterExpression = filterResolverTree.getFilterExpression();
 addColumnDimensions(filterExpression, filterDimensions, filterMeasure);
 for (ColumnExpression info : dimensionResolvedInfos) {
  if (info.isDimension() && info.getDimension().getNumberOfChild() > 0) {
   filterDimensions.add(info.getDimension());
  }
 }
}
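
A short usage sketch (hypothetical call site, not taken from the CarbonData sources on this page): pass empty sets and let the utility above populate the dimensions and measures referenced by the filter.

// Hypothetical call site: collect the columns referenced by a resolved filter tree.
Set<CarbonDimension> filterDimensions = new HashSet<>();
Set<CarbonMeasure> filterMeasures = new HashSet<>();
getAllFilterDimensionsAndMeasures(filterResolverTree, filterDimensions, filterMeasures);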
origin: org.apache.carbondata/carbondata-bloom

 bloomQueryModels = createQueryModel(filterExp.getFilterExpression());
} catch (DictionaryGenerationException | UnsupportedEncodingException e) {
 LOGGER.error("Exception occurs while creating query model", e);
origin: org.apache.carbondata/carbondata-lucene

String strQuery = getQueryString(filterExp.getFilterExpression());
int maxDocs;
try {
 maxDocs = getMaxDoc(filterExp.getFilterExpression());
} catch (NumberFormatException e) {
 maxDocs = Integer.MAX_VALUE;
origin: org.apache.carbondata/carbondata-spark-datasource

/**
 * Whether to use lazy load in vector or not.
 * @return
 */
private boolean isUseLazyLoad() {
 boolean useLazyLoad = false;
 if (queryModel.getFilterExpressionResolverTree() != null) {
  Expression expression =
    queryModel.getFilterExpressionResolverTree().getFilterExpression();
  useLazyLoad = true;
   // In case of join queries, only a not-null filter would be pushed down, so check and disable the
  // lazy load in that case.
  if (expression instanceof NotEqualsExpression) {
   try {
    if (((NotEqualsExpression) expression).getRight().evaluate(null).isNull()) {
     useLazyLoad = false;
    }
   } catch (Exception e) {
    throw new RuntimeException(e);
   }
  }
 }
 return useLazyLoad;
}
origin: org.apache.carbondata/carbondata-core

/**
 * Return a chosen datamap based on input filter. See {@link DataMapChooser}
 */
public DataMapExprWrapper choose(FilterResolverIntf filter) {
 if (filter != null) {
  Expression expression = filter.getFilterExpression();
  // First check for FG datamaps if any exist
  ExpressionTuple tuple = selectDataMap(expression, fgDataMaps, filter);
  if (tuple.dataMapExprWrapper == null) {
   // Check for CG datamap
   tuple = selectDataMap(expression, cgDataMaps, filter);
  }
  if (tuple.dataMapExprWrapper != null) {
   return tuple.dataMapExprWrapper;
  }
 }
 // Return the default datamap if no other datamap exists.
 return new DataMapExprWrapperImpl(
   DataMapStoreManager.getInstance().getDefaultDataMap(carbonTable), filter);
}
origin: org.apache.carbondata/carbondata-spark2

private void initializeFilter() {
 List<ColumnSchema> wrapperColumnSchemaList = CarbonUtil
   .getColumnSchemaList(carbonTable.getDimensionByTableName(carbonTable.getTableName()),
     carbonTable.getMeasureByTableName(carbonTable.getTableName()));
 int[] dimLensWithComplex = new int[wrapperColumnSchemaList.size()];
 for (int i = 0; i < dimLensWithComplex.length; i++) {
  dimLensWithComplex[i] = Integer.MAX_VALUE;
 }
 int[] dictionaryColumnCardinality =
   CarbonUtil.getFormattedCardinality(dimLensWithComplex, wrapperColumnSchemaList);
 SegmentProperties segmentProperties =
   new SegmentProperties(wrapperColumnSchemaList, dictionaryColumnCardinality);
 Map<Integer, GenericQueryType> complexDimensionInfoMap = new HashMap<>();
 FilterResolverIntf resolverIntf = model.getFilterExpressionResolverTree();
 filter = FilterUtil.getFilterExecuterTree(resolverIntf, segmentProperties,
   complexDimensionInfoMap);
  // for row filter, we need to update the column index
 FilterUtil.updateIndexOfColumnExpression(resolverIntf.getFilterExpression(),
   carbonTable.getDimensionOrdinalMax());
}
origin: org.apache.carbondata/carbondata-core

// Constructor arguments extracted from the resolver tree for a row-level range filter
// executer; the same pattern repeats for each range filter type.
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
  .getMsrColEvalutorInfoList(),
filterExpressionResolverTree.getFilterExpression(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree).getTableIdentifier(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf

Most used methods

  • getFilterExpression
  • getDimColResolvedFilterInfo
    API will return the resolved dimension filter instance; this instance will provide the resolved surrogates based on the applied filter.
  • getEndKey
    API will read the end key based on the max surrogate of the particular dimension column.
  • getFilterExecuterType
    API will return the filter executer type which will be used to evaluate the resolved filter during query execution.
  • getLeft
    This API will provide the left column filter expression in order to resolve the left expression filter.
  • getMsrColResolvedFilterInfo
    API will return the resolved measure filter instance; this instance will provide the resolved surrogates based on the applied filter.
  • getRight
    API will provide the right column filter expression in order to resolve the right expression filter.
  • getStartKey
    API will get the start key based on the filter applied, using the key generator.
  • resolve
    This API will resolve the filter expression and generate the dictionaries needed for executing/evaluating the filter.
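
Taken together, these methods describe a binary resolver tree. Below is a minimal traversal sketch (a hypothetical utility using only the methods listed above), mirroring the in-order recursion of the traverseAndResolveTree snippet earlier on this page.

// Hypothetical utility: in-order walk of the resolver tree collecting each node's expression.
public static void collectFilterExpressions(FilterResolverIntf node, List<Expression> out) {
  if (null == node) {
    return;
  }
  collectFilterExpressions(node.getLeft(), out);
  if (null != node.getFilterExpression()) {
    out.add(node.getFilterExpression());
  }
  collectFilterExpressions(node.getRight(), out);
}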
