Tabnine Logo
CarbonColumn.hasEncoding
Code IndexAdd Tabnine to your IDE (free)

How to use
hasEncoding
method
in
org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn

Best Java code snippets using org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn.hasEncoding (Showing top 13 results out of 315)

origin: org.apache.carbondata/carbondata-processing

/**
 * Reports whether the wrapped column is dictionary encoded.
 *
 * @return {@code true} if the column carries the {@code DICTIONARY} encoding
 */
public boolean hasDictionaryEncoding() {
  final boolean dictionaryEncoded = column.hasEncoding(Encoding.DICTIONARY);
  return dictionaryEncoded;
}
origin: org.apache.carbondata/carbondata-processing

/**
 * Builds one flag per leading non-complex dimension column: {@code true} when the
 * dimension is stored WITHOUT a dictionary, {@code false} when it is dictionary
 * encoded. Measures contribute no entry, and the scan stops at the first complex
 * column, so complex types and everything after them are excluded.
 *
 * @param carbonColumns table columns in schema order
 * @return primitive no-dictionary mapping for the leading simple dimensions
 */
public static boolean[] getNoDictionaryMapping(CarbonColumn[] carbonColumns) {
  List<Boolean> flags = new ArrayList<Boolean>();
  for (CarbonColumn col : carbonColumns) {
    // a complex column marks the end of the simple-dimension section
    if (col.isComplex()) {
      break;
    }
    if (col.isDimension()) {
      // no-dictionary dimension <=> DICTIONARY encoding absent
      flags.add(!col.hasEncoding(Encoding.DICTIONARY));
    }
  }
  return ArrayUtils.toPrimitive(flags.toArray(new Boolean[flags.size()]));
}
origin: org.apache.carbondata/carbondata-bloom

/**
 * Converts {@code value} of the index column at {@code indexColIdx} into bytes
 * and adds it to that column's bloom filter.
 *
 * Conversion rules:
 *  - measures: raw bytes for the column's data type (booleans are first mapped
 *    to a byte, since Carbon stores boolean as byte)
 *  - dictionary / direct-dictionary dimensions: dictionary conversion
 *  - other dimensions: plain bytes without length prefix or encoding
 * An empty result is replaced by the member-default sentinel so NULL-like
 * values still hash into the filter.
 *
 * @param indexColIdx position of the column within {@code indexColumns}
 * @param value       the column's value for the current row
 */
protected void addValue2BloomIndex(int indexColIdx, Object value) {
  // hoist the repeated indexColumns.get(indexColIdx) lookups into one local
  CarbonColumn indexColumn = indexColumns.get(indexColIdx);
  byte[] indexValue;
  // convert measure to bytes
  // convert non-dict dimensions to simple bytes without length
  // convert internal-dict dimensions to simple bytes without any encode
  if (indexColumn.isMeasure()) {
    // NULL value of all measures are already processed in `ColumnPage.getData`
    // or `RawBytesReadSupport.readRow` with actual data type
    // Carbon stores boolean as byte. Here we convert it for `getValueAsBytes`
    if (indexColumn.getDataType().equals(DataTypes.BOOLEAN)) {
      value = BooleanConvert.boolean2Byte((Boolean) value);
    }
    indexValue = CarbonUtil.getValueAsBytes(indexColumn.getDataType(), value);
  } else {
    if (indexColumn.hasEncoding(Encoding.DICTIONARY)
        || indexColumn.hasEncoding(Encoding.DIRECT_DICTIONARY)) {
      indexValue = convertDictionaryValue(indexColIdx, value);
    } else {
      indexValue = convertNonDictionaryValue(indexColIdx, value);
    }
  }
  if (indexValue.length == 0) {
    // empty bytes would never match a query-side key; use the default-member marker
    indexValue = CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY;
  }
  indexBloomFilters.get(indexColIdx).add(new Key(indexValue));
}
origin: org.apache.carbondata/carbondata-processing

/**
 * Collects the dimension columns that are either no-dictionary or complex,
 * preserving data-field order.
 *
 * Simplification: the original kept a parallel index list plus a manual
 * counter and resolved the columns in a second loop; the columns are now
 * gathered directly in a single pass.
 *
 * @return dimensions lacking DICTIONARY encoding, plus complex dimensions
 */
public CarbonColumn[] getNoDictAndComplexDimensions() {
  List<CarbonColumn> noDicOrCompColumns = new ArrayList<>(dataFields.length);
  for (int i = 0; i < dataFields.length; i++) {
    CarbonColumn column = dataFields[i].getColumn();
    // dimension AND (no dictionary OR complex) — measures are never included
    if (column.isDimension()
        && (!column.hasEncoding(Encoding.DICTIONARY) || column.isComplex())) {
      noDicOrCompColumns.add(column);
    }
  }
  return noDicOrCompColumns.toArray(new CarbonColumn[noDicOrCompColumns.size()]);
}
origin: org.apache.carbondata/carbondata-spark

else if (carbonColumns[i].hasEncoding(Encoding.DIRECT_DICTIONARY)) {
origin: org.apache.carbondata/carbondata-hadoop

/**
 * This initialization is done inside executor task
 * for column dictionary involved in decoding.
 *
 * Fix: the forward-dictionary cache was re-created via
 * {@code CacheProvider.getInstance().createCache(...)} on EVERY dictionary
 * column iteration; it is loop-invariant and is now created lazily, once.
 * The duplicated {@code dataTypes[i]} assignment in both branches is
 * consolidated as well.
 *
 * @param carbonColumns column list
 * @param carbonTable table identifier
 * @throws IOException if the dictionary cannot be loaded from the cache
 */
@Override public void initialize(CarbonColumn[] carbonColumns,
    CarbonTable carbonTable) throws IOException {
  this.carbonColumns = carbonColumns;
  dictionaries = new Dictionary[carbonColumns.length];
  dataTypes = new DataType[carbonColumns.length];
  // created lazily so tables without plain dictionary columns never touch the cache
  Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardDictionaryCache = null;
  for (int i = 0; i < carbonColumns.length; i++) {
    // the data type is recorded for every column, dictionary-encoded or not
    dataTypes[i] = carbonColumns[i].getDataType();
    // forward dictionaries apply only to plain-dictionary, non-complex columns;
    // direct-dictionary columns are decoded arithmetically and need no lookup table
    if (carbonColumns[i].hasEncoding(Encoding.DICTIONARY) && !carbonColumns[i]
        .hasEncoding(Encoding.DIRECT_DICTIONARY) && !carbonColumns[i].isComplex()) {
      if (forwardDictionaryCache == null) {
        forwardDictionaryCache =
            CacheProvider.getInstance().createCache(CacheType.FORWARD_DICTIONARY);
      }
      String dictionaryPath = carbonTable.getTableInfo().getFactTable().getTableProperties()
          .get(CarbonCommonConstants.DICTIONARY_PATH);
      dictionaries[i] = forwardDictionaryCache.get(new DictionaryColumnUniqueIdentifier(
          carbonTable.getAbsoluteTableIdentifier(),
          carbonColumns[i].getColumnIdentifier(), dataTypes[i], dictionaryPath));
    }
  }
}
origin: org.apache.carbondata/carbondata-spark2

directDictionaryGenerators = new DirectDictionaryGenerator[storageColumns.length];
for (int i = 0; i < storageColumns.length; i++) {
 if (storageColumns[i].hasEncoding(Encoding.DIRECT_DICTIONARY)) {
  directDictionaryGenerators[i] = DirectDictionaryKeyGeneratorFactory
    .getDirectDictionaryGenerator(storageColumns[i].getDataType());
origin: org.apache.carbondata/carbondata-bloom

} else if (carbonColumn.hasEncoding(Encoding.DIRECT_DICTIONARY) ||
  carbonColumn.hasEncoding(Encoding.DICTIONARY)) {
origin: org.apache.carbondata/carbondata-processing

/**
 * Prepares this step for execution: builds the row converter that serves as
 * cardinality finder, derives the no-dictionary mapping, resolves complex-type
 * data fields, and records the target data type per field — dictionary-encoded
 * columns are materialized as surrogate INT keys, all others keep their own type.
 *
 * @throws IOException if converter initialization fails
 */
@Override public void initialize() throws IOException {
  super.initialize();
  // if logger is enabled then raw data will be required.
  RowConverterImpl converter =
      new RowConverterImpl(configuration.getDataFields(), configuration, null);
  converter.initialize();
  configuration.setCardinalityFinder(converter);
  noDictionaryMapping =
      CarbonDataProcessorUtil.getNoDictionaryMapping(configuration.getDataFields());
  dataFieldsWithComplexDataType = new HashMap<>();
  convertComplexDataType(dataFieldsWithComplexDataType);
  dataTypes = new DataType[configuration.getDataFields().length];
  for (int idx = 0; idx < dataTypes.length; idx++) {
    // dictionary columns carry INT surrogate keys; others keep their declared type
    dataTypes[idx] =
        configuration.getDataFields()[idx].getColumn().hasEncoding(Encoding.DICTIONARY)
            ? DataTypes.INT
            : configuration.getDataFields()[idx].getColumn().getDataType();
  }
  orderOfData = arrangeData(configuration.getDataFields(), configuration.getHeader());
}
origin: org.apache.carbondata/carbondata-processing

    carbonColumn.hasEncoding(Encoding.DICTIONARY));
for (CarbonDimension dimension : listOfChildDimensions) {
 arrayDataType.addChildren(
    carbonColumn.hasEncoding(Encoding.DICTIONARY));
for (CarbonDimension dimension : dimensions) {
 structDataType.addChildren(
origin: org.apache.carbondata/carbondata-core

CarbonColumn carbonColumn =
  currentCondExpression.getColumnList().get(0).getCarbonColumn();
if (carbonColumn.hasEncoding(Encoding.IMPLICIT)) {
 return new ConditionalFilterResolverImpl(expression, isExpressionResolve, true,
   currentCondExpression.getColumnList().get(0).getCarbonColumn().isMeasure());
  .hasEncoding(Encoding.DICTIONARY) || currentCondExpression.getColumnList().get(0)
  .getCarbonColumn().hasEncoding(Encoding.DIRECT_DICTIONARY)) {
 if (FilterUtil.checkIfExpressionContainsColumn(currentCondExpression.getLeft())
   && FilterUtil.checkIfExpressionContainsColumn(currentCondExpression.getRight()) || (
  .hasEncoding(Encoding.DICTIONARY) || currentCondExpression.getColumnList().get(0)
  .getCarbonColumn().hasEncoding(Encoding.DIRECT_DICTIONARY)) {
 if (FilterUtil.checkIfExpressionContainsColumn(currentCondExpression.getLeft())
   && FilterUtil.checkIfExpressionContainsColumn(currentCondExpression.getRight()) || (
 condExpression = (ConditionalExpression) expression;
 if ((condExpression.getColumnList().get(0).getCarbonColumn()
   .hasEncoding(Encoding.DICTIONARY) && !condExpression.getColumnList().get(0)
   .getCarbonColumn().hasEncoding(Encoding.DIRECT_DICTIONARY))
   || (condExpression.getColumnList().get(0).getCarbonColumn().isMeasure())) {
  return new ConditionalFilterResolverImpl(expression, true, true,
origin: org.apache.carbondata/carbondata-spark-datasource

CarbonColumn carbonColumn = carbonColumns[i];
if (carbonColumn.isDimension()) {
 if (carbonColumn.hasEncoding(Encoding.DIRECT_DICTIONARY)) {
  DirectDictionaryGenerator generator = DirectDictionaryKeyGeneratorFactory
    .getDirectDictionaryGenerator(carbonColumn.getDataType());
  fields[i] = new StructField(carbonColumn.getColName(),
    convertCarbonToSparkDataType(generator.getReturnType()), true, null);
 } else if (!carbonColumn.hasEncoding(Encoding.DICTIONARY)) {
  fields[i] = new StructField(carbonColumn.getColName(),
    convertCarbonToSparkDataType(carbonColumn.getDataType()), true, null);
origin: org.apache.carbondata/carbondata-processing

if (dataField.getColumn().hasEncoding(Encoding.DIRECT_DICTIONARY) &&
  !dataField.getColumn().isComplex()) {
 return new DirectDictionaryFieldConverterImpl(dataField, nullFormat, index,
   isEmptyBadRecord);
} else if (dataField.getColumn().hasEncoding(Encoding.DICTIONARY) &&
  !dataField.getColumn().isComplex()) {
 DictionaryColumnUniqueIdentifier identifier = null;
org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn.hasEncoding

Popular methods of CarbonColumn

  • getDataType
  • getColName
  • isComplex
  • getColumnSchema
  • isDimension
  • isMeasure
  • getOrdinal
  • getColumnId
  • getColumnIdentifier
  • <init>
  • getColumnProperties
  • getDefaultValue
  • getColumnProperties,
  • getDefaultValue,
  • getEncoder,
  • getSchemaOrdinal,
  • isInvisible,
  • isUseInvertedIndex,
  • setDateFormat,
  • setTimestampFormat,
  • setUseActualData

Popular in Java

  • Running tasks concurrently on multiple threads
  • orElseThrow (Optional)
    Return the contained value, if present, otherwise throw an exception to be created by the provided s
  • runOnUiThread (Activity)
  • compareTo (BigDecimal)
  • InputStream (java.io)
    A readable source of bytes.Most clients will use input streams that read data from the file system (
  • RandomAccessFile (java.io)
    Allows reading from and writing to a file in a random-access manner. This is different from the uni-
  • Deque (java.util)
    A linear collection that supports element insertion and removal at both ends. The name deque is shor
  • Hashtable (java.util)
    A plug-in replacement for JDK1.5 java.util.Hashtable. This version is based on org.cliffc.high_scale
  • ImageIO (javax.imageio)
  • StringUtils (org.apache.commons.lang)
    Operations on java.lang.String that arenull safe. * IsEmpty/IsBlank - checks if a String contains
  • Best IntelliJ plugins
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now