Tabnine Logo
CarbonColumn.isComplex
Code IndexAdd Tabnine to your IDE (free)

How to use
isComplex
method
in
org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn

Best Java code snippets using org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn.isComplex (Showing top 15 results out of 315)

origin: org.apache.carbondata/carbondata-core

/**
 * Builds the column list in stream storage order for the given table:
 * primitive dimensions first, then complex dimensions, then every measure
 * except the internal "default_dummy_measure" placeholder.
 */
public List<CarbonColumn> getStreamStorageOrderColumn(String tableName) {
  List<CarbonDimension> dimensions = tableDimensionsMap.get(tableName);
  List<CarbonMeasure> measures = tableMeasuresMap.get(tableName);
  List<CarbonColumn> orderedColumns = new ArrayList<>(dimensions.size() + measures.size());
  List<CarbonColumn> complexColumns = new ArrayList<>(dimensions.size());
  // Partition dimensions: primitives go straight into the result,
  // complex ones are deferred so they land after all primitives.
  for (CarbonColumn dimension : dimensions) {
    if (dimension.isComplex()) {
      complexColumns.add(dimension);
    } else {
      orderedColumns.add(dimension);
    }
  }
  orderedColumns.addAll(complexColumns);
  for (CarbonColumn measure : measures) {
    // Skip the dummy measure injected for tables with no real measures.
    if (!(measure.getColName().equals("default_dummy_measure"))) {
      orderedColumns.add(measure);
    }
  }
  return orderedColumns;
}
origin: org.apache.carbondata/carbondata-processing

/**
 * Maps each dimension (up to, but excluding, the first complex column) to
 * whether it is a no-dictionary dimension. Measures contribute no entry.
 */
public static boolean[] getNoDictionaryMapping(CarbonColumn[] carbonColumns) {
  List<Boolean> noDictionaryFlags = new ArrayList<Boolean>();
  for (CarbonColumn column : carbonColumns) {
    // Complex columns always come last; once one is seen the mapping is done.
    if (column.isComplex()) {
      break;
    }
    if (column.isDimension()) {
      // true when the dimension carries no dictionary encoding
      noDictionaryFlags.add(!column.hasEncoding(Encoding.DICTIONARY));
    }
  }
  Boolean[] boxed = noDictionaryFlags.toArray(new Boolean[noDictionaryFlags.size()]);
  return ArrayUtils.toPrimitive(boxed);
}
origin: org.apache.carbondata/carbondata-processing

/**
 * Preparing the boolean [] to map whether the dimension is no Dictionary or not.
 */
public static boolean[] getNoDictionaryMapping(DataField[] fields) {
 List<Boolean> noDictionaryMapping = new ArrayList<Boolean>();
 for (DataField field : fields) {
  // for  complex type need to break the loop
  if (field.getColumn().isComplex()) {
   break;
  }
  if (!field.hasDictionaryEncoding() && field.getColumn().isDimension()) {
   noDictionaryMapping.add(true);
  } else if (field.getColumn().isDimension()) {
   noDictionaryMapping.add(false);
  }
 }
 return ArrayUtils
   .toPrimitive(noDictionaryMapping.toArray(new Boolean[noDictionaryMapping.size()]));
}
origin: org.apache.carbondata/carbondata-processing

/**
 * Preparing the boolean [] to map whether the dimension is varchar data type or not.
 */
public static boolean[] getIsVarcharColumnMapping(DataField[] fields) {
 List<Boolean> isVarcharColumnMapping = new ArrayList<Boolean>();
 for (DataField field : fields) {
  // for complex type need to break the loop
  if (field.getColumn().isComplex()) {
   break;
  }
  if (field.getColumn().isDimension()) {
   isVarcharColumnMapping.add(
     field.getColumn().getColumnSchema().getDataType() == DataTypes.VARCHAR);
  }
 }
 return ArrayUtils.toPrimitive(
   isVarcharColumnMapping.toArray(new Boolean[isVarcharColumnMapping.size()]));
}
origin: org.apache.carbondata/carbondata-processing

/**
 * Walks every complex field and recurses into its child dimensions so their
 * ordinals are accumulated into {@code complexNoDictionary} (the actual
 * recording happens in the overload that takes the child dimension list).
 */
public static void getComplexNoDictionaryMapping(DataField[] dataFields,
    List<Integer> complexNoDictionary) {
  for (DataField dataField : dataFields) {
    if (!dataField.getColumn().isComplex()) {
      continue;
    }
    // Descend into the children of this complex dimension.
    List<CarbonDimension> children =
        ((CarbonDimension) dataField.getColumn()).getListOfChildDimensions();
    getComplexNoDictionaryMapping(children, complexNoDictionary);
  }
}
origin: org.apache.carbondata/carbondata-processing

/**
 * Get the data types of the no dictionary and the complex dimensions of the table
 *
 * @return
 */
public CarbonColumn[] getNoDictAndComplexDimensions() {
 List<Integer> noDicOrCompIndexes = new ArrayList<>(dataFields.length);
 int noDicCount = 0;
 for (int i = 0; i < dataFields.length; i++) {
  if (dataFields[i].getColumn().isDimension() && (
    !(dataFields[i].getColumn().hasEncoding(Encoding.DICTIONARY)) || dataFields[i].getColumn()
      .isComplex())) {
   noDicOrCompIndexes.add(i);
   noDicCount++;
  }
 }
 CarbonColumn[] dims = new CarbonColumn[noDicCount];
 for (int i = 0; i < dims.length; i++) {
  dims[i] = dataFields[noDicOrCompIndexes.get(i)].getColumn();
 }
 return dims;
}
origin: org.apache.carbondata/carbondata-processing

/**
 * Builds a GenericDataType for every complex source field and registers it in
 * the supplied map, keyed by the column's ordinal.
 */
private void convertComplexDataType(Map<Integer, GenericDataType> dataFieldsWithComplexDataType) {
  DataField[] sourceFields = configuration.getDataFields();
  FieldEncoderFactory encoderFactory = FieldEncoderFactory.getInstance();
  String nullFormat = configuration
      .getDataLoadProperty(DataLoadProcessorConstants.SERIALIZATION_NULL_FORMAT)
      .toString();
  boolean isEmptyBadRecord = Boolean.parseBoolean(configuration
      .getDataLoadProperty(DataLoadProcessorConstants.IS_EMPTY_DATA_BAD_RECORD)
      .toString());
  for (int index = 0; index < sourceFields.length; index++) {
    DataField sourceField = sourceFields[index];
    if (!sourceField.getColumn().isComplex()) {
      continue;
    }
    // Register a ComplexDataType for this field under its column ordinal.
    GenericDataType complexType = encoderFactory
        .createComplexDataType(sourceField, configuration.getTableIdentifier(), null,
            false, null, index, nullFormat, isEmptyBadRecord);
    dataFieldsWithComplexDataType.put(sourceField.getColumn().getOrdinal(), complexType);
  }
}
origin: org.apache.carbondata/carbondata-processing

/**
 * Stores the data fields and tallies the per-category column counts
 * (dimensions, measures, no-dictionary, complex dictionary/non-dictionary).
 */
public void setDataFields(DataField[] dataFields) {
  this.dataFields = dataFields;
  for (DataField dataField : dataFields) {
    CarbonColumn column = dataField.getColumn();
    if (column.isMeasure()) {
      measureCount++;
    }
    if (!column.isDimension()) {
      continue;
    }
    dimensionCount++;
    boolean hasDictionary = dataField.hasDictionaryEncoding();
    if (column.isComplex()) {
      // Complex dimensions split by whether they use dictionary encoding.
      if (hasDictionary) {
        complexDictionaryColumnCount++;
      } else {
        complexNonDictionaryColumnCount++;
      }
    } else if (!hasDictionary) {
      noDictionaryCount++;
    }
  }
}
origin: org.apache.carbondata/carbondata-streaming

/**
 * Builds a GenericQueryType for every complex column (array or struct) and
 * fills in its children; non-complex positions stay null.
 *
 * @throws UnsupportedOperationException for a complex type that is neither
 *         array nor struct
 */
public static GenericQueryType[] getComplexDimensions(CarbonTable carbontable,
    CarbonColumn[] carbonColumns, Cache<DictionaryColumnUniqueIdentifier, Dictionary> cache)
    throws IOException {
  GenericQueryType[] queryTypes = new GenericQueryType[carbonColumns.length];
  for (int i = 0; i < carbonColumns.length; i++) {
    CarbonColumn column = carbonColumns[i];
    if (!column.isComplex()) {
      continue;
    }
    String columnName = column.getColName();
    if (DataTypes.isArrayType(column.getDataType())) {
      queryTypes[i] = new ArrayQueryType(columnName, columnName, i);
    } else if (DataTypes.isStructType(column.getDataType())) {
      queryTypes[i] = new StructQueryType(columnName, columnName, i);
    } else {
      throw new UnsupportedOperationException(
        column.getDataType().getName() + " is not supported");
    }
    fillChildren(carbontable, queryTypes[i], (CarbonDimension) column, i, cache);
  }
  return queryTypes;
}
origin: org.apache.carbondata/carbondata-hadoop

/**
 * This initialization is done inside executor task
 * for column dictionary involved in decoding.
 *
 * @param carbonColumns column list
 * @param carbonTable table identifier
 */
/**
 * Executor-side initialization: records each column's data type and, for
 * plain dictionary-encoded non-complex columns, resolves a forward dictionary
 * from the cache for use during decoding.
 */
@Override public void initialize(CarbonColumn[] carbonColumns,
  CarbonTable carbonTable) throws IOException {
 this.carbonColumns = carbonColumns;
 dictionaries = new Dictionary[carbonColumns.length];
 dataTypes = new DataType[carbonColumns.length];
 for (int i = 0; i < carbonColumns.length; i++) {
  CarbonColumn column = carbonColumns[i];
  dataTypes[i] = column.getDataType();
  // Only plain dictionary columns need a forward dictionary; direct
  // dictionary and complex columns are decoded without one.
  boolean needsForwardDictionary = column.hasEncoding(Encoding.DICTIONARY)
    && !column.hasEncoding(Encoding.DIRECT_DICTIONARY)
    && !column.isComplex();
  if (needsForwardDictionary) {
   Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardDictionaryCache =
     CacheProvider.getInstance().createCache(CacheType.FORWARD_DICTIONARY);
   String dictionaryPath = carbonTable.getTableInfo().getFactTable().getTableProperties()
     .get(CarbonCommonConstants.DICTIONARY_PATH);
   dictionaries[i] = forwardDictionaryCache.get(new DictionaryColumnUniqueIdentifier(
     carbonTable.getAbsoluteTableIdentifier(),
     column.getColumnIdentifier(), dataTypes[i], dictionaryPath));
  }
 }
}
origin: org.apache.carbondata/carbondata-processing

CarbonColumn column = dataField.getColumn();
if (!dataField.hasDictionaryEncoding()) {
 if (!column.isComplex() && column.getDataType() == DataTypes.VARCHAR) {
  varcharDimIdxInNoDict.add(noDictionayDimensionIndex);
origin: org.apache.carbondata/carbondata-processing

  !dataField.getColumn().isComplex()) {
 return new DirectDictionaryFieldConverterImpl(dataField, nullFormat, index,
   isEmptyBadRecord);
} else if (dataField.getColumn().hasEncoding(Encoding.DICTIONARY) &&
  !dataField.getColumn().isComplex()) {
 DictionaryColumnUniqueIdentifier identifier = null;
    nullFormat, index, null, false, null, isEmptyBadRecord, identifier);
} else if (dataField.getColumn().isComplex()) {
 return new ComplexFieldConverterImpl(
   createComplexDataType(dataField, absoluteTableIdentifier,
origin: org.apache.carbondata/carbondata-processing

 column.setTimestampFormat(loadModel.getTimestampformat());
if (column.isComplex()) {
 complexDataFields.add(dataField);
 List<CarbonDimension> childDimensions =
origin: org.apache.carbondata/carbondata-core

condExpression = (ConditionalExpression) expression;
column = condExpression.getColumnList().get(0).getCarbonColumn();
if (condExpression.isSingleColumn() && ! column.isComplex()) {
 condExpression = (ConditionalExpression) expression;
 if ((condExpression.getColumnList().get(0).getCarbonColumn()
origin: org.apache.carbondata/carbondata-spark-datasource

 fields[i] = new StructField(carbonColumn.getColName(),
   convertCarbonToSparkDataType(carbonColumn.getDataType()), true, null);
} else if (carbonColumn.isComplex()) {
 fields[i] = new StructField(carbonColumn.getColName(),
   convertCarbonToSparkDataType(carbonColumn.getDataType()), true, null);
org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn.isComplex

Popular methods of CarbonColumn

  • getDataType
  • getColName
  • hasEncoding
  • getColumnSchema
  • isDimension
  • isMeasure
  • getOrdinal
  • getColumnId
  • getColumnIdentifier
  • <init>
  • getColumnProperties
  • getDefaultValue
  • getColumnProperties,
  • getDefaultValue,
  • getEncoder,
  • getSchemaOrdinal,
  • isInvisible,
  • isUseInvertedIndex,
  • setDateFormat,
  • setTimestampFormat,
  • setUseActualData

Popular in Java

  • Running tasks concurrently on multiple threads
  • addToBackStack (FragmentTransaction)
  • notifyDataSetChanged (ArrayAdapter)
  • setRequestProperty (URLConnection)
  • Pointer (com.sun.jna)
    An abstraction for a native pointer data type. A Pointer instance represents, on the Java side, a na
  • OutputStream (java.io)
    A writable sink for bytes.Most clients will use output streams that write data to the file system (
  • ByteBuffer (java.nio)
    A buffer for bytes. A byte buffer can be created in either one of the following ways: * #allocate
  • KeyStore (java.security)
    KeyStore is responsible for maintaining cryptographic keys and their owners. The type of the syste
  • IOUtils (org.apache.commons.io)
    General IO stream manipulation utilities. This class provides static utility methods for input/outpu
  • Join (org.hibernate.mapping)
  • Best plugins for Eclipse
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now