/**
 * Converts the given string representation of a value to an instance of its
 * actual data type, delegating to the converter-aware overload with the
 * default data type converter.
 *
 * @param data           value as a string
 * @param actualDataType target data type of the value
 * @return converted value, as produced by the converter-aware overload
 */
public static Object getDataBasedOnDataType(String data, DataType actualDataType) {
  return getDataBasedOnDataType(data, actualDataType, getDataTypeConverter());
}
/**
 * Decodes a struct value from the buffer: the first short is the child count,
 * followed by each child's serialized value in declaration order.
 *
 * @param dataBuffer buffer positioned at this struct column's data
 * @return engine-specific generic row wrapping the decoded child values
 */
@Override
public Object getDataBasedOnDataType(ByteBuffer dataBuffer) {
  // Leading short encodes how many child values follow in the buffer.
  short numChildren = dataBuffer.getShort();
  Object[] childValues = new Object[numChildren];
  for (int idx = 0; idx < numChildren; idx++) {
    childValues[idx] = children.get(idx).getDataBasedOnDataType(dataBuffer);
  }
  // Wrap in the computing engine's row representation via the converter.
  return DataTypeUtil.getDataTypeConverter().wrapWithGenericRow(childValues);
}
/**
 * Decodes an array value from the buffer and wraps it in the computing
 * engine's array representation.
 *
 * @param dataBuffer buffer positioned at this array column's data
 * @return engine-specific array wrapper, or null when the stored array is null
 */
@Override
public Object getDataBasedOnDataType(ByteBuffer dataBuffer) {
  Object[] elements = fillData(dataBuffer);
  return (elements == null)
      ? null
      : DataTypeUtil.getDataTypeConverter().wrapWithGenericArrayData(elements);
}
/**
 * Computes the typed default value of a no-dictionary column from its
 * byte-encoded representation.
 *
 * @param datatype     data type of the column
 * @param defaultValue byte-encoded default value (may encode null)
 * @return default value converted to the column's type, or null when the
 *         encoded default represents null
 */
private static Object getNoDictionaryDefaultValue(DataType datatype, byte[] defaultValue) {
  if (isDefaultValueNull(defaultValue)) {
    return null;
  }
  if (datatype == DataTypes.INT) {
    return Integer.parseInt(
        new String(defaultValue, Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET)));
  }
  if (datatype == DataTypes.LONG) {
    return Long.parseLong(
        new String(defaultValue, Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET)));
  }
  if (datatype == DataTypes.TIMESTAMP) {
    // Timestamp defaults are xor-encoded longs; the *1000 scales to the
    // internal timestamp unit — presumably millis to micros, TODO confirm.
    return ByteUtil.toXorLong(defaultValue, 0, defaultValue.length) * 1000L;
  }
  // Everything else is carried as UTF-8 bytes via the engine converter.
  return DataTypeUtil.getDataTypeConverter().convertFromByteToUTF8Bytes(defaultValue);
}
/**
 * Map data is physically stored as Array&lt;Struct&lt;key, value&gt;&gt;. The stored
 * representation is materialised first, then every struct entry is split into
 * its key and value, because Spark integration expects the data as
 * ArrayBasedMapData(keyArray, valueArray) and the SDK receives an object array
 * in the same format as returned to Spark.
 *
 * @param dataBuffer buffer positioned at this map column's data
 * @return engine-specific map wrapper, or null when the stored array is null
 */
@Override
public Object getDataBasedOnDataType(ByteBuffer dataBuffer) {
  Object[] entries = fillData(dataBuffer);
  if (entries == null) {
    return null;
  }
  int size = entries.length;
  Object[] keys = new Object[size];
  Object[] values = new Object[size];
  for (int i = 0; i < size; i++) {
    // Each entry is a generic row holding exactly [key, value].
    Object[] pair = DataTypeUtil.getDataTypeConverter().unwrapGenericRowToObject(entries[i]);
    keys[i] = pair[0];
    values[i] = pair[1];
  }
  return DataTypeUtil.getDataTypeConverter().wrapWithArrayBasedMapData(keys, values);
}
/**
 * Converts a decimal measure value to the computing engine's decimal
 * representation (e.g. Spark decimal from java BigDecimal); values of any
 * other type are returned unchanged.
 *
 * @param value measure value, may be null
 * @param type  data type of the measure
 * @return converted value for non-null decimals, the original value otherwise
 */
private Object getConvertedMeasureValue(Object value, DataType type) {
  // Only non-null decimal values need conversion; everything else (including
  // null decimals) passes straight through.
  if (value != null && DataTypes.isDecimal(type)) {
    return DataTypeUtil.getDataTypeConverter().convertFromDecimalToBigDecimal(value);
  }
  return value;
}
javaDecVal = javaDecVal.setScale(dimension.getColumnSchema().getScale()); return getDataTypeConverter().convertFromBigDecimalToDecimal(javaDecVal); } else { return getDataTypeConverter().convertFromByteToUTF8String(dataInBytes);
@Override public Object getDataBasedOnColumnList(Map<CarbonDimension, ByteBuffer> childBuffer, CarbonDimension presentColumn) { // Traverse through the Complex Tree and check if the at present column is same as the // column present in the child column then fill it up else add null to the column. if (childBuffer.get(presentColumn) != null) { if (presentColumn.getNumberOfChild() > 0) { // This is complex Column. And all its child will be present in the corresponding data // buffer. Object field = getDataBasedOnDataType(childBuffer.get(presentColumn)); return field; } else { // This is a child column with with primitive data type. Object field = children.get(0) .getDataBasedOnColumn(childBuffer.get(presentColumn), presentColumn, presentColumn); return field; } } else { int childLength; childLength = presentColumn.getNumberOfChild(); Object[] fields = new Object[childLength]; for (int i = 0; i < childLength; i++) { fields[i] = children.get(i) .getDataBasedOnColumnList(childBuffer, presentColumn.getListOfChildDimensions().get(i)); } return DataTypeUtil.getDataTypeConverter().wrapWithGenericRow(fields); } } }
return bigDecimalToByte(javaDecVal); } else { return getDataTypeConverter().convertFromStringToByte(data);
/**
 * Lazily initialises reader state before the first row is produced: the
 * filter/output row holders, the stream reader positioned at this split, the
 * forward-dictionary cache, complex-dimension readers and the Spark output
 * schema. Statement order matters: the input stream must be opened and
 * seeked before the blocklet reader wraps it.
 *
 * @throws IOException if the split's file cannot be opened or seeked
 */
private void initializeAtFirstRow() throws IOException {
  // Row buffers used for filter evaluation and for the projected output row.
  filterValues = new Object[carbonTable.getDimensionOrdinalMax() + measureCount];
  filterRow = new RowImpl();
  filterRow.setValues(filterValues);
  outputValues = new Object[projection.length];
  outputRow = new GenericInternalRow(outputValues);
  Path file = fileSplit.getPath();
  // Sync marker delimiting blocklets inside the streaming file.
  byte[] syncMarker = getSyncMarker(file.toString());
  FileSystem fs = file.getFileSystem(hadoopConf);
  int bufferSize = Integer.parseInt(hadoopConf.get(CarbonStreamInputFormat.READ_BUFFER_SIZE,
      CarbonStreamInputFormat.READ_BUFFER_SIZE_DEFAULT));
  FSDataInputStream fileIn = fs.open(file, bufferSize);
  // Position the stream at this split's start before wrapping it in the reader.
  fileIn.seek(fileSplit.getStart());
  input = new StreamBlockletReader(syncMarker, fileIn, fileSplit.getLength(),
      fileSplit.getStart() == 0, compressorName);
  cacheProvider = CacheProvider.getInstance();
  cache = cacheProvider.createCache(CacheType.FORWARD_DICTIONARY);
  queryTypes = CarbonStreamInputFormat.getComplexDimensions(carbonTable, storageColumns, cache);
  // Convert the Carbon projection columns into a Spark StructType for output.
  outputSchema = new StructType((StructField[])
      DataTypeUtil.getDataTypeConverter().convertCarbonSchemaToSparkSchema(projection));
}
@Override public Object getDataBasedOnColumn(ByteBuffer dataBuffer, CarbonDimension parent, CarbonDimension child) { int childLength; if (parent.getOrdinal() < child.getOrdinal()) { childLength = parent.getNumberOfChild(); Object[] fields = new Object[childLength]; for (int i = 0; i < childLength; i++) { fields[i] = children.get(i) .getDataBasedOnColumn(dataBuffer, parent.getListOfChildDimensions().get(i), child); } return DataTypeUtil.getDataTypeConverter().wrapWithGenericRow(fields); } else if (parent.getOrdinal() > child.getOrdinal()) { return null; } else { // childLength = dataBuffer.getShort(); Object field = getDataBasedOnDataType(dataBuffer); return field; } }
DataTypeUtil.getDataTypeConverter().convertFromBigDecimalToDecimal(defaultValue);
Object getMeasureData(ColumnPage dataChunk, int index, CarbonMeasure carbonMeasure) { if (!dataChunk.getNullBits().get(index)) { DataType dataType = carbonMeasure.getDataType(); if (dataType == DataTypes.BOOLEAN) { return dataChunk.getBoolean(index); } else if (dataType == DataTypes.SHORT) { return (short) dataChunk.getLong(index); } else if (dataType == DataTypes.INT) { return (int) dataChunk.getLong(index); } else if (dataType == DataTypes.LONG) { return dataChunk.getLong(index); } else if (dataType == DataTypes.FLOAT) { return dataChunk.getFloat(index); } else if (dataType == DataTypes.BYTE) { return dataChunk.getByte(index); } else if (DataTypes.isDecimal(dataType)) { BigDecimal bigDecimalMsrValue = dataChunk.getDecimal(index); if (null != bigDecimalMsrValue && carbonMeasure.getScale() > bigDecimalMsrValue.scale()) { bigDecimalMsrValue = bigDecimalMsrValue.setScale(carbonMeasure.getScale(), RoundingMode.HALF_UP); } // convert data type as per the computing engine return DataTypeUtil.getDataTypeConverter().convertFromBigDecimalToDecimal( bigDecimalMsrValue); } else { return dataChunk.getDouble(index); } } return null; }
return null; return getDataTypeConverter().convertFromBigDecimalToDecimal(byteToBigDecimal(dataInBytes)); } else { return getDataTypeConverter().convertFromByteToUTF8String(dataInBytes);
protected void fillMeasureData(Object[] msrValues, int offset, BlockletScannedResult scannedResult) { int measureExistIndex = 0; for (short i = 0; i < measureInfo.getMeasureDataTypes().length; i++) { // if measure exists is block then pass measure column // data chunk to the collector if (measureInfo.getMeasureExists()[i]) { ProjectionMeasure queryMeasure = executionInfo.getProjectionMeasures()[measureExistIndex]; msrValues[i + offset] = getMeasureData( scannedResult.getMeasureChunk(measureInfo.getMeasureOrdinals()[measureExistIndex]), scannedResult.getCurrentRowId(), queryMeasure.getMeasure()); measureExistIndex++; } else if (DataTypes.isDecimal(measureInfo.getMeasureDataTypes()[i])) { // if not then get the default value msrValues[i + offset] = DataTypeUtil.getDataTypeConverter() .convertFromBigDecimalToDecimal(measureDefaultValues[i]); } else { msrValues[i + offset] = measureDefaultValues[i]; } } }
dictionaryColumnIndex++; } else if (queryDimensions[i].getDimension().getDataType() == DataTypes.STRING) { row[order[i]] = DataTypeUtil.getDataTypeConverter().convertFromByteToUTF8String( (byte[])dimensionInfo.getDefaultValues()[i]); } else {
protected void fillMeasureData(Object[] msrValues, int offset, BlockletScannedResult scannedResult) { int measureExistIndex = 0; for (short i = 0; i < measureInfo.getMeasureDataTypes().length; i++) { // if measure exists is block then pass measure column // data chunk to the collector if (measureInfo.getMeasureExists()[i]) { ProjectionMeasure queryMeasure = executionInfo.getProjectionMeasures()[measureExistIndex]; msrValues[i + offset] = getMeasureData( scannedResult.getMeasureChunk(measureInfo.getMeasureOrdinals()[measureExistIndex]), scannedResult.getCurrentRowId(), queryMeasure.getMeasure()); measureExistIndex++; } else { // if not then get the default value and use that value in aggregation Object defaultValue = measureInfo.getDefaultValues()[i]; if (null != defaultValue && DataTypes.isDecimal(measureInfo.getMeasureDataTypes()[i])) { // convert data type as per the computing engine defaultValue = DataTypeUtil.getDataTypeConverter().convertFromBigDecimalToDecimal(defaultValue); } msrValues[i + offset] = defaultValue; } } }
value = DataTypeUtil.getDataTypeConverter().convertFromDecimalToBigDecimal(value); model.isCompactionFlow() && value != null) { value = DataTypeUtil.getDataTypeConverter().convertFromDecimalToBigDecimal(value);
} else if (actualQueryDimensions[i].getDimension().getDataType() == DataTypes.STRING) { newColumnDefaultValue = DataTypeUtil.getDataTypeConverter().convertFromByteToUTF8Bytes( CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY); } else {
DataTypeUtil.getDataTypeConverter().convertFromBigDecimalToDecimal(v);