BigDecimal decimal = normalizeDecimalValue(bigDecimal, precision); if (useConverter) { return converter.convertFromBigDecimalToDecimal(decimal); } else { return decimal;
/**
 * Decodes the default value for a no-dictionary column from its raw byte
 * representation, according to the column's data type.
 *
 * @param datatype data type of the no-dictionary column
 * @param defaultValue raw default-value bytes (may encode null)
 * @return the decoded default value, or null when the stored default is null
 */
private static Object getNoDictionaryDefaultValue(DataType datatype, byte[] defaultValue) {
  Object decodedDefault = null;
  if (!isDefaultValueNull(defaultValue)) {
    if (datatype == DataTypes.INT) {
      String text = new String(defaultValue, Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
      decodedDefault = Integer.parseInt(text);
    } else if (datatype == DataTypes.LONG) {
      String text = new String(defaultValue, Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
      decodedDefault = Long.parseLong(text);
    } else if (datatype == DataTypes.TIMESTAMP) {
      // xor-encoded long, scaled by 1000 — presumably seconds to millis; confirm against writer
      long timestampValue = ByteUtil.toXorLong(defaultValue, 0, defaultValue.length);
      decodedDefault = timestampValue * 1000L;
    } else {
      // all remaining types are handed to the engine converter as UTF-8 bytes
      decodedDefault =
          DataTypeUtil.getDataTypeConverter().convertFromByteToUTF8Bytes(defaultValue);
    }
  }
  return decodedDefault;
}
javaDecVal = javaDecVal.setScale(dimension.getColumnSchema().getScale()); return getDataTypeConverter().convertFromBigDecimalToDecimal(javaDecVal); } else { return getDataTypeConverter().convertFromByteToUTF8String(dataInBytes);
/**
 * Map data is internally stored as Array&lt;Struct&lt;key,value&gt;&gt;. The buffer is first
 * materialized in that stored layout, then every record is unwrapped into its key
 * and value halves. Spark integration expects the result as
 * ArrayBasedMapData(keyArray, valueArray); the SDK receives an object array in the
 * same format as returned to Spark.
 *
 * @param dataBuffer serialized map data
 * @return engine-specific map representation, or null when the buffer holds no data
 */
@Override
public Object getDataBasedOnDataType(ByteBuffer dataBuffer) {
  Object[] records = fillData(dataBuffer);
  if (records == null) {
    return null;
  }
  int size = records.length;
  Object[] keys = new Object[size];
  Object[] values = new Object[size];
  for (int i = 0; i < size; i++) {
    // each record is a struct row holding [key, value]
    Object[] pair = DataTypeUtil.getDataTypeConverter().unwrapGenericRowToObject(records[i]);
    keys[i] = pair[0];
    values[i] = pair[1];
  }
  return DataTypeUtil.getDataTypeConverter().wrapWithArrayBasedMapData(keys, values);
}
return null; return converter.convertFromStringToDecimal(data); } else { return converter.convertFromStringToUTF8String(data);
/**
 * Converts a decimal measure value from the computing engine's decimal type to
 * {@code java.math.BigDecimal}; every other type (and null) is returned unchanged.
 *
 * <p>Simplified from the original if/else where both branches returned {@code value}
 * and the else arm was pure duplication.
 *
 * @param value measure value, may be null
 * @param type data type of the measure
 * @return converted value for non-null decimals, otherwise the input value
 */
private Object getConvertedMeasureValue(Object value, DataType type) {
  if (DataTypes.isDecimal(type) && value != null) {
    return DataTypeUtil.getDataTypeConverter().convertFromDecimalToBigDecimal(value);
  }
  return value;
}
return converter.convertFromStringToUTF8String(dimensionValue);
return bigDecimalToByte(javaDecVal); } else { return getDataTypeConverter().convertFromStringToByte(data);
// Lazily initializes all per-split reader state on the first row: filter/output row
// buffers, the seeked input stream wrapped in a StreamBlockletReader, the forward
// dictionary cache, complex-dimension query types, and the engine output schema.
// Statement order matters: the stream is seeked to the split start before the reader
// is built, and the dictionary cache is created before complex dimensions are resolved.
private void initializeAtFirstRow() throws IOException {
  // filter row spans all dimensions (by ordinal) plus all measures
  filterValues = new Object[carbonTable.getDimensionOrdinalMax() + measureCount];
  filterRow = new RowImpl();
  filterRow.setValues(filterValues);
  outputValues = new Object[projection.length];
  outputRow = new GenericInternalRow(outputValues);
  Path file = fileSplit.getPath();
  byte[] syncMarker = getSyncMarker(file.toString());
  FileSystem fs = file.getFileSystem(hadoopConf);
  // read buffer size comes from config, falling back to the format default
  int bufferSize = Integer.parseInt(hadoopConf.get(CarbonStreamInputFormat.READ_BUFFER_SIZE,
      CarbonStreamInputFormat.READ_BUFFER_SIZE_DEFAULT));
  FSDataInputStream fileIn = fs.open(file, bufferSize);
  fileIn.seek(fileSplit.getStart());
  // start == 0 flags that this split begins at the head of the file;
  // NOTE(review): the reader appears to take ownership of fileIn — confirm close semantics
  input = new StreamBlockletReader(syncMarker, fileIn, fileSplit.getLength(),
      fileSplit.getStart() == 0, compressorName);
  cacheProvider = CacheProvider.getInstance();
  cache = cacheProvider.createCache(CacheType.FORWARD_DICTIONARY);
  queryTypes = CarbonStreamInputFormat.getComplexDimensions(carbonTable, storageColumns, cache);
  // projection schema converted to the computing engine's (Spark) schema representation
  outputSchema = new StructType((StructField[])
      DataTypeUtil.getDataTypeConverter().convertCarbonSchemaToSparkSchema(projection));
}
dictionaryColumnIndex++; } else if (queryDimensions[i].getDimension().getDataType() == DataTypes.STRING) { row[order[i]] = DataTypeUtil.getDataTypeConverter().convertFromByteToUTF8String( (byte[])dimensionInfo.getDefaultValues()[i]); } else {
value = DataTypeUtil.getDataTypeConverter().convertFromDecimalToBigDecimal(value); model.isCompactionFlow() && value != null) { value = DataTypeUtil.getDataTypeConverter().convertFromDecimalToBigDecimal(value);
BigDecimal decimal = normalizeDecimalValue(bigDecimal, precision); if (useConverter) { return converter.convertFromBigDecimalToDecimal(decimal); } else { return decimal;
return null; return getDataTypeConverter().convertFromBigDecimalToDecimal(byteToBigDecimal(dataInBytes)); } else { return getDataTypeConverter().convertFromByteToUTF8String(dataInBytes);
} else if (actualQueryDimensions[i].getDimension().getDataType() == DataTypes.STRING) { newColumnDefaultValue = DataTypeUtil.getDataTypeConverter().convertFromByteToUTF8Bytes( CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY); } else {
DataTypeUtil.getDataTypeConverter().convertFromBigDecimalToDecimal(defaultValue);
Object getMeasureData(ColumnPage dataChunk, int index, CarbonMeasure carbonMeasure) { if (!dataChunk.getNullBits().get(index)) { DataType dataType = carbonMeasure.getDataType(); if (dataType == DataTypes.BOOLEAN) { return dataChunk.getBoolean(index); } else if (dataType == DataTypes.SHORT) { return (short) dataChunk.getLong(index); } else if (dataType == DataTypes.INT) { return (int) dataChunk.getLong(index); } else if (dataType == DataTypes.LONG) { return dataChunk.getLong(index); } else if (dataType == DataTypes.FLOAT) { return dataChunk.getFloat(index); } else if (dataType == DataTypes.BYTE) { return dataChunk.getByte(index); } else if (DataTypes.isDecimal(dataType)) { BigDecimal bigDecimalMsrValue = dataChunk.getDecimal(index); if (null != bigDecimalMsrValue && carbonMeasure.getScale() > bigDecimalMsrValue.scale()) { bigDecimalMsrValue = bigDecimalMsrValue.setScale(carbonMeasure.getScale(), RoundingMode.HALF_UP); } // convert data type as per the computing engine return DataTypeUtil.getDataTypeConverter().convertFromBigDecimalToDecimal( bigDecimalMsrValue); } else { return dataChunk.getDouble(index); } } return null; }
protected void fillMeasureData(Object[] msrValues, int offset, BlockletScannedResult scannedResult) { int measureExistIndex = 0; for (short i = 0; i < measureInfo.getMeasureDataTypes().length; i++) { // if measure exists is block then pass measure column // data chunk to the collector if (measureInfo.getMeasureExists()[i]) { ProjectionMeasure queryMeasure = executionInfo.getProjectionMeasures()[measureExistIndex]; msrValues[i + offset] = getMeasureData( scannedResult.getMeasureChunk(measureInfo.getMeasureOrdinals()[measureExistIndex]), scannedResult.getCurrentRowId(), queryMeasure.getMeasure()); measureExistIndex++; } else if (DataTypes.isDecimal(measureInfo.getMeasureDataTypes()[i])) { // if not then get the default value msrValues[i + offset] = DataTypeUtil.getDataTypeConverter() .convertFromBigDecimalToDecimal(measureDefaultValues[i]); } else { msrValues[i + offset] = measureDefaultValues[i]; } } }
protected void fillMeasureData(Object[] msrValues, int offset, BlockletScannedResult scannedResult) { int measureExistIndex = 0; for (short i = 0; i < measureInfo.getMeasureDataTypes().length; i++) { // if measure exists is block then pass measure column // data chunk to the collector if (measureInfo.getMeasureExists()[i]) { ProjectionMeasure queryMeasure = executionInfo.getProjectionMeasures()[measureExistIndex]; msrValues[i + offset] = getMeasureData( scannedResult.getMeasureChunk(measureInfo.getMeasureOrdinals()[measureExistIndex]), scannedResult.getCurrentRowId(), queryMeasure.getMeasure()); measureExistIndex++; } else { // if not then get the default value and use that value in aggregation Object defaultValue = measureInfo.getDefaultValues()[i]; if (null != defaultValue && DataTypes.isDecimal(measureInfo.getMeasureDataTypes()[i])) { // convert data type as per the computing engine defaultValue = DataTypeUtil.getDataTypeConverter().convertFromBigDecimalToDecimal(defaultValue); } msrValues[i + offset] = defaultValue; } } }
DataTypeUtil.getDataTypeConverter().convertFromBigDecimalToDecimal(v);