/**
 * Builds a single-entry map block whose key and value are both the given decimal,
 * encoded as a {@code long} for short decimals or a {@link Slice} for long decimals.
 */
public static Block decimalMapBlockOf(DecimalType type, BigDecimal decimal)
{
    BigInteger unscaled = decimal.unscaledValue();
    if (!type.isShort()) {
        Slice encoded = Decimals.encodeUnscaledValue(unscaled);
        return mapBlockOf(type, type, encoded, encoded);
    }
    long shortValue = unscaled.longValue();
    return mapBlockOf(type, type, shortValue, shortValue);
}
/**
 * Builds a single-element array block containing the given decimal,
 * encoded as a {@code long} for short decimals or a {@link Slice} for long decimals.
 */
public static Block decimalArrayBlockOf(DecimalType type, BigDecimal decimal)
{
    BigInteger unscaled = decimal.unscaledValue();
    if (!type.isShort()) {
        return arrayBlockOf(type, Decimals.encodeUnscaledValue(unscaled));
    }
    return arrayBlockOf(type, unscaled.longValue());
}
/**
 * Reads the decimal at {@code position} from {@code block} and materializes it as a
 * {@link BigDecimal} with the type's scale, using a {@link MathContext} sized to the
 * type's precision.
 */
public static BigDecimal readBigDecimal(DecimalType type, Block block, int position)
{
    BigInteger unscaled;
    if (type.isShort()) {
        unscaled = BigInteger.valueOf(type.getLong(block, position));
    }
    else {
        unscaled = decodeUnscaledValue(type.getSlice(block, position));
    }
    return new BigDecimal(unscaled, type.getScale(), new MathContext(type.getPrecision()));
}
/**
 * Creates a decimal column reader sized for the given precision/scale:
 * short decimals use the long-backed reader, long decimals the slice-backed one.
 */
public static PrimitiveColumnReader createReader(RichColumnDescriptor descriptor, int precision, int scale)
{
    DecimalType decimalType = DecimalType.createDecimalType(precision, scale);
    return decimalType.isShort()
            ? new ShortDecimalColumnReader(descriptor)
            : new LongDecimalColumnReader(descriptor);
}
}
public static ReadMapping decimalReadMapping(DecimalType decimalType) { // JDBC driver can return BigDecimal with lower scale than column's scale when there are trailing zeroes int scale = decimalType.getScale(); if (decimalType.isShort()) { return longReadMapping(decimalType, (resultSet, columnIndex) -> encodeShortScaledValue(resultSet.getBigDecimal(columnIndex), scale)); } return sliceReadMapping(decimalType, (resultSet, columnIndex) -> encodeScaledValue(resultSet.getBigDecimal(columnIndex), scale)); }
/**
 * Finalizes statistics for the current row group and resets the active builder
 * so the next row group starts from a clean state.
 *
 * <p>Exactly one of the two builder fields is in use, selected by whether the
 * column's decimal type is short; only that one is rebuilt here.
 */
@Override
public Map<Integer, ColumnStatistics> finishRowGroup()
{
    // Must not be called after the writer is closed.
    checkState(!closed);
    ColumnStatistics statistics;
    if (type.isShort()) {
        statistics = shortDecimalStatisticsBuilder.buildColumnStatistics();
        // Short-decimal builder needs the scale to interpret raw long values.
        shortDecimalStatisticsBuilder = new ShortDecimalStatisticsBuilder(type.getScale());
    }
    else {
        statistics = longDecimalStatisticsBuilder.buildColumnStatistics();
        longDecimalStatisticsBuilder = new LongDecimalStatisticsBuilder();
    }
    // Accumulated per-row-group stats are kept for the eventual stripe/file footer.
    rowGroupColumnStatistics.add(statistics);
    return ImmutableMap.of(column, statistics);
}
/**
 * Converts the decimal stored at {@code position} in {@code block} into a
 * {@link HiveDecimal} carrying the type's scale.
 */
private static HiveDecimal getHiveDecimal(DecimalType decimalType, Block block, int position)
{
    BigInteger unscaled = decimalType.isShort()
            ? BigInteger.valueOf(decimalType.getLong(block, position))
            : Decimals.decodeUnscaledValue(decimalType.getSlice(block, position));
    return HiveDecimal.create(unscaled, decimalType.getScale());
}
// Fragment: compatibility checks between a source and result decimal type.
DecimalType sourceDecimal = (DecimalType) source;
DecimalType resultDecimal = (DecimalType) result;
// True when both types use the same physical representation (both short or both long).
// NOTE(review): this is equivalent to sourceDecimal.isShort() == resultDecimal.isShort().
boolean sameDecimalSubtype = (sourceDecimal.isShort() && resultDecimal.isShort())
        || (!sourceDecimal.isShort() && !resultDecimal.isShort());
boolean sameScale = sourceDecimal.getScale() == resultDecimal.getScale();
// Widening precision is allowed; narrowing is not.
boolean sourcePrecisionIsLessOrEqualToResultPrecision = sourceDecimal.getPrecision() <= resultDecimal.getPrecision();
/**
 * Creates an ORC decimal column writer.
 *
 * <p>Sets up the data (mantissa), scale, and presence streams, and the
 * representation-appropriate statistics builder.
 *
 * @param column zero-based column index in the file schema
 * @param type must be a {@link DecimalType}
 * @param compression stream compression kind; {@code NONE} disables compression
 * @param bufferSize output stream buffer size in bytes
 * @param orcEncoding must not be DWRF — DWRF has no decimal support
 */
public DecimalColumnWriter(int column, Type type, CompressionKind compression, int bufferSize, OrcEncoding orcEncoding)
{
    checkArgument(column >= 0, "column is negative");
    checkArgument(orcEncoding != DWRF, "DWRF does not support %s type", type);
    this.column = column;
    this.type = (DecimalType) requireNonNull(type, "type is null");
    this.compressed = requireNonNull(compression, "compression is null") != NONE;
    this.columnEncoding = new ColumnEncoding(DIRECT_V2, 0);
    // Decimal values are split across two streams: unscaled values and scales.
    this.dataStream = new DecimalOutputStream(compression, bufferSize);
    this.scaleStream = new LongOutputStreamV2(compression, bufferSize, true, SECONDARY);
    this.presentStream = new PresentOutputStream(compression, bufferSize);
    // Only one statistics builder is instantiated, matching the decimal's representation.
    if (this.type.isShort()) {
        shortDecimalStatisticsBuilder = new ShortDecimalStatisticsBuilder(this.type.getScale());
    }
    else {
        longDecimalStatisticsBuilder = new LongDecimalStatisticsBuilder();
    }
}
// Fragment (enclosing method not visible): short decimals are returned as the
// unscaled value packed into a long.
if (dtype.isShort()) {
    return row.getDecimal(field).unscaledValue().longValue();
// Fragment (surrounding if/else chain partially out of view): converts a partition
// value to its canonical string form for filtering.
filter.add(PARTITION_VALUE_WILDCARD);
// Long decimals arrive as a Slice holding the unscaled value.
else if (type instanceof DecimalType && !((DecimalType) type).isShort()) {
    Slice slice = (Slice) value;
    filter.add(Decimals.toString(slice, ((DecimalType) type).getScale()));
// Short decimals arrive as a boxed long unscaled value.
else if (type instanceof DecimalType && ((DecimalType) type).isShort()) {
    filter.add(Decimals.toString((long) value, ((DecimalType) type).getScale()));
/**
 * Decodes the decimal value of {@code column} from the current Hive row,
 * rescaling it to the declared column scale and storing it in either the
 * {@code longs} or {@code slices} slot depending on the decimal representation.
 * Marks the column as loaded and records nullness.
 */
private void parseDecimalColumn(int column)
{
    loaded[column] = true;
    Object fieldData = rowInspector.getStructFieldData(rowData, structFields[column]);
    if (fieldData == null) {
        nulls[column] = true;
        return;
    }
    Object fieldValue = ((PrimitiveObjectInspector) fieldInspectors[column]).getPrimitiveJavaObject(fieldData);
    checkState(fieldValue != null, "fieldValue should not be null");

    HiveDecimal decimal = (HiveDecimal) fieldValue;
    DecimalType columnType = (DecimalType) types[column];
    // Hive may hand back a different scale than the table declares; normalize it.
    BigInteger rescaledValue = rescale(decimal.unscaledValue(), decimal.scale(), columnType.getScale());
    if (columnType.isShort()) {
        longs[column] = rescaledValue.longValue();
    }
    else {
        slices[column] = Decimals.encodeUnscaledValue(rescaledValue);
    }
    nulls[column] = false;
}
/**
 * Compiles the decimal-sum aggregation for the given input/output decimal types.
 * The accumulator state is always the long-decimal-with-overflow form; only the
 * input function differs by the input's physical representation.
 */
private static InternalAggregationFunction generateAggregation(Type inputType, Type outputType)
{
    checkArgument(inputType instanceof DecimalType, "type must be Decimal");
    DynamicClassLoader classLoader = new DynamicClassLoader(DecimalSumAggregation.class.getClassLoader());
    List<Type> inputTypes = ImmutableList.of(inputType);

    Class<? extends AccumulatorState> stateInterface = LongDecimalWithOverflowState.class;
    AccumulatorStateSerializer<?> stateSerializer = new LongDecimalWithOverflowStateSerializer();

    // Short decimals are consumed as longs; long decimals as slices.
    MethodHandle inputFunction = ((DecimalType) inputType).isShort()
            ? SHORT_DECIMAL_INPUT_FUNCTION
            : LONG_DECIMAL_INPUT_FUNCTION;

    AggregationMetadata metadata = new AggregationMetadata(
            generateAggregationName(NAME, outputType.getTypeSignature(), inputTypes.stream().map(Type::getTypeSignature).collect(toImmutableList())),
            createInputParameterMetadata(inputType),
            inputFunction.bindTo(inputType),
            COMBINE_FUNCTION,
            LONG_DECIMAL_OUTPUT_FUNCTION.bindTo(outputType),
            ImmutableList.of(new AccumulatorStateDescriptor(
                    stateInterface,
                    stateSerializer,
                    new LongDecimalWithOverflowStateFactory())),
            outputType);

    GenericAccumulatorFactoryBinder factory = AccumulatorCompiler.generateAccumulatorFactoryBinder(metadata, classLoader);
    Type intermediateType = stateSerializer.getSerializedType();
    return new InternalAggregationFunction(NAME, inputTypes, ImmutableList.of(intermediateType), outputType, true, false, factory);
}
// Fragment (enclosing method not visible): selects input/output functions by
// decimal representation.
AccumulatorStateSerializer<?> stateSerializer = new LongDecimalWithOverflowAndLongStateSerializer();
if (((DecimalType) type).isShort()) {
    inputFunction = SHORT_DECIMAL_INPUT_FUNCTION;
    outputFunction = SHORT_DECIMAL_OUTPUT_FUNCTION;
// Fragment (closing branch not visible): short decimals need the scale to build
// statistics from raw unscaled longs.
if (decimalType.isShort()) {
    statisticsBuilder = new ShortDecimalStatisticsBuilder((decimalType).getScale());
// Fragment (loop bodies not visible): short-decimal path iterating the block's
// non-null positions.
if (type.isShort()) {
    for (int position = 0; position < block.getPositionCount(); position++) {
        if (!block.isNull(position)) {
/**
 * Parses a decimal literal (keeping leading zeros in the precision) and wraps it
 * as a {@link SqlDecimal} with the parsed precision and scale.
 */
protected static SqlDecimal decimal(String decimalString)
{
    DecimalParseResult parseResult = Decimals.parseIncludeLeadingZerosInPrecision(decimalString);
    // Short decimals parse to a boxed Long; long decimals to a Slice.
    BigInteger unscaledValue = parseResult.getType().isShort()
            ? BigInteger.valueOf((Long) parseResult.getObject())
            : Decimals.decodeUnscaledValue((Slice) parseResult.getObject());
    return new SqlDecimal(unscaledValue, parseResult.getType().getPrecision(), parseResult.getType().getScale());
}
// Fragment: rescales a short decimal from the source (file) scale to the reader's
// declared scale before writing it to the output block.
// NOTE(review): this span looks like two overlapping copies of the same snippet —
// the first use of sourceScale precedes its visible declaration. Verify against
// the original file; likely an extraction artifact rather than real code order.
if (decimalType.isShort()) {
    long rescaledDecimal = Decimals.rescale(decimalStream.nextLong(), (int) sourceScale, decimalType.getScale());
    decimalType.writeLong(builder, rescaledDecimal);
verify(scaleStream != null);
long sourceScale = scaleStream.next();
if (decimalType.isShort()) {
    long rescaledDecimal = Decimals.rescale(decimalStream.nextLong(), (int) sourceScale, decimalType.getScale());
    decimalType.writeLong(builder, rescaledDecimal);
// Fragment (else-branch not visible): writes a Hive decimal into the block builder,
// short-decimal path shown.
DecimalType decimalType = (DecimalType) type;
HiveDecimalWritable hiveDecimal = ((HiveDecimalObjectInspector) inspector).getPrimitiveWritableObject(object);
if (decimalType.isShort()) {
    decimalType.writeLong(builder, DecimalUtils.getShortDecimalValue(hiveDecimal, decimalType.getScale()));
// Fragment (else-branch not visible): short decimals are read from the cursor as a
// long and widened to BigInteger.
if (decimalType.isShort()) {
    return BigInteger.valueOf(cursor.getLong(field));