// Caches the output DECIMAL type info and pre-computes the decimal-64
// absolute max for that precision, so it need not be recomputed per row.
private void init() {
  final DecimalTypeInfo typeInfo = (DecimalTypeInfo) outputTypeInfo;
  outputDecimalTypeInfo = typeInfo;
  outputDecimal64AbsMax =
      HiveDecimalWritable.getDecimal64AbsMax(typeInfo.getPrecision());
}
/**
 * Decides whether the Decimal64 fast path may be used.
 *
 * @param tryDecimal64 whether the caller wants to attempt the fast path
 * @param typeInfo the type to check; only DECIMAL types qualify
 * @return true iff the fast path is requested, the type is DECIMAL, and
 *         its precision fits in a 64-bit long representation
 */
private boolean checkDecimal64(boolean tryDecimal64, TypeInfo typeInfo) {
  if (!tryDecimal64) {
    return false;
  }
  if (!(typeInfo instanceof DecimalTypeInfo)) {
    return false;
  }
  final int precision = ((DecimalTypeInfo) typeInfo).getPrecision();
  return HiveDecimalWritable.isPrecisionDecimal64(precision);
}
/**
 * Returns whether the Decimal64 fast path can be taken for the given type:
 * the caller must opt in, the type must be DECIMAL, and the precision must
 * be representable as a 64-bit long.
 */
private boolean checkDecimal64(boolean tryDecimal64, TypeInfo typeInfo) {
  // Both conditions must hold before the precision is even worth checking.
  if (!tryDecimal64 || !(typeInfo instanceof DecimalTypeInfo)) {
    return false;
  }
  final DecimalTypeInfo decimalType = (DecimalTypeInfo) typeInfo;
  return HiveDecimalWritable.isPrecisionDecimal64(decimalType.getPrecision());
}
/**
 * Checks whether the Decimal64 representation is applicable: the fast path
 * must be requested, the type must be a DECIMAL, and its precision must be
 * within the decimal-64 limit.
 */
private boolean checkDecimal64(boolean tryDecimal64, TypeInfo typeInfo) {
  final boolean isDecimal =
      tryDecimal64 && (typeInfo instanceof DecimalTypeInfo);
  if (!isDecimal) {
    return false;
  }
  return HiveDecimalWritable.isPrecisionDecimal64(
      ((DecimalTypeInfo) typeInfo).getPrecision());
}
/**
 * True iff the caller requested the Decimal64 fast path, {@code typeInfo}
 * is a DECIMAL type, and its precision fits the decimal-64 range.
 */
private boolean checkDecimal64(boolean tryDecimal64, TypeInfo typeInfo) {
  if (tryDecimal64 && typeInfo instanceof DecimalTypeInfo) {
    final int precision = ((DecimalTypeInfo) typeInfo).getPrecision();
    return HiveDecimalWritable.isPrecisionDecimal64(precision);
  }
  return false;
}
@Override public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException { // Intercept result ObjectInspector so we can extract the DECIMAL precision and scale. ObjectInspector resultOI = super.init(m, parameters); if (m == Mode.COMPLETE || m == Mode.FINAL) { DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(resultOI); resultPrecision = decimalTypeInfo.getPrecision(); resultScale = decimalTypeInfo.getScale(); } return resultOI; }
if (ti instanceof DecimalTypeInfo) { DecimalTypeInfo dti = (DecimalTypeInfo) ti; if (dti.getPrecision() <= TypeDescription.MAX_DECIMAL64_PRECISION && decimal64Support) { useDecimal64 = true;
/**
 * Produces the final decimal sum, enforcing the output precision/scale.
 * Returns null when no values were aggregated or the sum overflowed the
 * declared DECIMAL precision.
 */
@Override
public Object terminate(AggregationBuffer agg) throws HiveException {
  final SumHiveDecimalWritableAgg myagg = (SumHiveDecimalWritableAgg) agg;
  final boolean nothingToReturn =
      myagg.empty || myagg.sum == null || !myagg.sum.isSet();
  if (nothingToReturn) {
    return null;
  }
  final DecimalTypeInfo outputType = (DecimalTypeInfo) outputOI.getTypeInfo();
  myagg.sum.mutateEnforcePrecisionScale(outputType.getPrecision(), outputType.getScale());
  if (!myagg.sum.isSet()) {
    // Enforcing precision/scale cleared the value: the sum overflowed.
    LOG.warn("The sum of a column with data type HiveDecimal is out of range");
    return null;
  }
  result.set(myagg.sum);
  return result;
}
@Override public HiveDecimal getPrimitiveJavaObject(Object o) { if (o == null) { return null; } DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; // We do not want to modify the writable provided by the object o since it is not a copy. HiveDecimalWritable decWritable = ((LazyHiveDecimal)o).getWritableObject(); HiveDecimalWritable result = HiveDecimalWritable.enforcePrecisionScale( decWritable, decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale()); return (result != null && result.isSet() ? result.getHiveDecimal() : null); }
/**
 * Finalizes the decimal sum. The sum is clamped to the output type's
 * precision/scale; a sum that overflows the declared precision, or an empty
 * aggregation, yields null.
 */
@Override
public Object terminate(AggregationBuffer agg) throws HiveException {
  final SumHiveDecimalWritableAgg buffer = (SumHiveDecimalWritableAgg) agg;
  if (buffer.empty || buffer.sum == null || !buffer.sum.isSet()) {
    return null;
  }
  final DecimalTypeInfo outType = (DecimalTypeInfo) outputOI.getTypeInfo();
  final int precision = outType.getPrecision();
  final int scale = outType.getScale();
  buffer.sum.mutateEnforcePrecisionScale(precision, scale);
  if (buffer.sum.isSet()) {
    result.set(buffer.sum);
    return result;
  }
  // mutateEnforcePrecisionScale unset the value: overflow of the output type.
  LOG.warn("The sum of a column with data type HiveDecimal is out of range");
  return null;
}
/**
 * Writes the aggregated decimal sum into the output column vector at the
 * given row, enforcing the output precision/scale. A null buffer or an
 * overflowed sum produces a SQL NULL in the column.
 */
@Override
public void assignRowColumn(VectorizedRowBatch batch, int batchIndex, int columnNum,
    AggregationBuffer agg) throws HiveException {
  final DecimalColumnVector outputColVector = (DecimalColumnVector) batch.cols[columnNum];
  final Aggregation myagg = (Aggregation) agg;
  boolean resultIsNull = myagg.isNull;
  if (!resultIsNull) {
    myagg.sum.mutateEnforcePrecisionScale(
        outputDecimalTypeInfo.getPrecision(), outputDecimalTypeInfo.getScale());
    // Enforcement may unset the value on overflow; that becomes NULL too.
    resultIsNull = !myagg.sum.isSet();
  }
  if (resultIsNull) {
    outputColVector.noNulls = false;
    outputColVector.isNull[batchIndex] = true;
    return;
  }
  outputColVector.isNull[batchIndex] = false;
  outputColVector.set(batchIndex, myagg.sum);
}
}
/**
 * Generates a random HiveDecimal that fits the given DECIMAL type.
 * Random digit strings (precision up to 18, random scale, random sign) are
 * produced and retried until one survives enforcement of the target
 * precision and scale.
 *
 * @param r the random source
 * @param decimalTypeInfo the target DECIMAL precision/scale to enforce
 * @return a valid random HiveDecimal for the given type
 */
public static HiveDecimal getRandHiveDecimal(Random r, DecimalTypeInfo decimalTypeInfo) {
  while (true) {
    final int precision = 1 + r.nextInt(18);
    final int scale = r.nextInt(precision + 1);
    final int integerDigits = precision - scale;

    final StringBuilder text = new StringBuilder();
    if (r.nextBoolean()) {
      text.append("-");
    }
    // No integer digits means a leading zero before the decimal point.
    if (integerDigits == 0) {
      text.append("0");
    } else {
      text.append(RandomTypeUtil.getRandString(r, DECIMAL_CHARS, integerDigits));
    }
    if (scale != 0) {
      text.append(".");
      text.append(RandomTypeUtil.getRandString(r, DECIMAL_CHARS, scale));
    }

    final HiveDecimal candidate = HiveDecimal.enforcePrecisionScale(
        HiveDecimal.create(text.toString()),
        decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
    if (candidate != null) {
      return candidate;
    }
    // Enforcement rejected the candidate; loop and try another one.
  }
}
/**
 * Fills the decimal precision and scale into decimalColumnVector. If the data in
 * Parquet is in decimal, the precision and scale come in from decimalMetadata. If parquet
 * is not in decimal, then this call is made because HMS shows the type as decimal. So, the
 * precision and scale are picked from hiveType.
 *
 * @param decimalMetadata Parquet decimal metadata, or null when the Parquet type is not decimal
 * @param decimalColumnVector the column vector whose precision/scale fields are set
 * @throws UnsupportedOperationException when neither Parquet metadata nor the Hive type
 *         provides decimal precision/scale
 */
private void fillDecimalPrecisionScale(DecimalMetadata decimalMetadata,
    DecimalColumnVector decimalColumnVector) {
  if (decimalMetadata != null) {
    // Fix: use the metadata that was passed in. The original null-checked the
    // parameter but then re-fetched type.asPrimitiveType().getDecimalMetadata()
    // twice, ignoring the argument it had just validated.
    decimalColumnVector.precision = (short) decimalMetadata.getPrecision();
    decimalColumnVector.scale = (short) decimalMetadata.getScale();
  } else if (TypeInfoUtils.getBaseName(hiveType.getTypeName())
      .equalsIgnoreCase(serdeConstants.DECIMAL_TYPE_NAME)) {
    // Parquet carries no decimal metadata; trust the Hive (HMS) decimal type.
    decimalColumnVector.precision = (short) ((DecimalTypeInfo) hiveType).getPrecision();
    decimalColumnVector.scale = (short) ((DecimalTypeInfo) hiveType).getScale();
  } else {
    throw new UnsupportedOperationException(
        "The underlying Parquet type cannot be converted to Hive Decimal type: " + type);
  }
}
}
outputDecimalTypeInfo.getPrecision(), outputDecimalTypeInfo.getScale()); isNull = !myagg.regularDecimalSum.isSet(); } else {
result.mutateDivide(temp); result.mutateEnforcePrecisionScale( outputDecimalTypeInfo.getPrecision(), outputDecimalTypeInfo.getScale()); if (!result.isSet()) { outputColVector.noNulls = false;
((DecimalTypeInfo) realHiveType).getPrecision() : 0; int hiveScale = (typeName.equalsIgnoreCase(serdeConstants.DECIMAL_TYPE_NAME)) ? ((DecimalTypeInfo) realHiveType).getScale() : 0;
case DECIMAL: DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo; typeBuilder.setPrecision(decimalTypeInfo.getPrecision()).setScale(decimalTypeInfo.getScale()); break; default:
((DecimalTypeInfo) realHiveType).getPrecision() : 0; int hiveScale = (typeName.equalsIgnoreCase(serdeConstants.DECIMAL_TYPE_NAME)) ? ((DecimalTypeInfo) realHiveType).getScale() : 0;
return TypeDescription.createDecimal() .withScale(dinfo.getScale()) .withPrecision(dinfo.getPrecision());
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; typeDesc = new TypeDesc(TypeDesc.Type.DECIMAL, decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale()); break; default: