/**
 * Multiplies the two decimal operands and returns the product wrapped in the
 * reusable {@code decimalWritable}. A null product (the value HiveDecimal
 * returns for results it cannot represent — guarded below) is propagated as a
 * Java null so callers can emit SQL NULL.
 */
@Override
protected HiveDecimalWritable evaluate(HiveDecimal left, HiveDecimal right) {
  HiveDecimal product = left.multiply(right);
  if (product != null) {
    decimalWritable.set(product);
    return decimalWritable;
  }
  return null;
}
/**
 * Computes {@code left * right} and returns it via the shared
 * {@code decimalWritable} holder. When the multiplication produces a null
 * HiveDecimal (unrepresentable result — see the guard below), null is
 * returned so the caller can yield SQL NULL.
 */
@Override
protected HiveDecimalWritable evaluate(HiveDecimal left, HiveDecimal right) {
  final HiveDecimal product = left.multiply(right);
  if (product == null) {
    return null;
  }
  decimalWritable.set(product);
  return decimalWritable;
}
/**
 * Multiplies this value by {@code right} in place, rescaling the result to
 * {@code newScale}.
 *
 * @param right
 *          the multiplier; it is not modified by this call.
 * @param newScale
 *          scale of the result. must be 0 or positive.
 * @throws ArithmeticException
 *           if the product is null (unrepresentable) or its unscaled value
 *           exceeds 10^38; the vectorized code path catches this and yields
 *           a SQL NULL value.
 */
public void multiplyDestructive(Decimal128 right, short newScale) {
  HiveDecimal product =
      HiveDecimal.create(this.toBigDecimal()).multiply(HiveDecimal.create(right.toBigDecimal()));
  // A null product means HiveDecimal could not represent the result; surface
  // it as an ArithmeticException so vectorized callers can emit SQL NULL.
  if (product == null) {
    throw new ArithmeticException("null multiply result");
  }
  this.update(product.bigDecimalValue().toPlainString(), newScale);
  this.unscaledValue.throwIfExceedsTenToThirtyEight();
}
/**
 * Places {@code exprValue} into one of {@code numBuckets} equal-width buckets
 * spanning {@code minValue}..{@code maxValue} (width_bucket semantics).
 * Values outside the range map to the underflow bucket 0 or the overflow
 * bucket {@code numBuckets + 1}. Works for both ascending
 * (min &lt; max) and descending (min &gt; max) ranges.
 *
 * @param exprValue value to classify.
 * @param minValue  first range bound.
 * @param maxValue  second range bound; must differ from {@code minValue}.
 * @param numBuckets number of buckets; must be positive.
 * @return the shared {@code output} writable holding the bucket index.
 */
private IntWritable evaluate(HiveDecimal exprValue, HiveDecimal minValue, HiveDecimal maxValue,
    int numBuckets) {
  Preconditions.checkArgument(numBuckets > 0,
      "numBuckets in width_bucket function must be above 0");
  Preconditions.checkArgument(!maxValue.equals(minValue),
      "maxValue cannot be equal to minValue in width_bucket function");
  final boolean ascending = maxValue.compareTo(minValue) > 0;
  if (ascending) {
    if (exprValue.compareTo(minValue) < 0) {
      output.set(0);                 // below the range -> underflow bucket
    } else if (exprValue.compareTo(maxValue) >= 0) {
      output.set(numBuckets + 1);    // at/above the (exclusive) upper bound -> overflow bucket
    } else {
      output.set(bucketIndex(exprValue.subtract(minValue), maxValue.subtract(minValue),
          numBuckets));
    }
  } else {
    if (exprValue.compareTo(minValue) > 0) {
      output.set(0);                 // above the range (descending) -> underflow bucket
    } else if (exprValue.compareTo(maxValue) <= 0) {
      output.set(numBuckets + 1);    // at/below the far bound (descending) -> overflow bucket
    } else {
      output.set(bucketIndex(minValue.subtract(exprValue), minValue.subtract(exprValue == null
          ? null : maxValue), numBuckets));
    }
  }
  return output;
}

/**
 * Bucket number for an in-range value: numBuckets * distance / range + 1,
 * evaluated in HiveDecimal arithmetic and truncated to int.
 */
private int bucketIndex(HiveDecimal distance, HiveDecimal range, int numBuckets) {
  return HiveDecimal.create(numBuckets).multiply(distance).divide(range)
      .add(HiveDecimal.ONE).intValue();
}
// Fragment: opening of an if-block; the closing brace lies outside this chunk.
// Handles the "repeating value" case of a vectorized aggregation: vector[0]
// stands for every row in the batch. When that single value is non-null, fold
// it in for the whole batch at once — add value * batchSize to the running sum
// and advance the row count by batchSize, equivalent to batchSize separate
// additions. The sum+count pair suggests an AVG accumulator — TODO confirm
// against the enclosing aggregate class.
if (inputVector.noNulls || !inputVector.isNull[0]) {
  HiveDecimal value = vector[0].getHiveDecimal();
  HiveDecimal multiple = value.multiply(HiveDecimal.create(batchSize));
  myagg.sum.mutateAdd(multiple);
  myagg.count += batchSize;
// Fragment: opening of an if-block; the closing brace lies outside this chunk.
// "Repeating value" fast path of a vectorized aggregation: vector[0] represents
// the entire batch, so when it is non-null the batch is folded in with one
// multiply (value * batchSize) added to the sum, and the count advanced by
// batchSize. Presumably part of an AVG-style accumulator (sum plus count) —
// TODO confirm against the enclosing aggregate class.
if (inputVector.noNulls || !inputVector.isNull[0]) {
  HiveDecimal value = vector[0].getHiveDecimal();
  HiveDecimal multiple = value.multiply(HiveDecimal.create(batchSize));
  myagg.sum.mutateAdd(multiple);
  myagg.count += batchSize;
  break;
// Fragment: interior of a switch over column types (neighboring cases are
// outside this chunk). For DECIMAL columns the aggregate of a constant over
// rowCnt rows is constant * rowCnt, computed with HiveDecimal arithmetic.
case DECIMAL:
  oneRow.add(HiveDecimal.create(constant).multiply(HiveDecimal.create(rowCnt)));
  break;
default:
  break;
// Fragment: one case of a type switch (surrounding cases not visible here).
// DECIMAL branch: scales the constant by the row count — constant * rowCnt —
// using HiveDecimal arithmetic, and appends the product to the result row.
case DECIMAL:
  oneRow.add(HiveDecimal.create(constant).multiply(HiveDecimal.create(rowCnt)));
  break;
default:
// Fragment: folds a value repeated batchSize times into the aggregate sum in
// one step (value * batchSize) instead of batchSize separate additions.
HiveDecimal multiple = value.multiply(HiveDecimal.create(batchSize));
myagg.sum.mutateAdd(multiple);
// Fragment: merge phase — folds a partial sum repeated batchSize times into
// the merged total as sum * batchSize rather than batchSize additions.
HiveDecimal multiple = sum.multiply(HiveDecimal.create(batchSize));
myagg.mergeSum.mutateAdd(multiple);
// Fragment: merge step of a distributed aggregate — a repeated partial sum is
// incorporated in one multiply (sum * batchSize) and added to mergeSum.
HiveDecimal multiple = sum.multiply(HiveDecimal.create(batchSize));
myagg.mergeSum.mutateAdd(multiple);
// Fragment: batch-at-once accumulation — adds value * batchSize to the running
// sum and advances the row count by batchSize, equivalent to processing each
// of the batch's batchSize identical rows individually.
HiveDecimal multiple = value.multiply(HiveDecimal.create(batchSize));
myagg.sum.mutateAdd(multiple);
myagg.count += batchSize;
// Fragment: adds a batch's worth of a repeated value to the aggregate sum with
// a single multiply (value * batchSize) followed by one in-place addition.
HiveDecimal multiple = value.multiply(HiveDecimal.create(batchSize));
myagg.sum.mutateAdd(multiple);
public static void multiplyChecked(int i, HiveDecimal left, HiveDecimal right, DecimalColumnVector outputColVector) { try { outputColVector.set(i, left.multiply(right)); } catch (ArithmeticException e) { // catch on overflow outputColVector.noNulls = false; outputColVector.isNull[i] = true; } }
/**
 * Returns {@code left * right} in the reusable {@code decimalWritable}.
 * HiveDecimal may hand back null for a product it cannot represent; that case
 * is checked below and forwarded as a Java null so the caller produces
 * SQL NULL.
 */
@Override
protected HiveDecimalWritable evaluate(HiveDecimal left, HiveDecimal right) {
  final HiveDecimal product = left.multiply(right);
  if (product == null) {
    return null;
  }
  decimalWritable.set(product);
  return decimalWritable;
}
// Multiplies the decimal at row i by the tenE9 field (name suggests 1e9,
// i.e. a seconds-to-nanoseconds style scaling — TODO confirm against the
// field's initializer) and writes the truncated long into the output vector.
// A null product — the value HiveDecimal returns when it cannot represent the
// result — marks the output row as SQL NULL.
@Override
protected void func(LongColumnVector outV, DecimalColumnVector inV, int i) {
  HiveDecimal result = inV.vector[i].getHiveDecimal().multiply(tenE9);
  if (result == null) {
    // Unrepresentable product: emit NULL and clear the vector's no-nulls flag.
    outV.noNulls = false;
    outV.isNull[i] = true;
  } else {
    outV.vector[i] = result.longValue();
  }
}
} // closes the enclosing class; its header is outside this chunk
public static void multiplyChecked(int i, HiveDecimalWritable left, HiveDecimal right, DecimalColumnVector outputColVector) { try { outputColVector.set(i, left.getHiveDecimal().multiply(right)); } catch (ArithmeticException e) { // catch on overflow outputColVector.noNulls = false; outputColVector.isNull[i] = true; } }
public static void multiplyChecked(int i, HiveDecimal left, HiveDecimalWritable right, DecimalColumnVector outputColVector) { try { outputColVector.set(i, left.multiply(right.getHiveDecimal())); } catch (ArithmeticException e) { // catch on overflow outputColVector.noNulls = false; outputColVector.isNull[i] = true; } }
public static void multiplyChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right, DecimalColumnVector outputColVector) { try { outputColVector.set(i, left.getHiveDecimal().multiply(right.getHiveDecimal())); } catch (ArithmeticException e) { // catch on overflow outputColVector.noNulls = false; outputColVector.isNull[i] = true; } }
/**
 * In-place multiplication: replaces this value with {@code this * right},
 * rescaled to {@code newScale}.
 *
 * @param right
 *          right operand; it is left unmodified.
 * @param newScale
 *          scale of the result. must be 0 or positive.
 * @throws ArithmeticException
 *           when HiveDecimal yields a null (unrepresentable) product, or the
 *           unscaled result exceeds 10^38; the vectorized code path catches
 *           this and converts it to a SQL NULL value.
 */
public void multiplyDestructive(Decimal128 right, short newScale) {
  HiveDecimal thisAsHD = HiveDecimal.create(this.toBigDecimal());
  HiveDecimal rightAsHD = HiveDecimal.create(right.toBigDecimal());
  HiveDecimal product = thisAsHD.multiply(rightAsHD);
  // HiveDecimal signals an unrepresentable result with null; convert that to
  // an ArithmeticException so vectorized callers can yield SQL NULL.
  if (product == null) {
    throw new ArithmeticException("null multiply result");
  }
  this.update(product.bigDecimalValue().toPlainString(), newScale);
  this.unscaledValue.throwIfExceedsTenToThirtyEight();
}