@Override public void transientInit() throws HiveException { super.transientInit(); bytes = new byte[64]; // staging area for results, to avoid new() calls }
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Collect the IN-list values into a hash set for constant-time membership checks.
  inSet = new HashSet<Timestamp>(inListValues.length);
  for (Timestamp candidate : inListValues) {
    inSet.add(candidate);
  }
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Cache the first input's primitive category so evaluate() need not re-derive it.
  primitiveCategory = ((PrimitiveTypeInfo) inputTypeInfos[0]).getPrimitiveCategory();
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Build the pattern checker once up front; it is reused across batches.
  checker = createChecker(pattern);
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Resolve the input column's primitive category once, ahead of evaluation.
  primitiveCategory = ((PrimitiveTypeInfo) inputTypeInfos[0]).getPrimitiveCategory();
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Set up the transient calendar state used by this expression.
  initCalendar();
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Initialize the calendar; transient state is rebuilt after deserialization.
  initCalendar();
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Wrap each IN-list decimal in a writable and store it for fast membership tests.
  inSet = new HashSet<HiveDecimalWritable>(inListValues.length);
  for (HiveDecimal candidate : inListValues) {
    inSet.add(new HiveDecimalWritable(candidate));
  }
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Rebuild calendar state that is not carried through serialization.
  initCalendar();
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Capture the primitive category of input column 0 for use during evaluation.
  primitiveCategory = ((PrimitiveTypeInfo) inputTypeInfos[0]).getPrimitiveCategory();
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // NOTE(review): derived from the OUTPUT type, unlike sibling expressions that
  // read inputTypeInfos[0] — presumably intentional for this integer result.
  integerPrimitiveCategory = ((PrimitiveTypeInfo) outputTypeInfo).getPrimitiveCategory();
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Map the input type info to its column-vector type once, ahead of evaluation.
  colVectorType = VectorizationContext.getColumnVectorTypeFromTypeInfo(inputTypeInfos[0]);
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Materialize the IN-list decimals as writables in a hash set for O(1) lookups.
  inSet = new HashSet<HiveDecimalWritable>(inListValues.length);
  for (HiveDecimal value : inListValues) {
    inSet.add(new HiveDecimalWritable(value));
  }
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Load the IN-list doubles into a cuckoo-hash set sized to the list length.
  inSet = new CuckooSetDouble(inListValues.length);
  inSet.load(inListValues);
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Load the IN-list longs into a cuckoo-hash set sized to the list length.
  inSet = new CuckooSetLong(inListValues.length);
  inSet.load(inListValues);
}
@Override
public void transientInit() throws HiveException {
  super.transientInit();
  // Scratch column vectors reused across batches to hold intermediate date values.
  dateVector1 = new LongColumnVector();
  dateVector2 = new LongColumnVector();
}
@Test public void testVectorBin() throws HiveException { // test conversion of long->string VectorizedRowBatch b = getBatchForStringMath(); BytesColumnVector resultV = (BytesColumnVector) b.cols[2]; b.cols[0].noNulls = true; VectorExpression expr = new FuncBin(1, 2); expr.transientInit(); expr.evaluate(b); String s = new String(resultV.vector[1], resultV.start[1], resultV.length[1]); Assert.assertEquals("11111111", s); }
@Test
public void testCastLongToString() throws HiveException {
  // Exercise long -> decimal-string conversion via CastLongToString.
  VectorizedRowBatch batch = TestVectorMathFunctions.getBatchForStringMath();
  BytesColumnVector outputV = (BytesColumnVector) batch.cols[2];
  batch.cols[1].noNulls = true;

  VectorExpression expr = new CastLongToString(1, 2);
  expr.setInputTypeInfos(new TypeInfo[] {TypeInfoFactory.longTypeInfo});
  expr.transientInit();
  expr.evaluate(batch);

  byte[] expected = toBytes("255");
  Assert.assertEquals(0,
      StringExpr.compare(expected, 0, expected.length,
          outputV.vector[1], outputV.start[1], outputV.length[1]));
}
@Test
public void testDateAddScalarCol() throws HiveException {
  // Positive (add) direction across all supported scalar types.
  for (PrimitiveCategory scalarType1 : dateTimestampStringTypes) {
    testDateAddScalarCol(scalarType1, true);
  }

  // An unparseable scalar date string must produce a NULL output row.
  VectorExpression udf = new VectorUDFDateAddScalarCol("error".getBytes(utf8), 0, 1);
  udf.setInputTypeInfos(new TypeInfo[] {TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo});
  udf.transientInit();
  VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);
  batch.cols[0] = new LongColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  udf.evaluate(batch);
  // Fixed: original called assertEquals(actual, true), reversing JUnit's
  // (expected, actual) order; assertTrue states the intent directly.
  Assert.assertTrue(batch.cols[1].isNull[0]);
}
@Test
public void testDateSubScalarCol() throws HiveException {
  // Negative (subtract) direction across all supported scalar types.
  for (PrimitiveCategory scalarType1 : dateTimestampStringTypes) {
    testDateAddScalarCol(scalarType1, false);
  }

  // An unparseable scalar date string must produce a NULL output row.
  VectorExpression udf = new VectorUDFDateSubScalarCol("error".getBytes(utf8), 0, 1);
  udf.setInputTypeInfos(new TypeInfo[] {TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo});
  udf.transientInit();
  VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);
  batch.cols[0] = new LongColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  udf.evaluate(batch);
  // Fixed: original called assertEquals(actual, true), reversing JUnit's
  // (expected, actual) order; assertTrue states the intent directly.
  Assert.assertTrue(batch.cols[1].isNull[0]);
}