/**
 * Builds a scalar evaluator factory for the given function call expression.
 * External (UDF) functions are routed through the external descriptor
 * provider; built-in functions are resolved locally.
 *
 * @param expr the function call to compile
 * @param env type environment used while compiling the arguments
 * @param inputSchemas operator schemas the argument evaluators read from
 * @param context job generation context (also supplies the CC app context)
 * @return an evaluator factory for the function applied to its arguments
 * @throws AlgebricksException if argument codegen or function resolution fails
 */
private IScalarEvaluatorFactory createScalarFunctionEvaluatorFactory(AbstractFunctionCallExpression expr,
        IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
        throws AlgebricksException {
    // Compile the argument expressions first; the descriptor wraps these.
    IScalarEvaluatorFactory[] argFactories = codegenArguments(expr, env, inputSchemas, context);
    IFunctionDescriptor descriptor;
    if (expr.getFunctionInfo() instanceof IExternalFunctionInfo) {
        // Externally-declared function: descriptor comes from the provider,
        // which needs the cluster-controller application context.
        descriptor = ExternalFunctionDescriptorProvider.getExternalFunctionDescriptor(
                (IExternalFunctionInfo) expr.getFunctionInfo(), (ICcApplicationContext) context.getAppContext());
    } else {
        // Built-in function: resolve through the local function manager.
        descriptor = resolveFunction(expr, env, context);
    }
    return descriptor.createEvaluatorFactory(argFactories);
}
protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec, DatasetType dsType, boolean strictCast) throws AlgebricksException { int[] outColumns = new int[1]; int[] projectionList = new int[(dataset.hasMetaPart() ? 2 : 1) + numPrimaryKeys]; int recordIdx; //external datascan operator returns a record as the first field, instead of the last in internal case if (dsType == DatasetType.EXTERNAL) { recordIdx = 0; outColumns[0] = 0; } else { recordIdx = numPrimaryKeys; outColumns[0] = numPrimaryKeys; } for (int i = 0; i <= numPrimaryKeys; i++) { projectionList[i] = i; } if (dataset.hasMetaPart()) { projectionList[numPrimaryKeys + 1] = numPrimaryKeys + 1; } IScalarEvaluatorFactory[] castEvalFact = new IScalarEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) }; IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1]; sefs[0] = createCastFunction(strictCast).createEvaluatorFactory(castEvalFact); AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList); castAssign.setSourceLocation(sourceLoc); return new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { castAssign }, new RecordDescriptor[] { enforcedRecDesc }); }
// NOTE(review): this span is a fragment — it begins mid-method (`fDesc`,
// `recType`, `fldName` and the evaluator factories are declared outside the
// visible region) and splices together pieces of several field-access code
// paths, including statements that follow a `return` and are unreachable as
// written. Code left byte-identical; only comments added. Verify against the
// full source file.
// Path 1 (presumably field-access-by-index): record type is baked into the
// descriptor; record and field-index evaluators are runtime arguments.
fDesc.setSourceLocation(sourceLoc);
fDesc.setImmutableStates(recType);
return fDesc.createEvaluatorFactory(
        new IScalarEvaluatorFactory[] { recordEvalFactory, fldIndexEvalFactory });
// Path 2 (field-access-by-name): unreachable here — follows the return
// above; presumably belongs to a different branch in the full method.
IFunctionDescriptor fDesc = functionManager.lookupFunction(BuiltinFunctions.FIELD_ACCESS_BY_NAME);
fDesc.setSourceLocation(sourceLoc);
return fDesc.createEvaluatorFactory(
        new IScalarEvaluatorFactory[] { recordEvalFactory, fldNameEvalFactory });
// Path 3: record type AND field name are baked into the descriptor, so only
// the record evaluator remains as a runtime argument.
fDesc.setSourceLocation(sourceLoc);
fDesc.setImmutableStates(recType, fldName);
return fDesc.createEvaluatorFactory(new IScalarEvaluatorFactory[] { recordEvalFactory });
/**
 * Runs the lax cast function on a serialized constant input and checks that
 * the deserialized result deep-equals the expected target value.
 */
@Test
public void testCastLax() throws Exception {
    // Build the lax-cast descriptor and bake in the source/target types.
    IFunctionDescriptor castDescriptor = CastTypeLaxDescriptor.FACTORY.createFunctionDescriptor();
    castDescriptor.setImmutableStates(targetType, inType);
    // Serialize the input value so it can be fed in as a constant argument.
    AObjectSerializerDeserializer serDe = AObjectSerializerDeserializer.INSTANCE;
    ByteArrayOutputStream serialized = new ByteArrayOutputStream();
    serDe.serialize(inValue, new DataOutputStream(serialized));
    IScalarEvaluatorFactory evalFactory = castDescriptor.createEvaluatorFactory(
            new IScalarEvaluatorFactory[] { new ConstantEvalFactory(serialized.toByteArray()) });
    IScalarEvaluator evaluator = evalFactory.createScalarEvaluator(mock(IHyracksTaskContext.class));
    // The argument is a constant, so no input tuple is supplied.
    VoidPointable result = new VoidPointable();
    evaluator.evaluate(null, result);
    // Deserialize the evaluator output and compare against the expectation.
    ByteArrayInputStream resultBytes = new ByteArrayInputStream(result.getByteArray(),
            result.getStartOffset(), result.getLength());
    IAObject actual = serDe.deserialize(new DataInputStream(resultBytes));
    Assert.assertTrue(String.format("Expected: %s, actual: %s", targetValue, actual),
            targetValue.deepEqual(actual));
}
@Override protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec, DatasetType dsType, boolean strictCast) throws AlgebricksException { int[] outColumns = new int[1]; // tags(2) + primary keys + record + meta part(?) int[] projectionList = new int[NUM_TAG_FIELDS + (dataset.hasMetaPart() ? 2 : 1) + numPrimaryKeys]; int recordIdx = NUM_TAG_FIELDS + numPrimaryKeys; //here we only consider internal dataset assert dsType == DatasetType.INTERNAL; outColumns[0] = NUM_TAG_FIELDS + numPrimaryKeys; int projCount = 0; for (int i = 0; i < NUM_TAG_FIELDS; i++) { projectionList[projCount++] = i; } //set primary keys and the record for (int i = 0; i <= numPrimaryKeys; i++) { projectionList[projCount++] = NUM_TAG_FIELDS + i; } if (dataset.hasMetaPart()) { projectionList[NUM_TAG_FIELDS + numPrimaryKeys + 1] = NUM_TAG_FIELDS + numPrimaryKeys + 1; } IScalarEvaluatorFactory[] castEvalFact = new IScalarEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) }; IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1]; sefs[0] = createCastFunction(strictCast).createEvaluatorFactory(castEvalFact); AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList); castAssign.setSourceLocation(sourceLoc); return new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { castAssign }, new RecordDescriptor[] { getTaggedRecordDescriptor(enforcedRecDesc) }); }
// NOTE(review): this span is extraction residue, not a coherent block —
// `evalFactory` is declared twice in the same scope, and the (void)
// `setImmutableStates` call is assigned to an IFunctionInfo, so it cannot
// compile as written. It appears to splice together pieces of several
// field-access code paths whose enclosing method is outside the visible
// region. Code left byte-identical; only comments added. Verify against the
// full source file before changing anything here.
// Fragment of the field-access-by-index path: record type baked into the
// descriptor; record and field-index evaluators are runtime arguments.
fDesc.setSourceLocation(sourceLoc);
fDesc.setImmutableStates(recType);
IScalarEvaluatorFactory evalFactory = fDesc.createEvaluatorFactory(
        new IScalarEvaluatorFactory[] { recordEvalFactory, fldIndexEvalFactory });
// Broken line: setImmutableStates returns void — presumably two source
// statements fused together during extraction.
IFunctionInfo finfoAccess = fDesc.setImmutableStates(recType, fldName);
IScalarEvaluatorFactory evalFactory = fDesc.createEvaluatorFactory(new IScalarEvaluatorFactory[] { recordEvalFactory });
// Fragment of the nested-field-access path.
IFunctionInfo finfoAccess = BuiltinFunctions.getAsterixFunctionInfo(BuiltinFunctions.FIELD_ACCESS_NESTED);