@Override
public void infer(ILogicalExpression expr, IFunctionDescriptor fd, IVariableTypeEnvironment context,
        CompilerProperties compilerProps) throws AlgebricksException {
    // Forwards the first opaque parameter carried on the function-call expression
    // into the descriptor's immutable state.
    AbstractFunctionCallExpression funCallExpr = (AbstractFunctionCallExpression) expr;
    Object[] samplingParameters = funCallExpr.getOpaqueParameters();
    // NOTE(review): assumes at least one opaque parameter was attached by the
    // rewriter that created this call — confirm at the expression's origin.
    fd.setImmutableStates(samplingParameters[0]);
}
};
/**
 * Looks up the cast function descriptor (strict or lax variant) and primes it
 * with the enforced/original item types and the current source location.
 *
 * @param strictCast whether to use the strict cast ({@code CAST_TYPE}) instead
 *                   of the lax variant ({@code CAST_TYPE_LAX})
 * @return a configured cast function descriptor
 * @throws AlgebricksException if the lookup fails
 */
protected IFunctionDescriptor createCastFunction(boolean strictCast) throws AlgebricksException {
    IFunctionDescriptor fd;
    if (strictCast) {
        fd = metadataProvider.getFunctionManager().lookupFunction(BuiltinFunctions.CAST_TYPE);
    } else {
        fd = metadataProvider.getFunctionManager().lookupFunction(BuiltinFunctions.CAST_TYPE_LAX);
    }
    fd.setSourceLocation(sourceLoc);
    // The cast evaluator needs both the target (enforced) type and the source item type.
    fd.setImmutableStates(enforcedItemType, itemType);
    return fd;
}
@Override
public void infer(ILogicalExpression expr, IFunctionDescriptor fd, IVariableTypeEnvironment context,
        CompilerProperties compilerProps) throws AlgebricksException {
    // Inspect the inferred type of the first argument and configure the
    // descriptor accordingly.
    AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
    IAType t = (IAType) context.getType(fce.getArguments().get(0).getValue());
    ATypeTag typeTag = t.getTypeTag();
    switch (typeTag) {
        case OBJECT: {
            // Known record type: pass it through as-is.
            fd.setImmutableStates(t);
            break;
        }
        case ANY: {
            // Unknown at compile time: fall back to a fully open record type.
            fd.setImmutableStates(RecordUtil.FULLY_OPEN_RECORD_TYPE);
            break;
        }
        default: {
            if (strict) {
                // Strict mode rejects non-record arguments outright.
                throw new NotImplementedException(fd.getIdentifier().getName() + " for data of type " + t);
            } else {
                // Lax mode signals "no usable type" with a single null state.
                fd.setImmutableStates(new Object[] { null });
            }
            break;
        }
    }
}
}
/**
 * Creates an aggregate evaluator factory for the given aggregate call.
 * Returns {@code null} for serializable aggregates (presumably handled via the
 * serializable factory path — see createSerializableAggregateFunctionFactory).
 *
 * @throws IllegalStateException if the descriptor is neither AGGREGATE nor SERIALAGGREGATE
 */
@Override
public IAggregateEvaluatorFactory createAggregateFunctionFactory(AggregateFunctionCallExpression expr,
        IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
        throws AlgebricksException {
    // Compile the argument expressions first, then dispatch on descriptor kind.
    IScalarEvaluatorFactory[] argFactories = codegenArguments(expr, env, inputSchemas, context);
    IFunctionDescriptor fd = resolveFunction(expr, env, context);
    FunctionDescriptorTag tag = fd.getFunctionDescriptorTag();
    if (tag == FunctionDescriptorTag.SERIALAGGREGATE) {
        return null;
    }
    if (tag == FunctionDescriptorTag.AGGREGATE) {
        return fd.createAggregateEvaluatorFactory(argFactories);
    }
    throw new IllegalStateException("Invalid function descriptor " + fd.getFunctionDescriptorTag() + " expected "
            + FunctionDescriptorTag.SERIALAGGREGATE + " or " + FunctionDescriptorTag.AGGREGATE);
}
/**
 * Creates a serializable aggregate evaluator factory for the given call.
 * A plain AGGREGATE descriptor is converted to its serializable counterpart
 * when one is registered; a SERIALAGGREGATE descriptor is used directly.
 *
 * @throws AlgebricksException if the aggregate has no serializable counterpart
 * @throws IllegalStateException if the descriptor tag is neither kind of aggregate
 */
@Override
public ISerializedAggregateEvaluatorFactory createSerializableAggregateFunctionFactory(
        AggregateFunctionCallExpression expr, IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas,
        JobGenContext context) throws AlgebricksException {
    IScalarEvaluatorFactory[] args = codegenArguments(expr, env, inputSchemas, context);
    IFunctionDescriptor fd = resolveFunction(expr, env, context);
    switch (fd.getFunctionDescriptorTag()) {
        case AGGREGATE: {
            if (BuiltinFunctions.isAggregateFunctionSerializable(fd.getIdentifier())) {
                // Build the serializable variant of the same aggregate and
                // re-resolve so type inference runs on the new expression.
                AggregateFunctionCallExpression serialAggExpr = BuiltinFunctions
                        .makeSerializableAggregateFunctionExpression(fd.getIdentifier(), expr.getArguments());
                IFunctionDescriptor afdd = resolveFunction(serialAggExpr, env, context);
                return afdd.createSerializableAggregateEvaluatorFactory(args);
            } else {
                throw new AlgebricksException(
                        "Trying to create a serializable aggregate from a non-serializable aggregate function descriptor. (fi="
                                + expr.getFunctionIdentifier() + ")");
            }
        }
        case SERIALAGGREGATE: {
            // Already serializable: use it as-is.
            return fd.createSerializableAggregateEvaluatorFactory(args);
        }
        default:
            throw new IllegalStateException(
                    "Invalid function descriptor " + fd.getFunctionDescriptorTag() + " expected "
                            + FunctionDescriptorTag.SERIALAGGREGATE + " or " + FunctionDescriptorTag.AGGREGATE);
    }
}
new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(), abvs.getLength())); IFunctionDescriptor fDesc = functionManager.lookupFunction(BuiltinFunctions.FIELD_ACCESS_BY_INDEX); fDesc.setSourceLocation(sourceLoc); fDesc.setImmutableStates(recType); return fDesc.createEvaluatorFactory( new IScalarEvaluatorFactory[] { recordEvalFactory, fldIndexEvalFactory }); new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(), abvs.getLength())); IFunctionDescriptor fDesc = functionManager.lookupFunction(BuiltinFunctions.FIELD_ACCESS_BY_NAME); fDesc.setSourceLocation(sourceLoc); return fDesc.createEvaluatorFactory( new IScalarEvaluatorFactory[] { recordEvalFactory, fldNameEvalFactory }); fDesc.setSourceLocation(sourceLoc); fDesc.setImmutableStates(recType, fldName); return fDesc.createEvaluatorFactory(new IScalarEvaluatorFactory[] { recordEvalFactory });
@Test
public void testCastLax() throws Exception {
    // Build a lax-cast descriptor configured for this fixture's target/input types.
    IFunctionDescriptor funcDesc = CastTypeLaxDescriptor.FACTORY.createFunctionDescriptor();
    funcDesc.setImmutableStates(targetType, inType);
    // Serialize the input value so it can be fed in as a constant argument.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    AObjectSerializerDeserializer serDe = AObjectSerializerDeserializer.INSTANCE;
    serDe.serialize(inValue, new DataOutputStream(baos));
    ConstantEvalFactory argEvalFactory = new ConstantEvalFactory(baos.toByteArray());
    IScalarEvaluatorFactory evalFactory =
            funcDesc.createEvaluatorFactory(new IScalarEvaluatorFactory[] { argEvalFactory });
    IHyracksTaskContext ctx = mock(IHyracksTaskContext.class);
    IScalarEvaluator evaluator = evalFactory.createScalarEvaluator(ctx);
    VoidPointable resultPointable = new VoidPointable();
    // The evaluator ignores the tuple reference here, so null is acceptable.
    evaluator.evaluate(null, resultPointable);
    // Deserialize the evaluator's output and compare against the expected value.
    ByteArrayInputStream bais = new ByteArrayInputStream(resultPointable.getByteArray(),
            resultPointable.getStartOffset(), resultPointable.getLength());
    IAObject resultValue = serDe.deserialize(new DataInputStream(bais));
    Assert.assertTrue(String.format("Expected: %s, actual: %s", targetValue, resultValue),
            targetValue.deepEqual(resultValue));
}
/**
 * Creates a scalar evaluator factory for a scalar function call. External
 * (user-defined) functions are resolved through the external descriptor
 * provider; everything else goes through the local function manager.
 */
private IScalarEvaluatorFactory createScalarFunctionEvaluatorFactory(AbstractFunctionCallExpression expr,
        IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
        throws AlgebricksException {
    IScalarEvaluatorFactory[] argFactories = codegenArguments(expr, env, inputSchemas, context);
    IFunctionDescriptor fd;
    if (expr.getFunctionInfo() instanceof IExternalFunctionInfo) {
        fd = ExternalFunctionDescriptorProvider.getExternalFunctionDescriptor(
                (IExternalFunctionInfo) expr.getFunctionInfo(), (ICcApplicationContext) context.getAppContext());
    } else {
        fd = resolveFunction(expr, env, context);
    }
    return fd.createEvaluatorFactory(argFactories);
}
/**
 * Looks up the descriptor for a function call, attaches the call's source
 * location, and — when a type inferer is registered for the function — runs
 * compile-time type inference to populate the descriptor's immutable state.
 */
private IFunctionDescriptor resolveFunction(AbstractFunctionCallExpression expr, IVariableTypeEnvironment env,
        JobGenContext context) throws AlgebricksException {
    FunctionIdentifier fnId = expr.getFunctionIdentifier();
    IFunctionDescriptor fd = functionManager.lookupFunction(fnId);
    fd.setSourceLocation(expr.getSourceLocation());
    IFunctionTypeInferer fnTypeInfer = functionManager.lookupFunctionTypeInferer(fnId);
    if (fnTypeInfer != null) {
        // Inference may need compiler properties (e.g. string offset settings).
        CompilerProperties compilerProps = ((IApplicationContext) context.getAppContext()).getCompilerProperties();
        fnTypeInfer.infer(expr, fd, env, compilerProps);
    }
    return fd;
}
}
/**
 * Builds the lookup tables for a function collection: descriptor factories
 * keyed by (function identifier, arity), and optional type inferers keyed by
 * function identifier.
 */
public FunctionManager(FunctionCollection functionCollection) {
    Map<Pair<FunctionIdentifier, Integer>, IFunctionDescriptorFactory> fnMap = new HashMap<>();
    Map<FunctionIdentifier, IFunctionTypeInferer> infererMap = new HashMap<>();
    for (IFunctionDescriptorFactory factory : functionCollection.getFunctionDescriptorFactories()) {
        // A throwaway descriptor is created solely to obtain the identifier.
        FunctionIdentifier fnId = factory.createFunctionDescriptor().getIdentifier();
        fnMap.put(new Pair<>(fnId, fnId.getArity()), factory);
        IFunctionTypeInferer inferer = factory.createFunctionTypeInferer();
        if (inferer != null) {
            infererMap.put(fnId, inferer);
        }
    }
    this.functions = fnMap;
    this.typeInferers = infererMap;
}
/**
 * Creates a running-aggregate evaluator factory for a stateful function call
 * by compiling its arguments and delegating to the resolved descriptor.
 */
@Override
public IRunningAggregateEvaluatorFactory createRunningAggregateFunctionFactory(StatefulFunctionCallExpression expr,
        IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
        throws AlgebricksException {
    IScalarEvaluatorFactory[] argFactories = codegenArguments(expr, env, inputSchemas, context);
    IFunctionDescriptor fd = resolveFunction(expr, env, context);
    return fd.createRunningAggregateEvaluatorFactory(argFactories);
}
new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(), abvs.getLength())); IFunctionDescriptor fDesc = functionManager.lookupFunction(BuiltinFunctions.FIELD_ACCESS_BY_INDEX); fDesc.setSourceLocation(sourceLoc); fDesc.setImmutableStates(recType); IScalarEvaluatorFactory evalFactory = fDesc.createEvaluatorFactory( new IScalarEvaluatorFactory[] { recordEvalFactory, fldIndexEvalFactory }); IFunctionInfo finfoAccess = fDesc.setSourceLocation(sourceLoc); fDesc.setImmutableStates(recType, fldName); IScalarEvaluatorFactory evalFactory = fDesc.createEvaluatorFactory(new IScalarEvaluatorFactory[] { recordEvalFactory }); IFunctionInfo finfoAccess = BuiltinFunctions.getAsterixFunctionInfo(BuiltinFunctions.FIELD_ACCESS_NESTED);
/**
 * Builds an Algebricks meta operator that casts the record field of each
 * incoming tuple (to the enforced type), passing primary keys — and the meta
 * part, when present — through unchanged.
 */
protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec, DatasetType dsType,
        boolean strictCast) throws AlgebricksException {
    int[] outColumns = new int[1];
    // Projection: primary keys + record (+ meta part when the dataset has one).
    int[] projectionList = new int[(dataset.hasMetaPart() ? 2 : 1) + numPrimaryKeys];
    int recordIdx;
    //external datascan operator returns a record as the first field, instead of the last in internal case
    if (dsType == DatasetType.EXTERNAL) {
        recordIdx = 0;
        outColumns[0] = 0;
    } else {
        recordIdx = numPrimaryKeys;
        outColumns[0] = numPrimaryKeys;
    }
    // Project keys and record in place (numPrimaryKeys + 1 fields total).
    for (int i = 0; i <= numPrimaryKeys; i++) {
        projectionList[i] = i;
    }
    if (dataset.hasMetaPart()) {
        projectionList[numPrimaryKeys + 1] = numPrimaryKeys + 1;
    }
    // The cast function reads the record column and writes the cast result
    // into the same position (outColumns[0]).
    IScalarEvaluatorFactory[] castEvalFact =
            new IScalarEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) };
    IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1];
    sefs[0] = createCastFunction(strictCast).createEvaluatorFactory(castEvalFact);
    AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
    castAssign.setSourceLocation(sourceLoc);
    return new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { castAssign },
            new RecordDescriptor[] { enforcedRecDesc });
}
@Override
public void infer(ILogicalExpression expr, IFunctionDescriptor fd, IVariableTypeEnvironment context,
        CompilerProperties compilerProps) throws AlgebricksException {
    // Stores the expression's own inferred type as the descriptor's state.
    fd.setImmutableStates(context.getType(expr));
}
};
/**
 * Variant of {@code createCastOp} for tuples that carry leading tag fields:
 * the record column and all projections are shifted by NUM_TAG_FIELDS.
 * Only internal datasets are supported here.
 */
@Override
protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec, DatasetType dsType,
        boolean strictCast) throws AlgebricksException {
    int[] outColumns = new int[1];
    // tags(2) + primary keys + record + meta part(?)
    int[] projectionList = new int[NUM_TAG_FIELDS + (dataset.hasMetaPart() ? 2 : 1) + numPrimaryKeys];
    int recordIdx = NUM_TAG_FIELDS + numPrimaryKeys;
    //here we only consider internal dataset
    assert dsType == DatasetType.INTERNAL;
    outColumns[0] = NUM_TAG_FIELDS + numPrimaryKeys;
    int projCount = 0;
    // Pass the tag fields through first.
    for (int i = 0; i < NUM_TAG_FIELDS; i++) {
        projectionList[projCount++] = i;
    }
    //set primary keys and the record
    for (int i = 0; i <= numPrimaryKeys; i++) {
        projectionList[projCount++] = NUM_TAG_FIELDS + i;
    }
    if (dataset.hasMetaPart()) {
        projectionList[NUM_TAG_FIELDS + numPrimaryKeys + 1] = NUM_TAG_FIELDS + numPrimaryKeys + 1;
    }
    // Cast reads the record column and overwrites it in place (outColumns[0]).
    IScalarEvaluatorFactory[] castEvalFact =
            new IScalarEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) };
    IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1];
    sefs[0] = createCastFunction(strictCast).createEvaluatorFactory(castEvalFact);
    AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
    castAssign.setSourceLocation(sourceLoc);
    return new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { castAssign },
            new RecordDescriptor[] { getTaggedRecordDescriptor(enforcedRecDesc) });
}
@Override
public void infer(ILogicalExpression expr, IFunctionDescriptor fd, IVariableTypeEnvironment context,
        CompilerProperties compilerProps) {
    // The descriptor only needs the compiler's configured string offset.
    fd.setImmutableStates(compilerProps.getStringOffset());
}
};
@Override
public void infer(ILogicalExpression expr, IFunctionDescriptor fd, IVariableTypeEnvironment context,
        CompilerProperties compilerProps) throws AlgebricksException {
    // Forwards the first two opaque parameters of the call into the descriptor.
    AbstractFunctionCallExpression funCallExpr = (AbstractFunctionCallExpression) expr;
    Object[] sortingParameters = funCallExpr.getOpaqueParameters();
    // NOTE(review): assumes exactly two opaque parameters were attached by the
    // rewriter that created this call — confirm at the expression's origin.
    fd.setImmutableStates(sortingParameters[0], sortingParameters[1]);
}
};
@Override
public void infer(ILogicalExpression expr, IFunctionDescriptor fd, IVariableTypeEnvironment context,
        CompilerProperties compilerProps) throws AlgebricksException {
    // Supplies the expression's record type plus the set of open fields
    // computed from the call's arguments against that type.
    ARecordType rt = (ARecordType) context.getType(expr);
    fd.setImmutableStates(rt, computeOpenFields((AbstractFunctionCallExpression) expr, rt));
}
@Override
public void infer(ILogicalExpression expr, IFunctionDescriptor fd, IVariableTypeEnvironment context,
        CompilerProperties compilerProps) throws AlgebricksException {
    // Supplies the actual (nullable/union-stripped) type of the first argument.
    AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
    IAType t = (IAType) context.getType(fce.getArguments().get(0).getValue());
    fd.setImmutableStates(TypeComputeUtils.getActualType(t));
}
};
@Override
public void infer(ILogicalExpression expr, IFunctionDescriptor fd, IVariableTypeEnvironment context,
        CompilerProperties compilerProps) throws AlgebricksException {
    // Supplies the cast's required (target) type and the inferred type of the
    // input argument, in that order.
    AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
    IAType rt = TypeCastUtils.getRequiredType(funcExpr);
    IAType it = (IAType) context.getType(funcExpr.getArguments().get(0).getValue());
    fd.setImmutableStates(rt, it);
}
}