/**
 * Computes a metric identified by its key on an operation AST node.
 *
 * @param key     The key identifying the metric to be computed
 * @param node    The node on which to compute the metric
 * @param options The options of the metric
 *
 * @return The value of the metric, or {@code Double.NaN} if the value couldn't be computed
 */
public double computeForOperation(MetricKey<O> key, O node, MetricOptions options) {
    Objects.requireNonNull(key, NULL_KEY_MESSAGE);
    Objects.requireNonNull(options, NULL_OPTIONS_MESSAGE);
    Objects.requireNonNull(node, NULL_NODE_MESSAGE);

    // Metrics that don't apply to this kind of node yield NaN rather than throwing.
    if (!key.supports(node)) {
        return Double.NaN;
    }

    MetricMemoizer<O> memoizer
        = getLanguageSpecificProjectMemoizer().getOperationMemoizer(node.getQualifiedName());

    if (memoizer == null) {
        return Double.NaN;
    }

    return getLanguageSpecificComputer().computeForOperation(key, node, false, options, memoizer);
}
/**
 * Computes a metric identified by its code on a class AST node, possibly selecting a variant
 * with the {@code MetricOptions} parameter.
 *
 * @param key     The key identifying the metric to be computed
 * @param node    The node on which to compute the metric
 * @param options The options of the metric
 *
 * @return The value of the metric, or {@code Double.NaN} if the value couldn't be computed
 */
public double computeForType(MetricKey<T> key, T node, MetricOptions options) {
    Objects.requireNonNull(key, NULL_KEY_MESSAGE);
    Objects.requireNonNull(options, NULL_OPTIONS_MESSAGE);
    Objects.requireNonNull(node, NULL_NODE_MESSAGE);

    // Metrics that don't apply to this kind of node yield NaN rather than throwing.
    if (!key.supports(node)) {
        return Double.NaN;
    }

    MetricMemoizer<T> memoizer
        = getLanguageSpecificProjectMemoizer().getClassMemoizer(node.getQualifiedName());

    if (memoizer == null) {
        return Double.NaN;
    }

    return getLanguageSpecificComputer().computeForType(key, node, false, options, memoizer);
}
@Override public double computeWithResultOption(MetricKey<O> key, T node, boolean force, MetricOptions options, ResultOption option, ProjectMemoizer<T, O> stats) { List<O> ops = findOperations(node); List<Double> values = new ArrayList<>(); for (O op : ops) { if (key.supports(op)) { MetricMemoizer<O> opStats = stats.getOperationMemoizer(op.getQualifiedName()); double val = this.computeForOperation(key, op, force, options, opStats); if (val != Double.NaN) { values.add(val); } } } // FUTURE use streams to do that when we upgrade the compiler to 1.8 switch (option) { case SUM: return sum(values); case HIGHEST: return highest(values); case AVERAGE: return average(values); default: return Double.NaN; } }
@Override public Object visit(ASTClassOrInterfaceType node, Object data) { super.visit(node, data); String typeName = node.getImage(); if (node.isAnonymousClass()) { QualifiableNode parent = node.getFirstParentOfAnyType(ASTAllocationExpression.class, ASTEnumConstant.class); if (parent != null) { typeName = parent.getQualifiedName().toString(); } } // FIXME, we should discard the array depth on this node, it should only be known to ASTReferenceType (#910) populateType(node, typeName, node.getArrayDepth()); ASTTypeArguments typeArguments = node.getFirstChildOfType(ASTTypeArguments.class); if (typeArguments != null) { final JavaTypeDefinition[] boundGenerics = new JavaTypeDefinition[typeArguments.jjtGetNumChildren()]; for (int i = 0; i < typeArguments.jjtGetNumChildren(); ++i) { boundGenerics[i] = ((TypeNode) typeArguments.jjtGetChild(i)).getTypeDefinition(); } node.setTypeDefinition(JavaTypeDefinition.forClass(node.getType(), boundGenerics)); } return data; }
/**
 * Computes a metric identified by its key on an operation AST node.
 *
 * @param key     The key identifying the metric to be computed
 * @param node    The node on which to compute the metric
 * @param options The options of the metric
 *
 * @return The value of the metric, or {@code Double.NaN} if the value couldn't be computed
 */
public double computeForOperation(MetricKey<O> key, O node, MetricOptions options) {
    Objects.requireNonNull(key, NULL_KEY_MESSAGE);
    Objects.requireNonNull(options, NULL_OPTIONS_MESSAGE);
    Objects.requireNonNull(node, NULL_NODE_MESSAGE);

    // Metrics that don't apply to this kind of node yield NaN rather than throwing.
    if (!key.supports(node)) {
        return Double.NaN;
    }

    MetricMemoizer<O> memoizer
        = getLanguageSpecificProjectMemoizer().getOperationMemoizer(node.getQualifiedName());

    if (memoizer == null) {
        return Double.NaN;
    }

    return getLanguageSpecificComputer().computeForOperation(key, node, false, options, memoizer);
}
/**
 * Computes a metric identified by its code on a class AST node, possibly selecting a variant
 * with the {@code MetricOptions} parameter.
 *
 * @param key     The key identifying the metric to be computed
 * @param node    The node on which to compute the metric
 * @param options The options of the metric
 *
 * @return The value of the metric, or {@code Double.NaN} if the value couldn't be computed
 */
public double computeForType(MetricKey<T> key, T node, MetricOptions options) {
    Objects.requireNonNull(key, NULL_KEY_MESSAGE);
    Objects.requireNonNull(options, NULL_OPTIONS_MESSAGE);
    Objects.requireNonNull(node, NULL_NODE_MESSAGE);

    // Metrics that don't apply to this kind of node yield NaN rather than throwing.
    if (!key.supports(node)) {
        return Double.NaN;
    }

    MetricMemoizer<T> memoizer
        = getLanguageSpecificProjectMemoizer().getClassMemoizer(node.getQualifiedName());

    if (memoizer == null) {
        return Double.NaN;
    }

    return getLanguageSpecificComputer().computeForType(key, node, false, options, memoizer);
}
@Override public double computeWithResultOption(MetricKey<O> key, T node, boolean force, MetricOptions options, ResultOption option, ProjectMemoizer<T, O> stats) { List<O> ops = findOperations(node); List<Double> values = new ArrayList<>(); for (O op : ops) { if (key.supports(op)) { MetricMemoizer<O> opStats = stats.getOperationMemoizer(op.getQualifiedName()); double val = this.computeForOperation(key, op, force, options, opStats); if (val != Double.NaN) { values.add(val); } } } // FUTURE use streams to do that when we upgrade the compiler to 1.8 switch (option) { case SUM: return sum(values); case HIGHEST: return highest(values); case AVERAGE: return average(values); default: return Double.NaN; } }
@Override public Object visit(ASTClassOrInterfaceType node, Object data) { super.visit(node, data); String typeName = node.getImage(); if (node.isAnonymousClass()) { QualifiableNode parent = node.getFirstParentOfAnyType(ASTAllocationExpression.class, ASTEnumConstant.class); if (parent != null) { typeName = parent.getQualifiedName().toString(); } } // FIXME, we should discard the array depth on this node, it should only be known to ASTReferenceType (#910) populateType(node, typeName, node.getArrayDepth()); ASTTypeArguments typeArguments = node.getFirstChildOfType(ASTTypeArguments.class); if (typeArguments != null) { final JavaTypeDefinition[] boundGenerics = new JavaTypeDefinition[typeArguments.jjtGetNumChildren()]; for (int i = 0; i < typeArguments.jjtGetNumChildren(); ++i) { boundGenerics[i] = ((TypeNode) typeArguments.jjtGetChild(i)).getTypeDefinition(); } node.setTypeDefinition(JavaTypeDefinition.forClass(node.getType(), boundGenerics)); } return data; }