/**
 * Evaluates the expression against the given row and returns the resulting
 * {@link Expression}. NOTE(review): unlike boolean evaluators, this visitor
 * produces an Expression — presumably a residual for the row; confirm against
 * the visitor implementation.
 */
private Expression eval(StructLike struct) {
  this.struct = struct;
  Expression result = ExpressionVisitors.visit(expr, this);
  return result;
}
/** Evaluates the bound expression against a single row, returning whether it matches. */
private boolean eval(StructLike row) {
  this.struct = row;
  boolean matches = ExpressionVisitors.visit(expr, this);
  return matches;
}
@Override public Expression project(Expression expr) { // projections assume that there are no NOT nodes in the expression tree. to ensure that this // is the case, the expression is rewritten to push all NOT nodes down to the expression // leaf nodes. // this is necessary to ensure that the default expression returned when a predicate can't be // projected is correct. return ExpressionVisitors.visit(ExpressionVisitors.visit(expr, RewriteNot.get()), this); }
/** Evaluates the expression against a manifest's partition summaries. */
private boolean eval(ManifestFile manifest) {
  this.stats = manifest.partitions();
  if (stats != null) {
    return ExpressionVisitors.visit(expr, this);
  }
  // No partition summaries are available, so the manifest cannot be pruned.
  return ROWS_MIGHT_MATCH;
}
/**
 * Returns an equivalent expression with NOT nodes pushed down to the leaves.
 *
 * @param expr an {@link Expression} to rewrite
 * @return the rewritten expression
 */
public static Expression rewriteNot(Expression expr) {
  return ExpressionVisitors.visit(expr, RewriteNot.get());
}
/**
 * Rewrites an expression, replacing every unbound/named reference with a bound
 * reference to a field of the given struct.
 * <p>
 * Resolving a reference converts any literal used in a predicate on that field
 * to the field's type via {@link Literal#to(Type)}; if the conversion is not
 * allowed, a {@link ValidationException validation exception} is thrown.
 * <p>
 * The returned expression may be simplified during construction — for example,
 * {@code isNull("a")} becomes {@code alwaysFalse()} when {@code "a"} resolves
 * to a required field.
 * <p>
 * The input expression must not contain references that are already bound;
 * otherwise an {@link IllegalStateException} is thrown.
 *
 * @param struct The {@link StructType struct type} used to resolve references by name.
 * @param expr An {@link Expression expression} to rewrite with bound references.
 * @return the expression rewritten with bound references
 * @throws ValidationException if literals do not match bound references
 * @throws IllegalStateException if any references are already bound
 */
public static Expression bind(StructType struct, Expression expr) {
  return ExpressionVisitors.visit(expr, new BindVisitor(struct));
}
/**
 * Binds each expression to the struct and collects the field ids of all
 * references that appear in the bound expressions.
 *
 * @param struct the struct type used to bind references by name
 * @param exprs expressions to bind and scan; may be null
 * @return the set of referenced field ids (empty when {@code exprs} is null)
 */
public static Set<Integer> boundReferences(StructType struct, List<Expression> exprs) {
  // A null list means there is nothing to bind and nothing is referenced.
  if (exprs == null) {
    return ImmutableSet.of();
  }

  ReferenceVisitor collector = new ReferenceVisitor();
  for (Expression expr : exprs) {
    Expression bound = bind(struct, expr);
    ExpressionVisitors.visit(bound, collector);
  }

  return collector.references;
}
static FilterCompat.Filter convertColumnFilter(Schema schema, String column, Expression expr) { FilterPredicate pred = visit(expr, new ConvertColumnFilterToParquet(schema, column)); // TODO: handle AlwaysFalse.INSTANCE if (pred != null && pred != AlwaysTrue.INSTANCE) { // FilterCompat will apply LogicalInverseRewriter return FilterCompat.get(pred); } else { return FilterCompat.NOOP; } }
static FilterCompat.Filter convert(Schema schema, Expression expr) { FilterPredicate pred = visit(expr, new ConvertFilterToParquet(schema)); // TODO: handle AlwaysFalse.INSTANCE if (pred != null && pred != AlwaysTrue.INSTANCE) { // FilterCompat will apply LogicalInverseRewriter return FilterCompat.get(pred); } else { return FilterCompat.NOOP; } }
/**
 * Walks the expression and fails if any reference is not bound.
 *
 * @param message context included in the failure message
 * @param expr the expression to check
 */
public static void assertAllReferencesBound(String message, Expression expr) {
  ExpressionVisitors.visit(expr, new CheckReferencesBound(message));
}
/** Binds an Iceberg filter to the schema's struct and translates it to a Spark expression. */
public static Expression convert(com.netflix.iceberg.expressions.Expression filter, Schema schema) {
  com.netflix.iceberg.expressions.Expression bound = Binder.bind(schema.asStruct(), filter);
  return visit(bound, new ExpressionToSpark(schema));
}
/**
 * Evaluates the expression against a row group's column statistics.
 * Collects per-field stats, value counts, and Parquet-to-Iceberg converters
 * for each column chunk that carries a field id, then visits the expression.
 */
private boolean eval(MessageType fileSchema, BlockMetaData rowGroup) {
  // A row group with no rows cannot contain a match.
  if (rowGroup.getRowCount() <= 0) {
    return ROWS_CANNOT_MATCH;
  }

  this.stats = Maps.newHashMap();
  this.valueCounts = Maps.newHashMap();
  this.conversions = Maps.newHashMap();
  for (ColumnChunkMetaData chunk : rowGroup.getColumns()) {
    PrimitiveType primitive = fileSchema.getType(chunk.getPath().toArray()).asPrimitiveType();
    // Columns without a field id are not part of the Iceberg schema; skip them.
    if (primitive.getId() != null) {
      int fieldId = primitive.getId().intValue();
      stats.put(fieldId, chunk.getStatistics());
      valueCounts.put(fieldId, chunk.getValueCount());
      conversions.put(fieldId, converterFromParquet(primitive));
    }
  }

  return ExpressionVisitors.visit(expr, this);
}
case NOT: Not not = (Not) expr; return visitor.not(visit(not.child(), visitor)); case AND: And and = (And) expr; return visitor.and(visit(and.left(), visitor), visit(and.right(), visitor)); case OR: Or or = (Or) expr; return visitor.or(visit(or.left(), visitor), visit(or.right(), visitor)); default: throw new UnsupportedOperationException(
/** Evaluates the expression against a data file's column-level metrics. */
private boolean eval(DataFile file) {
  // An empty file cannot contain any matching rows.
  if (file.recordCount() <= 0) {
    return ROWS_CANNOT_MATCH;
  }

  this.valueCounts = file.valueCounts();
  this.nullCounts = file.nullValueCounts();
  this.upperBounds = file.upperBounds();
  this.lowerBounds = file.lowerBounds();

  return ExpressionVisitors.visit(expr, this);
}
/** Strictly evaluates the expression against a data file's column-level metrics. */
private boolean eval(DataFile file) {
  // With no records, the predicate holds for every (zero) rows.
  if (file.recordCount() <= 0) {
    return ROWS_MUST_MATCH;
  }

  this.valueCounts = file.valueCounts();
  this.nullCounts = file.nullValueCounts();
  this.upperBounds = file.upperBounds();
  this.lowerBounds = file.lowerBounds();

  return ExpressionVisitors.visit(expr, this);
}
/**
 * Evaluates the expression against a row group using dictionary pages.
 * Builds per-field column descriptors, converters, and a flag recording
 * whether each column chunk fell back to non-dictionary encoded pages,
 * then visits the expression.
 */
private boolean eval(MessageType fileSchema, BlockMetaData rowGroup,
                     DictionaryPageReadStore dictionaries) {
  this.dictionaries = dictionaries;
  this.dictCache = Maps.newHashMap();
  this.isFallback = Maps.newHashMap();
  this.cols = Maps.newHashMap();
  this.conversions = Maps.newHashMap();

  // Index column descriptors and converters by Iceberg field id.
  for (ColumnDescriptor column : fileSchema.getColumns()) {
    PrimitiveType primitive = fileSchema.getType(column.getPath()).asPrimitiveType();
    if (primitive.getId() != null) {
      int fieldId = primitive.getId().intValue();
      cols.put(fieldId, column);
      conversions.put(fieldId, converterFromParquet(primitive));
    }
  }

  // Record which chunks contain pages that are not dictionary-encoded.
  for (ColumnChunkMetaData chunk : rowGroup.getColumns()) {
    PrimitiveType primitive = fileSchema.getType(chunk.getPath().toArray()).asPrimitiveType();
    if (primitive.getId() != null) {
      int fieldId = primitive.getId().intValue();
      isFallback.put(fieldId, hasNonDictionaryPages(chunk));
    }
  }

  return ExpressionVisitors.visit(expr, this);
}