/** Wraps each parsed declaration as a nested member definition. */
static Parser<Member> nestedDef(Parser<Declaration> dec) {
  return dec.map(declaration -> new NestedDef(declaration));
}
/**
 * Converts a parser of a collection of {@link Token} to a parser of an array of {@code Token}.
 *
 * @param parser the parser producing a collection of tokens
 * @return a parser producing the same tokens as a {@code Token[]}
 */
static Parser<Token[]> tokens(final Parser<? extends Collection<Token>> parser) {
  // Passing a zero-length array lets toArray allocate an exactly-sized array in
  // one step; on modern JVMs this is also faster than pre-sizing with size().
  return parser.map(list -> list.toArray(new Token[0]));
}
/**
 * {@code p.asOptional()} is equivalent to {@code p?} in EBNF. {@code Optional.empty()}
 * is the result when {@code this} fails with no partial match. Note that {@link Optional}
 * prohibits nulls so make sure {@code this} does not result in {@code null}.
 *
 * @since 3.0
 */
public final Parser<Optional<T>> asOptional() {
  // Wrap each successful result; fall back to an empty Optional when this parser
  // fails without consuming input.
  return map(result -> Optional.of(result)).optional(Optional.empty());
}
/** Parses one or more rules in sequence; a lone rule is returned unwrapped. */
static Parser<Rule> sequential(Parser<Rule> rule) {
  return rule.many1().map(rules -> {
    if (rules.size() == 1) {
      return rules.get(0);
    }
    return new SequentialRule(rules);
  });
}
/** Parses one or more rules separated by {@code |}; a lone alternative is returned unwrapped. */
static Parser<Rule> alternative(Parser<Rule> rule) {
  return rule.sepBy1(TerminalParser.term("|")).map(alternatives -> {
    if (alternatives.size() == 1) {
      return alternatives.get(0);
    }
    return new AltRule(alternatives);
  });
}
}
/** Parses an expression terminated by {@code ;} as an expression statement. */
static Parser<Statement> expression(Parser<Expression> expr) {
  return expr.followedBy(term(";")).map(parsed -> new ExpressionStatement(parsed));
}
/**
 * Builds a {@link Lexicon} that maps the given keyword names (normalized per
 * {@code stringCase}) to reserved-word token values, with {@code defaultMap} as
 * the fallback for words that are not keywords.
 *
 * @param wordScanner scanner that recognizes a single word
 * @param keywordNames the keyword strings to reserve
 * @param stringCase case policy used to de-duplicate and look up keywords
 * @param defaultMap fallback mapping applied to non-keyword words
 */
static Lexicon lexicon(
    Parser<String> wordScanner, Collection<String> keywordNames,
    StringCase stringCase, final Function<String, ?> defaultMap) {
  // Program to the Map interface; diamond and zero-length toArray are the
  // preferred modern idioms (same behavior as the explicit forms).
  Map<String, Object> map = new HashMap<>();
  for (String n : unique(stringCase, keywordNames.toArray(new String[0]))) {
    Object value = Tokens.reserved(n);
    map.put(stringCase.toKey(n), value);
  }
  Function<String, Object> keywordMap = stringCase.byKey(map::get);
  return new Lexicon(keywordMap, wordScanner.map(Lexicon.fallback(keywordMap, defaultMap)));
}
}
/** Parses {@code exists <relation>} into a unary EXISTS relational expression. */
static Parser<Expression> exists(Parser<Relation> relation) {
  return term("exists").next(relation)
      .map(rel -> new UnaryRelationalExpression(rel, Op.EXISTS));
}
/** Parses {@code not exists <relation>} into a unary NOT EXISTS relational expression. */
static Parser<Expression> notExists(Parser<Relation> relation) {
  return phrase("not exists").next(relation)
      .map(rel -> new UnaryRelationalExpression(rel, Op.NOT_EXISTS));
}
/** Parses {@code throw <expression>;} as a throw statement. */
static Parser<Statement> throwStatement(Parser<Expression> thrown) {
  return between(term("throw"), thrown, term(";"))
      .map(exception -> new ThrowStatement(exception));
}
static Parser<BinaryOperator<Expression>> conditional(Parser<Expression> consequence) { // "? consequence :" can be think of as a right associative infix operator. // consequence can be the lazy expression, which is everything return consequence.between(term("?"), term(":")) .map(cons -> (cond, alt) -> new ConditionalExpression(cond, cons, alt)); }
/** Parses one or more comma-separated expressions terminated by {@code ;}. */
static Parser<Statement> expressionList(Parser<Expression> expr) {
  return expr.sepBy1(term(",")).followedBy(term(";"))
      .map(expressions -> new ExpressionListStatement(expressions));
}
/**
 * Parses a lambda expression: a parameter list (parenthesized, or a single bare
 * parameter), the {@code ->} arrow, and a body that is either a block statement
 * or a single expression.
 */
static Parser<LambdaExpression> lambdaExpression(
    Parser<Expression> expression, Parser<Statement> stmt) {
  // A parameter may carry an explicit type, or be a bare identifier.
  Parser<LambdaExpression.Parameter> explicitlyTyped = Parsers.sequence(
      TypeLiteralParser.TYPE_LITERAL, Terminals.Identifier.PARSER,
      LambdaExpression.Parameter::new);
  Parser<LambdaExpression.Parameter> untyped =
      Terminals.Identifier.PARSER.map(LambdaExpression.Parameter::new);
  Parser<LambdaExpression.Parameter> parameter = explicitlyTyped.or(untyped);
  // Either "(a, b, ...)" or a lone parameter without parentheses.
  Parser<List<LambdaExpression.Parameter>> parameterList =
      paren(parameter.sepBy(term(","))).or(parameter.map(Collections::singletonList));
  // The body is a block, or an expression treated as a statement.
  Parser<Statement> body = StatementParser.blockStatement(stmt).<Statement>cast()
      .or(expression.map(ExpressionStatement::new));
  return Parsers.sequence(parameterList, term("->").next(body), LambdaExpression::new);
}
/** Parses {@code return [expression];} as a return statement; the value is optional. */
static Parser<Statement> returnStatement(Parser<Expression> expr) {
  return between(term("return"), expr.optional(), term(";"))
      .map(returned -> new ReturnStatement(returned));
}
/** Parses {@code super(arg, ...);} as a super-constructor call statement. */
static Parser<Statement> superCall(Parser<Expression> expr) {
  Parser<List<Expression>> arguments = expr.sepBy(term(","));
  return between(phrase("super ("), arguments, phrase(") ;"))
      .map(args -> new SuperCallStatement(args));
}
/**
 * A {@link Parser} that greedily runs {@code tokenizer}, and translates line feed characters
 * ({@code '\n'}) to {@code indent} and {@code outdent} tokens.
 * Return values are wrapped in {@link Token} objects and collected in a {@link List}.
 * Patterns recognized by {@code delim} are ignored.
 */
public Parser<List<Token>> lexer(Parser<?> tokenizer, Parser<?> delim) {
  // Recognize a bare '\n' as the LF punctuation marker.
  Parser<?> lineFeed = Scanners.isChar('\n').retn(Punctuation.LF);
  Parser<List<Token>> rawTokens = Parsers.or(tokenizer, lineFeed).lexer(delim);
  // Post-process: rewrite LF markers into indent/outdent tokens.
  return rawTokens.map(tokens -> analyzeIndentations(tokens, Punctuation.LF));
}
/** Parses {@code this(arg, ...);} as a this-constructor call statement. */
static Parser<Statement> thisCall(Parser<Expression> expr) {
  Parser<List<Expression>> arguments = expr.sepBy(term(","));
  return between(phrase("this ("), arguments, phrase(") ;"))
      .map(args -> new ThisCallStatement(args));
}
/**
 * Parses left-associative {@code UNION [ALL]} between relations. A lazy
 * self-reference lets parenthesized operands themselves contain unions.
 */
static Parser<Relation> union(Parser<Relation> rel) {
  Parser.Reference<Relation> selfRef = Parser.newReference();
  // An operand is either a parenthesized (possibly union) relation or a plain one.
  Parser<Relation> operand = ExpressionParser.paren(selfRef.lazy()).or(rel);
  // "union" optionally followed by "all"; the boolean marks the ALL variant.
  Parser<Relation> result = operand.infixl(
      TerminalParser.term("union").next(TerminalParser.term("all").succeeds())
          .label("relation")
          .map(all -> (left, right) -> new UnionRelation(left, all, right)));
  selfRef.set(result);
  return result;
}