private ParserRuleContext getParseTree(final String sql) { final SqlBaseLexer sqlBaseLexer = new SqlBaseLexer( new CaseInsensitiveStream(CharStreams.fromString(sql))); final CommonTokenStream tokenStream = new CommonTokenStream(sqlBaseLexer); final SqlBaseParser sqlBaseParser = new SqlBaseParser(tokenStream); sqlBaseLexer.removeErrorListeners(); sqlBaseLexer.addErrorListener(ERROR_LISTENER); sqlBaseParser.removeErrorListeners(); sqlBaseParser.addErrorListener(ERROR_LISTENER); final Function<SqlBaseParser, ParserRuleContext> parseFunction = SqlBaseParser::statements; try { // first, try parsing with potentially faster SLL mode sqlBaseParser.getInterpreter().setPredictionMode(PredictionMode.SLL); return parseFunction.apply(sqlBaseParser); } catch (final ParseCancellationException ex) { // if we fail, parse with LL mode tokenStream.seek(0); // rewind input stream sqlBaseParser.reset(); sqlBaseParser.getInterpreter().setPredictionMode(PredictionMode.LL); return parseFunction.apply(sqlBaseParser); } }
public void switchToFullLL() { // First rewind the token stream this.tokens.seek(0); // Use full (custom) error reporting now this.parser.setErrorHandler(new ErrorStrategy4()); this.parser.addErrorListener(this); // Now try full LL(*) this.parser.getInterpreter().setPredictionMode(PredictionMode.LL); }
private Node invokeParser(String name, String sql, Function<SqlBaseParser, ParserRuleContext> parseFunction) { try { SqlBaseLexer lexer = new SqlBaseLexer( new CaseInsensitiveStream(new ANTLRInputStream(sql))); CommonTokenStream tokenStream = new CommonTokenStream(lexer); SqlBaseParser parser = new SqlBaseParser(tokenStream); parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames()))); lexer.removeErrorListeners(); lexer.addErrorListener(ERROR_LISTENER); parser.removeErrorListeners(); parser.addErrorListener(ERROR_LISTENER); ParserRuleContext tree; try { // first, try parsing with potentially faster SLL mode parser.getInterpreter().setPredictionMode(PredictionMode.SLL); tree = parseFunction.apply(parser); } catch (ParseCancellationException ex) { // if we fail, parse with LL mode tokenStream.seek(0); // rewind input stream parser.reset(); parser.getInterpreter().setPredictionMode(PredictionMode.LL); tree = parseFunction.apply(parser); } return new AstBuilder().visit(tree); } catch (StackOverflowError e) { throw new ParsingException(name + " is too large (stack overflow while parsing)"); } }
// Rewind the token stream and clear the parser's state so the same input
// can be re-parsed from the beginning (used for the SLL -> LL retry).
tokenStream.seek(0); // rewind input stream
parser.reset();
/**
 * Parses the token stream produced by the given lexer into a Liquid parse tree.
 *
 * <p>Both lexer and parser report errors by throwing a {@link RuntimeException}
 * that pinpoints the offending line and column. Parsing is first attempted in
 * the faster SLL prediction mode; any failure triggers a rewind and a retry in
 * full LL mode.
 *
 * @param lexer the lexer wrapping the template source
 * @return the parse tree for the {@code parse} rule
 */
private ParseTree parse(LiquidLexer lexer) {
    lexer.removeErrorListeners();
    lexer.addErrorListener(new BaseErrorListener() {
        @Override
        public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
                                int line, int charPositionInLine, String msg,
                                RecognitionException e) {
            throw new RuntimeException(
                String.format("lexer error on line %s, index %s", line, charPositionInLine), e);
        }
    });

    CommonTokenStream tokens = new CommonTokenStream(lexer);
    LiquidParser parser = new LiquidParser(tokens);

    parser.removeErrorListeners();
    parser.addErrorListener(new BaseErrorListener() {
        @Override
        public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
                                int line, int charPositionInLine, String msg,
                                RecognitionException e) {
            throw new RuntimeException(
                String.format("parser error on line %s, index %s", line, charPositionInLine), e);
        }
    });

    // Fast path: SLL prediction. Any exception (the listeners above throw
    // RuntimeException) falls through to a full-LL retry from the start.
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    try {
        return parser.parse();
    } catch (Exception e) {
        tokens.seek(0);
        parser.reset();
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        return parser.parse();
    }
}
// Rewind the token stream to the first token so the input can be re-parsed.
this.tokens.seek(0);